[Binary artifact, not recoverable as text: a tar archive rooted at `var/home/core/zuul-output/` containing the directory `var/home/core/zuul-output/logs/` and the gzip-compressed file `var/home/core/zuul-output/logs/kubelet.log.gz` (original name `kubelet.log`). The compressed log payload is binary data and cannot be reconstructed here.]
BJrq,>|r ҠtpB0Uatr+X?{!'g=4. 06J4Y.u$ M|"" D PɍBw`Ho@כqJ:{_I6DN>p˪'\□ ^-08R S?Vً-5^ S%бAJa.ݣ#؀jacs[ɺ% j&~2W5c'fZ|ZK-{C]V7htdț~j4#y3o[!;t]x]dO3/f2z u*PE8jȫ9G:Rfasx )bۨrWE,eR~RaqZ0/{|P k?JFi>4nV6j]c໥KЄ-tRT&&ٴ!TvrY߹O= &fRUIB,o^r~98:BO(Mk~I(Ya4 +T>(#!Q[L0bO$8ɜRur{c89"0i5e#F4EKٟ:ot0ĥ\B`˘\zXnJm';Es?j;覆J Yn@tx,Av+Kd*.)|C !E%c&&䂛 \֝~ã'_`tn|X.b`ZZ6L6(D2V`T#GCFe-:@nW~,7Tt\C#?i6 P:4Cb8\A%X|QZqhF-ø_au4ua$ϏDes h(MԡJ cs M^rv6Z4fBE/FDpWxEhy?H$7xQq8غΦ|{"3 ĠW2&,XHB) 6ql?{Wre / ^Y C L&0jӤ)Yln*nr?6X}9Qe(^ :_TjjsbԂˆ̗݀,+]i)x5Q!:3y㟝n^.k(Swvhm*_%P^qPJ-+eSz˿An!]'毃O`.?UrkMc &/jbovH_!R|%MVRv67/- ~$\H1cm.&(C‽rT& 7tݔ}~yϷSɪo&Fn|.|S΅5xH9pZTn>?O@U7`f[2e󋿖-^01 P.d\Z.tK:*m3%BOYEtD|)XY62Ӯ#hAA[l;VvV&(p}NyQ`lhq9zsw3*@?.wwׄ4ܪKTx oYy$z,qӿDa<,Gؗ%Yo uJcfدs32@uoi zG7A޸͕Ko7=N&:rm6P:mO`<)^RKׯ"(&&bǍ-8xBIuw߻'X]* NP+>rTRf#!)"PDl2 d ם54YЯ|A<^m*zhcAn;Nyeρ*dK> 떃b&~: sD NiF$ Iq.Dȋ ځq\kY̕* r+L0UQNɥdoM)z`qR"י^g+:e*`({A2~|IF@-rRw2+ ˩^hԹdmLsBM@5*lqFg?8B cyԄ 8j5^0K?oW<)5yw0rI"q?o'&n/>첽LJ^Uʛfh!'-0ʋǻNFά%.xȍRK}GFI.qJYU3*s{Z\n11 qhI꘭ѥhrSTL ǘQI֌٬ajUʺ:]W]8<]A+"&?͕e?^=kP fp!MVBe:]q!@D !$+u$ D"#_NJo:#f6UAG٬p( ZcW*kminJE U&p9-eqΌԕ!(!dB!HQTo$dȈ (F]Sh TQG,# Hv>&f}ؐi sQ͏]5u;JDdxd8Y %)HiNC@m٥e ֫)׭g>Rq q(Ds3>` $ tA(65b5qHqU{ԋc.SuV%E[Y/N/vzq=721Z!dL !RzT"-hg՞9F%!rs[x:̹8}c֣=Y̧ߦx׻:N(o-:cy?jǺ|۵Xq ?<(X)Xy[gGy~QG F&-DMd+"63oh"B1#U2W [O3<~T8]ju#>J:5ڰa|V璕:[q۸9vdHd,tϡlut*i"bψ|<@J%SfIunjY)~3% eJYedI+-+U5FĠ `B%D3VD+krV;fr$LO>{Yan݅\ȵS-ґWK-B.o5[7_ZIp:Y~h6bK-wnx\ma ^Vt)[l+x*)HJ;k,]9)\FO,(uJ䌛%5S8L6Peϴgϝfb_a,5Zjڑ/o<`l}vc-x2L^sO<e&FN^˘9( JiVRp>>1!<*U8i9Ի4`lvef #- LOL@hz*NUQze|I詤67νi]\xc؞EF)2hPѭY  FrY!Z:Ze O21Cc+c]~P.F#K/JP\"ߍKave n'{N NV?sN`yY*k;i^<ҧ˙Ț]un !1iS;Ӽ~ǡ~RGf+'5/%~+mDHxB#q*%ij?Z$;:$R򝰧Ŋf5 N 󎲬|gM;RsLJ"#Vĥ2R8D*e0)1Mfgږ9r(SGac<o;~ D^ݚVS2IBEQMv[͋ U<rL9b:.$ :Z^r9hɔ&]995\ΎIU=iJFy^&,?6'X{!moGdrřR1rcFҖ z=Wmuu9>ʦ %Cr:2 'PЉ%ɇIVLmxN]KPn CMA˓ck m g%ן>|O,\2Zi<\TP'k1ڨfRV19K_m&S8hmC:2`ďr1$`v$K,5[9#^ x~~~Y5@߼wc;\Sf"U:4"-VoIR!s2@Y^HRQ[Rq/VRэHRMF偀-CE lҦhW¢;,Xkdݠ1,!1f%X25-+K&_*46`p $q+|}W]*T%g8GP3:o$]ty> ]bQdRQR[Jb?XƌUڜ$dgf)$5qbY0#{<$86Qd+hjY@^V8ƈ)V,.j1D^d@z@ک(1Ӌ'_h%xE9Y7 ,T sI^1jQ>LLEm-=1F>9Y*<ɒgbҒr 됹F[C%؊0%]QM{{M{<|lۓkHG+G+E5WǮ۬0 kLqYrò8dWg(0aF'䟓/W-r5e=Р‰CeQ?+Zo}N{n?^-OT-dg%Y~&P8+?֪?rc>ϸvߜws^bп¹ cIږaCNO/.oWe׮v^+~~}rO8L+|q~W@|auO [Bax[۷xoP!L{o7KSkfD)7IMrr_‹c!. ZM.){u=MU?䚹p4\3fzu07O4mY~?xqů^TLn ˹M_O`CcNaU:k?.B. 
&sb1q@,B5Z[~u*~pc8Du׻sJ uY 3P.lh UB!xLR}WrXg=l|fr/?nca_]uGy>1ZwzeXOɳV1tqBP kW:uD('PQȉCX 2[ń*!׊1L1֕l:N bJ\u!TԶu" R6*뤃>WNAV͜uB9ƟZabvbh7-i>LϠ )n}+I>&BSjv7~C,+φ<;2$؃/;N<:7jH|13YjX珧Xa{ҟuɄbk ŴMƈ>Xb٪j6X%$CT&>1GfKx#sIۨ6U\P@ɺdlg͜Gv=#n3dx!m~9M>e PS@T$2iΌBA!VMѮʨ1ؤD@ WX ?Fp1I8K$ϝ9߶a.Ts:y&:ֿn~Wnh._[zZܾo we-CZ{sW^7||q[&t}<\9*nWʎkeڦO#C+JiI%CXSD].GԔT+ޗ HqnT\!Z1:.*jMTodfndUaa7 3℅O7sؘ7Ow,.0!>;}I>όعi(tCKji^mD 7s Vb7T ,a*gCE9bKbz$8&X-&93* V1Re{?enN糲b}QvFm;`ʭD#BSf 2hZRW-$Xсb֕+YsQ2da[kZh V`STT]=9p_O]n8D"댈nB #a6Nj.>b#Sl):2T!9*9$0fAGeJn̄ : š gRI#[,Ie4i̭D^FXnu\\ W5n^/.θ'\pȗd-8t1rIG@ou hQ9sp Ǔ9wanXL5Qs)F'-(VvMՏ/ThRɦ4l7F):ٔf˦4+D['ٔG6YLZe[e:g3zc*9xURLfPUX% S:7d}tPgֿz.wI Yg]6&VMfhV%pw65w猛 ̋Iκa!ml_5u)w#ḷ2HحN}?͆5&']}HE҃dJ aTBz`N%MlO˾E0a]LӧUz*1c-Po;By}Qegg!}jR)R !樔|ǠIPECOE6k"ޢ$1V:n<ob._.1q=4my{\Oys-̹]C 1s9=R2DN)VUPZ)V& tIO0 vǘQ:}Pt bDFGj TvXRqUr6[ :k3,E#qKZ*LcU͜0K% hD >%-٧0nw)붨~6Om9٧r|ʁGPB&gc2bPJp:OZsbL4&?ǛFz$CbPfƪgʚ8_Ae7UGƌ];fV0E-:Aqj$̪̬݅/PW466I&˥ֱ֤ aW-GCV m­'-Km,YFwxsWiO!e)(\p:\GcUU/'9zxz Pxe; i]T ÏQcjPFŗKvK斘~U~Gσ~UGӑ#J.MҔ.LDݳ޿oW"[eg6jS3ͽmya,r=س*SD./o|f`zԆl?`rl}?\5.h2޻GOO?IGYm&~w6ǥ]49ػqk`83\=h\7BL-rE`7-N{=H vXRS1}Hz'W^ipFQ]mqqֿ/)pz24/]<ލl|, M^4wt_⿧tD뜮ҋ-vWLJ U)EtGצlH jsޱY6{ӆz5߾_2@u&Y} oc &җӊ5{Nn>ц'l멩G9QzuegQI8Ō.z)*: .$/]־[F<3wV#R/Bc(lR^h&\ #dP=L)j]"E!W82nkeyQ:E5xQ ~z`V RΎO2-"ȕǗAy$ 'GWȺ廿 <؜RȚ'*W?/~oPz$^\)28Š-l`DrB_lpGs9#+<"wTqa˦U [ne=,v~{ R_}6Ϗ'_P{A}&Tn3Yw~Q4~76;C3w(t/;q̼e-Kͽr `se5$A0LJa OѰa"mr1^gա >%y|{sfc΄9 cӫ|ifEU& cI!=I#*Ӽ 7o+c.7ivRif!2%h- Nxp=q^_r#1y7+ jݸC)nI8XRaN pxL`dߕ*{O%a:A B$$4)$`wQeǐ?J-z"ur)\ІH' G2pØY]^rZx'aPV pi|ExsPW@envt(HP}ոoo`")"MQkKH:6p/s'owvu¡KO"I9ouq|=#W=8j͎{T2˧(KYnR:9toje /}x!oE\к>m݃Z tXibT8NT~e^ԫ:?py}~oLTs7,ow&ףZ|WoUiURv/Თ S=ާ w4uyjO=(Jӫ|}-XkI x \BCA=!EL4ZBK+y%-DĬ 0  6p^2A{D6-T𿤤O*FiƟ]9C"e`T<NJIlِm #?}.m5->֣WRf֖eߠ5o$D3۲6z޼pRY;kz\IZU$&SW mC"S$Tׯ^n }sj"fKGڱvqM3-Wh0X_sw=m:Gm{[ꚎۤϼS)k64ts9jަn~՗PØ:W\UApa86J@N:c0 $(H9wX# cC*2M3ՙ[3w*u#MMpKtwvТ~xYܟJF3\U-]~x [b.bX|\[C rGRgO]^ZW*lSQϪy[sp;Νl;MPUxTߋ[rz;\5;9x}g&P9'Q 8I^Eľ*5LO/gf)*5?]ק]UcUXr5:ߞa?ۼ-H[[VjlJ64p)3wS^{}͵IGD8ؒS4"t.(|…Ddhg*ʉЊ&!!"i˭k('=(aypu/vm}ތbu!ה&P9aȀQKE @kXȨ594qThW= $C~R\=d_b>cbNsݣv4%hjL_嶔\s_#gZi͡r+*xfoѥ|k1dS(2lD̈ 6vALJӣGΏ">i2'V pE5-5E\1 Gu |ڻI@F3j%h.SL1.5.'ZZFb4 [5)"$ۈ#$ʤUb:#@FѠwPewUf-4FÏۀF{K P4} -+N'̒ة]cxi VI)ABCT{ƬCMM+k7PlGhQ"Zk@* 2C>(idp E*nŪ&qsc}pӟE6E#B JJ_7 ;~vvԚ+ԼC% L]{Ko̼ء.jsJ_N? ߲?$0i@hӃ>̧[,Hq(ms? ސ tIsKo,כot;}-:@G.)Қ@$YStX3D)  9:e\θ~xnn|0>]е,~.ۢk,77> U+=ڲ{*$(x.'s1"%/dPr)'Ea<.M_bSCPFݘ&|ԍO69. s(2way28`"lp9*)͆ !gd ]syQi8xY憅 E%̛$$b#fi}]1DO+e]şŅ ݦ)5CSa 2@A#X4 |yX8yνduFcE9qhP>.+2YY B:&`u\JJfa۔G> =)&.2!GN~J{*{ّojsS9?>@+xCfFѢw 'B?iUG?2Ӭ:|f4X(ȶ6*`jPJoq*%iz@Wz ܳ0A} )zS>ٽP6WP1;[yJ: $CǙn9L2۪p`FA;(hy&y̠8HjpUמ*g/lPR1l1!X%L^ed*p97\ ҧhQI<K' #rnKAr8:/"b|te{#֒,̛.iy܇o]艼#!7/Ϫ5RhlР<M (%E'5?/fCe"1b/8aY8YIm9d&"` J؀&X *1$9sc!үU'LmN>q>s-[Vݻprhi GhsץfĠ> n￴n7wO9of/~|7f!l2bn7 oCNߡ畖l:{˜)zvo8t>4 n#X WSsy˦2nsT*׿i] Kr}0܆bsd3*l l R؄U^؄J M^`aRMros/~WXD)Y6ؠd9&%6_"ncQ< RA*-y 47RUlM!bEoNhk"s:M`$]`d;u1ffk R&(3 ⛵*sap(3@F3ef+oVgǍԙwvQӎꞬK5]Ee͛ Pfyυ!g>Y# @y?7Ol93T3g}J P,24[ؤ*Lc]b>մ9=RVYoktvWPmB=)ٝ. Y_aċwvAt0}ʄ#,Ed2툐 \Ha&À'f1(Rփ߇sA wx6&hGL=q j//R 1eq\ +# E7ҽz{~ ?1+pJ݁nAOŗ/뤱f4JKlahn4)bpwYtFipv-GAA[KLvW&`2K_]~vY.t޶'0W2iy| jWS97>c1s1޵wH-s?M9?.xKǣ%ZE4:;s*Wc͗t7} ly8Of;^\nz?=h\wB 48$o{=0͋j k 5[s.9_$hg܂v~mAjޕ~Y;v1fq)?;RՐ]LsSuW*g9kwn1wCA `ǵy:#Iל{S˺Y$Ľ|$#:?B 3'"t*$wK.֕S| p1;YEmkhRR:6qne/IxYBcⲋϝ*+}NưSNM7r˕7l37 rjx))J(XVaǥg!!*U8e=3(4. ]Re-HYVg>qaVUvѫb i7ERطTv-V {o1uí@P8OA.Wх~f=ܼZoM'Խ^z?. 
5_'gL!y;Zͪ{Z(m&EH\(onuc7Sa/\wKNQGnĻ;O e;ɏLrt.Fw9-o hvywF^yu7^u4'g2CߒfL\LhL(g|Wөe`{vbT)oG3m4/x0;KWY/ˤ/$ ]iG1%G(G/50( bGD|ѻŠtMUiӳjԲ:fv9%-ѕg\ys @ | F->.% .n$Bh"7T$rJ4Jc,|AW$sJ1t$:e"a n v ^kg}z+@ VZ_]<>EU3gyϒɡqrYWGkQի;8)>T@v! O&Z!dȧ/9e/kO|Oe8=lvB~2 ^\(I)o @| 5)לQNa %.%cl|/Fd58(|q6jq. wG:DDS0$@fccp-d%նd$t|3*Z/N?6GJ 8(1Y2$ dWo? K^~<<0p+>/W|u:/|[9 d3ϹK~T"bsǓD,7*T3gMʬn=YӸZ-?/,Mq{/7=X:Gݻ%Ύ%U魙Fl{p*<#y G0P/Y)DwI!91M3Q_Vw׬j_YY[Ů3]p2>v֑,V=ugP.->L~5`l$#oH]@pBR%r6}tΊ{FtnS>rBP"T:z* PPZ9Z$$ R %m$,rd|Z;3B䮋Kr^m-ߜvusۃx6AP,Jz-$xG]fwn} ;ޔr=/yL/ ~ܱb7kο9/ΗYTE& _peRL* +:[8d!񨵹 ?U1~ɥ'nD1IYZ%Zbq O#׀=d$-#Rڊ&Yf-RlEOu42QǸָDphiJ#A԰G䎭x &^}ûؿB^t7wk<kRrԥ{Fhyn!Ҵ(?rPz+X[B3ZEN=?Դ !JP-+,j ]emF'JWRҎ^"]i!+m ]e2ZqtQ* +# 2`g *RvtЕ~s"A㧫 G*"KWeGvJNNt;wTDWV טeT:]evt)B m]!`*tk*BS^ ]q)-+,(i ]e-:ZNWR(CWUkH[ w3J;zte4Ye. ۯsg{Ԯn(=f%HhD-oH:_W9gwq6? =@eYG.q6W'1%Q3F "RJ}nq?*9*!QZ[K~+l{3z=Dl+疚eT2# "޻z@95hf{ s6c=6y9jzvCxί)]]jMi\Գ=J˰{'PZ**.p S ;cO{ k>9}RN(v{tfwUo~p}͓-T{}ND_([5X[B 4ȺyzYHR gpsSQ6ʆu|bdS(】ewcd31_SZbg=.vkT[\lD+8u;ds_ զ,= 7jMH N2J}|te (&*gBƺh[WeѕyЛsb _o wW#G0Vin(͉7;ЕjߡǗɕi]!`5t*hUpMUGW/8UH5t2h5;uBȎ^ ]JɺBǺpek* Q:zt%(#D2`ZCW섖kep73"JRX+y{>g5UFĩUF}jt<GL>s!;w>ߊjDnU!T3Z~K %tVKi׶TUghJ,K\LՅ+T6TavlhZ$U k;:ǙUAXߕHJ~BD@t؏e0.:CV\ q"h8#Q2&>(AYЖIQ%IL$ NJ4fzl{--MpbXb쯋pN |? W1HL451sD Dy &g F8wz{d8Y.T?7ԃ2Ry>YUKhutuU:0f[ϪUf2Y]vvl⃝\ͦӝ~э ^ gb Kwwg/LZ-zHl{]ܾSQqk=1zc,%TfN#(f0hLDmTQ#O4Q NIsO1D1.@)EoXQ]QF u),/u''Sk ar _W{mF ķlL6hSNnǃMT'5euG޵--ec<]֜?^_mxOQ Д0s6QcP9 C`MthHNB!eFU !9GruH2TZ*$X1$R*&$18ۙ9[1.lL2B0BDžOʅKDol_T\RNl8 ;?ɢAѵdn* ! gA%0*H%IІIuQ''fg4'8˴ɥ>Z1TBߎhF&I 3vcp3B< )ؗeì-;,؇$>Eʂ%GTgXƨ@uhD hHͼT5ʇIHefCFːBvyT5M,"H#r.$G:*klҩ LˆǾfD1bLjIhـA4h !IN`AZ Yň &8`pQF4>}(!AKg\p^CQRţTZh36̈Έ/#C^s:oIɾhEbNjF:ùjtNJ8&:ʚH4Eن=x:sCq*|h'eTY*ȭ_[ ~}9g*f%PkE.R%ͥ⧳e> o{>5 V񂽱W5(pC?Fw9};_dαz`v{ā^EVM{oQzK:'~-H"üd MzpwIӯ(Vxl6zp=,{:ʀu䳢 `OWIL-yeSяA'Yn!8)`8 >%,x# 5/?{WƑ JWVR]IK.*׼(+, @b\Ki]2EO w^T,OBK;>n};Qb'>ɺ˘ :&3`ă Zm&CIR[A)H.nZ `],>1hȒ,RٸC{00OӏZ"g7`/za >x5yuumܩf/R߳ A}N}?hhh5: 3 :*59Twa9h|t3TFmXڭŔ̭A=.+b BthLGR:xeL9s%3DqZ4c+kEm5WٴL^rV2it9)Z >EtBLhiZZg;^xi* AOh=tc1$7v`w}|]Z,xՖ~^ͷ\,[ ;Գ9ԋٛ<\"U>pѠ0Պ)ۊ1WbP;RB/;%bGJ=~J;gTp,D>rJeTȄT‚޹GZ}L?ܬ77JYvݎǗUly>< Dlo{dOnnr}, .6Zw6FwEPRKLh^V9o25ST,T]͒oK{-\ &̉LA/_½`|9kP :*[½JCop/ CZMŞN./Ӵj&|rz`>*dRBP![v3rO3erv -βSvA't6L'>ȕB-7<%бt| (A&Smt b]X0KC䌨ML@JDDR٣^ͬB_M./—ۛ:?[ь@O3iH-Y_VY_T^w3^* 1J':y"U”S^aP(1ܥDE6HO=J%FYËr5hY:߹3rt> S])ŜǠ0)Ň,T.p (.ԜYkQʤ,#SlҗPP-DhB],v,*C" F&yQk%bK3@˖T*rU;_TWƨf_ơ_<ƿum2sO3IWŝِ>2bJ1QuE*IH! sv4R뭯o8jiVr|4^B2! 
ɖ %~5TJΝc9̕uLU>Jue!)E-s`]Ԃ=[Qx) edĄ# 9ffd0f(!~z"I2w)|RzÇμU{"7.⧏D.v*)%V2˩<;?ٰ T>JA*E|IlgzPf?e?^d?2Y8U)[b\bltd Z 1X:1DKH?!Lngڧ޴||Q-Q[YafwLh6[IM‡T; +WˬpȠC.棷OO9t9f;WFV4rQi-c救t22v3jƞ;&n硏Pg婷[slvaMߍy|H?IirlFN.&n~C9~G۬K]*aUΪDYlμJK,Ke,uܝ{&tl1m23!eĤ\0 D@=Nyk]paut~br^+>(2Z ̀yŸBO6~D%H9+HH2a4cJ{ik$Q$BG a ' dF0Iz/.B"gIHdܩ3W`Ij0 ndb&eF.X h E B+'<7U1Nr.9{$T$}M+!p'xLR۟NQqW6_Ʒ-3=g_A RP6(#qYrRȄp>R3cRʘ3)[_;0Cdz}*zIcqh$h|8d ln 3%!+!`^Bn=n4qg_ C)*H,E㽏3v)!Y)Fz!kOc,l1x1NYzϾM6Wi'QG+q$\ˋ—p5vC<(mL\o"_sW~ʿKqǛ?׿L ]8nRu]_ԘW?Qcލ%5.SCq1s;.dpF?g7BeOs,D.4C%A47˼["9{6l,Aށhfl{3.#+ŏEɺ'+qNg+})]B?qOc#;xĴ^MJ]y3gCЉAXŭ]KX|Xz3K:nmv=F=X=j]@^Z~AjmO`3|W`3%n̿8$rG;!RCY%ZFR?>}]PMr[5Tb7-tۇD1#m}5if9\+Q9W cgm0!!z,̌`IhLՐ;e9.6ⶑfIjm)"- WN9^hL?çь6i2)}{[ʊa_“1xj4+МlW;I~&8) 0x\:qz!d:8s3( .P.x3* | tN)`^H :zeO~c]۝56۵A>+}S|R˪ }P*9ICf2.+3q+VWn*#Ifk-el;ȖV#2+*+h)s\{6.o]Y>3 &32 'S I+B.#r5Q"޲*u%}kfu83CNYٵw;u2خ]-㠺oO\y/f/uS\5~v&+n*JOfy3G*=/{W zTzzazdkMhrI:Ǭa^x,ic<t۔f'lC/m %c PY*z2N[%Xr3rIrah<ѱdLmO=7-=l.|;]01 f'«v!i(X‚d BRP YHX`Af 2)gm9L(!'\>32(.yaIW~Мrʲ51a^rgEOM+L"\7-vq2>1qB!Y%P2E r\0nj ljQb씃*{$S\[K ~*8t6{3 qcPAY1:qWǥLnkbk_fbdK`YƲԖekY_FH2/dZWYHu~+$/$pg74r>ӿ=Zydj-'F^ɓ/jtKRK]xYHYo^K&۫.ԭ/BQ?&>Z6ҽM ]Cy,-rÕlXX>ot|/7/Ƌ/XU}>s}3`?fZe{ɽe+^h7Lnw9[Q8zɭb;p@4}j`, /9UH6Wx,d69#vB)t }Gt 3՛iiPvDTCNǠ it6&8+e| je> \:Ʀ 22P(TɳJ,ѷyAz2-cB(9ZeVngZJޘKrjEJ.BE*V~F/\ޯy="׷嵬nH,dz U*1UJ8idB dRZ:B1Pjc%dP+6bNQ00: c #IȌV`pn##FHT#p^X~7T9M4Gy},t{bfk6N R$d,&:HK.6BP0Qa'/0RM0knƲMj6.p媼$ݚڙN*O[kYR$9v}dDm1yM6{.?*G,++*; Q'% -j4@]9gǧW`ڟ`b;^:sPӢzRib.bFx1|+htn;!L(S1aB Tj.KeΦrzk:{h>872wg]|z588A}m^tMG F)8].wt]օ`ڴ8qwuq~RGjJ1vU١tɝs2%}X| np~fr9p|fOh wx5[5OlSFtS4dsSlBy ]V(#Jcv^"(PUXxUNj;Evܚ A]3in]b5wۚ|S7_;J# fޔkٰpK6^f7Ny߽ՇL?͏oO>ȗ49CZr+]5K?JjM 5 Hg6IIDBR!._cr3*N);t>ڵ]pBI2v?;iʪuPYʺJ\LLt'ۃQiIҒhƛϦnv^KAlY;'E%CgHHW|[[-JMlzTy疍 VT9Ih[+9)fުvV$d(\aQv '^^\1N- W|hv^ʷ/ zTmZ ,X>eEt^fIiMF'xW5ꩁ}/r>k2憊Qܡo=NJϊ?0>+H>D<+HVճЍ_{xV|ӓR Uldu4jhwJ]=CJ;?%`i2t5jhӕTFygHWJyԄJ ,d%3jh;]5t Yk3+5S֘}ҫ]=Grb^j?~;Mqvv>mѬm?UQ[Rs3"H UL#\9Ɋ\\ȓ~M3w![pub|9ɵo?Z!/VhsZJ])K"[6Ծc3& .0ti=`>>E?_T2|{ggr!Η0W? u{µMþT֦d@_+ Y9fS_/H3ˏ{eQ˟}7EOynxym'-$ݘtiGCߖNuZ|6-@^t5!G/"Eɻ4I),\ 0XSЙ?|Q-#J3h{|w \xq\[mUs~Of=}0I#go?O_(g_E?v-^\x`kBmH[ڽq ?%65'T4~:1kJyڽ<JOјX՞\~mJf7~]k+US_.r$jJj~iFo8莨#;#Jݮ^xx۷[IM7n:K\B@N M[r\{=/pބO[yQC!=YӲF4&,1CM>pZymӋxq,6Uc'\/K?o̡/Nʲp[>ӑvw[z'\v݄St!9`@{,.V;Y 43ڕu-t׎]Z2vH9Vv*`?e+i2Zen(as, ѕ3JLhDpIUC+~]ZC@Wϐ1Oi7y:hK<0]g*qB&08tE^Kգ'HqpZ~"zJO{V #tjhBt^U~*tZwj(#]I Uldb*tʵ{CWI< 01jp ]$tP򁮞#]1ĕ`jOOXtvftEJ{hJCi:cW .UCcW aYҕQV9'Px+ ^)7#K6X M7z23 "@ϐoLwBUG<{UPCYiHM`&CW ̌hCޝrζ;]^jI3V[h^7rjo|vq 5PS5sq5Zf+Zl2MVLK(Fk1D&W>ZBRHJjOKmCYdU0W+ i u6iHM~D aHB:6cR R2bN0'BVR5S1xFiѢk^c0=KjrتeI%e q(BTp-wlKAF#a*!x34fxI֚#'N8_S Ai&83iG|&Sphb&ІVٶP,IfxARD&M~5T`hDV!lM֠F'KJm oulIru~Rd d -ɐY/`I6.FIm(ysJdE!R$ʰ;QUO1HAYz%y7&&Ow7w8u . Ƒ֑%~@_KWI!3-c&&RҶ͊-%C|.*EjoI-D@" XRŤRREQBU"lR9C5TeO$!(0fњ ytAڳ$J(JjsAvɑ*vۧ0I,eħP46AZy5i6ce[c,|m>h-ĩP mgTVNKBQArXʃjMn]9 ڢQQ \U&!^MqkP4+ٲsL)eU\`TV!8  mYlsQ-C*Z&x@ri+w%CAq%U*dJC` E`+ N]%<&w`68I( \dcdcY#TPC/U4%^i*Cv:b@uPaJ7#s Ny. r*=d`j>tLF AN`EţE 0HiDL(22#ʄ(C`(8Lt~,[h\h=ŋ `hEE YԭJx$nEf06*YȩՏ/EA},ӢmA5Sbe%1Dw2<}nsȓ\ X׆XktdHc#.ɆpHYȋQ MޠB-> pGUu $$BkLFQ{X4j6y*ZЎ*!vp 䁶:AjxrI'#Xny=f @bhTӒ5j$P4i2Б9hcEpQIb&&D(&ɀ!RP bAEFˌdIE!ݨ2̹HU1,)X_N$' fcI;M{R!gѝi ,FVkmH@8lF{#Ƣ"]!EEgŠٚG_WY xZChTRR5- Wy"ׂ+fp  %D_ &8K)'STZĩWW@ٰ̟aT6ig88`d4~$I`0pԵ&uӵ2l&GL-N胩[ X4LPkYC58EÔU >v 3 FZ~7[u3|On0(!fM .)*lP<@ads A[0Ő5 waA(yT" fi 'OpBkoD ?z_VWL"y>\Ap٬ oa"f@$jҸe,_߲Fr緻T .>śNZAs8$*\d! 
^CaVXJ0в B :@\b |J n+`6Ƈ( ƞZM&=HKLCR&NГJ p /@i0iU )TQWUjKG0]"̎@IK "CdXO]bmO p)- fi!k]`!ީFq1yW a^j\OrqdH\dF!4%L$T>V%Uӓo[1e^湄8>jؗ/_;͇"WKH+&1M\~y=3?x2X,tx<>¯2l%秳>4aŴg3,i>;a}eϚAPh&Ly^uc8Ǐ sQLzUΧ)lIOje @Oj`g*gQ raͥӕRuTڗGI3䠀f|N^ *ӭxqU@|ƒ>d|eb)L+%Ij{%}汪ZdQ*RVetN=QN"Q'?•!&m})Bʥr9!P*VNԝRpo;fUVFY_RJY*xSI uB)URngzYY6de.Udt偝QDe{>`phmb%eeݼӊ ?+WfEve+t%1kkIfp厳6Cknj3ct%7+ItخK]!`zCW׋\vek+]!gul7DP QJ$P=+wt\Bvi'RB^ѕ=m3Ϻ! *uB}+-Gte?KW'Ft]!]O~tEWM P:戮 Td_p?v J:X@}xҘ4V.w}iD+yiQvm_gi'7eUru6fhm'r+x+ܧ+@kD߈J'%՟^2F7 :=v#6C);] J]=J@fp ]!ZûNW1=+-~ azCWWƻBNujJօ|{ϱϖ Ѫ]#])#=+l< ])smYt R]!]ir)8?d2F#܁LOsc쓫c7VjRYrU3қ|]4 CyO:xrlh[}9uBK=i˝RFi;~3_zxl5h)Y!fEoɚL?ۨy=&Y+ġ^6ryBm{b#\bZ:(^qի f]o jޛ7Vv~EQҋ+p]y\wpUo ZuB?]v>dsvNW6z[7؎?ZϘ]m0:AWzDWz=+, ]!g<]+@99+ճЕVp#B6䝧+D)<ҕL[ڨ´^';xl?(_`{BE/]zex(O֝(ѐ_'wC ޮ~7I/;{#)Nft/.jrAҾ^=4ݻFF#g?}.J*߀y~`G+ZY\{sꁯ'o&`xy9gܾ ?sZwISຓWO2Ny fU17N l>ƭd=+l 9 mvy7ܛ-m=w^g~m mY@1L,)MiyY cv6ڻ)hkc&ɤT<'2Z297ZzשּׁӪ> W?N`!|5(átu,YU\z8`mFwJ*(m* `R89\)`FM8?⳸~74r}p/g1W~7oǓOvezвߵ~$+<6[cpbSϮhQŁuz '.\B_QjO0?^rttj]^x|zvYx6 /"Pó>/~G&/Q 6unZ`nչ5oPzj oF D>ip):a4 S'E|<8\\,岈Az2CAlptą=dU+⓳ fX|4kv-5u#wfˤË >|ww.[쮘xWXε8 $2XeCֵRʘI1dkYBZPWzĠXjͅ:DCNֺVԪ6J(XιX2YAFk }G znVF :w[Q(NS%NGJ[.Mχ1/[p:,*pql7,#K8ra9_=jqյ{ Z!jэ܇?1Qڠz(v>("MDՄ޺yQ^W | v-xF:2TNU<Ĭ בoun{u;^磰lL'RlD$HP.Seˆ¸x U>0ᅗ.z6琳ѵpUSR URme8uel%̖έ8FRv~ 3ux8ȁ+m*\yq #K/9feEHڳ)3lt5+fT=Lo@J9*$VH-Hҩ偺58_ԗSpK0(sʻ?\{zՔ[UON7k^W1x>8 Q R[p7U"R٥i]ͻ_xSq&pѧ IVɨP L?{WƱ Om̛hoS$(nܶ 8f)Id=HI$ej(QDw<3)U59=+>`ژ%4R L΋)ڜMk+L UqjXXmd2€G•D_dίa1oA|; # N3;J9) 4w]8LdaecQ0="&htk#\4t;f]8IHQKb 5:Em]5s*.(}QCRA1$夺r"q$ Fj")ڊMU0+m0p4db(!)jD Udd+a5qaOlvSSAjPD4 8  8G(5jH`! `.)D:a a*":R6@qR qhe4 r}td)(hJWeD&~DxU{yOSuV%⢫vwF6F'[RR^kL.y]bs`5Ϙe"3Kͥ^*kӎiR 1AJ̈JɄ9Jn)HG͜Fb?̩^8 FfGX.-3Qjz5iBv+P©UCHpq9&%)z S6=Q4M̀u %8Q^2k@lQZ=s1[`*@xi*Cʒ[jh3N J72%-bf2&[fCs1~nS:V.otwnwb築^VKܞC~_ӗB~OcPAf;C{geR\V"$/LixID8 "`x`&* JKTz.#KΒ *̕tB^,uLʌ^ h&Zy񜤊A16Ug?0L|%jNL>ퟪr-Y? vPݗ)}Dto&W[gr,]1_9XL\PFh΂Ya7gBEg8̘͙[/Jpj:3-=[g@ yAXr}XSJv)Q1+t٢ ;ÌA 2+!`?w3#Nj t O&>MAh-\҄ %01XDf}YcX&ɺI-هġ}B})ٝR_ d%Tݽh>-v{ "(hL sλ(d䉹`r _xW{OalњI9DS).̯U޾b蜂Ɣr\ɺ +ͽuqz_x Ro蟮L㚄0Qg7]̀_[#I.^|MgHկmىI؋t\aLW jR~0_Ί ӸGQ*,-KL~W&e*e~]"]}3ߛ-Xg!;\]~rR#+"t|׾]szV c'Z݇&s[R謿'CЉO2>ig[m3,~i""4:_~ke}Q> i 9$oj_>r U}@w7 ͥ_/7rKw*;GTWx i#zKNe,Gr-}{Wgoۼ !էiR1_IL!I;o8^1.-w@tكW5]rwHԫk_M{xO ׯGx_cn頾'su1j6y+q ۥX(wh7l/cYeM/B԰d C-5}-WN"w/<]al#EmVgO"%θI^ǹ9hH,`qb8<*mU 1,X<;Ld0L%D@Nt^!kZ$66^zg(!2">UdtoVl.8IlB"\"ik"@] ں-}hinN:>x愪㙓PM 2$PIɭ!x^Ѧ#tCQbO>0~Y8b={,1B>[fpRŘceI*smIK2] l-w׎u/%Q꾂 ܫNo`j}[ot]IU}n ҍO* Qw6aQ` u*AS=]ܑ*VU#Z?Fa*IkU^_u vF]Wb/NU]-8zߖt09abZ˘BN) f4m1G>O|_ԬfEhT%a㿕[Ëc6<*5=" ڟo_ѽ_uM޾tg׏ &_旳8qy{sTqRdiAL.mxͥ2gCn-}篾+h)+(kߏpJR7C}{nu[[x:b%ܮņe<ތ~~ۮo3o7XQWMOny9kF`DDom{},rvS7^.Fϯʒr;tePo;en,{w6w "Tָ$>rNBY7<%,191)PTip."pԏr5'xZx̂V"Z) ^y_8~`m}q?v6X$9^œŐFW~MfK?v $PF mFlH@}P\'Sd*oQJCչ´RY{qJ X`H޺VLTRst,֮;ZMRaLw-iQQ x^r'E@YlAa4R03zrsL!I7_t`R.s,GULDK+x^ S?lCdš-yU턳LӒyf~= 5YȜIC 'm3DI*[zdT ŔbF]R%'+@a9vBslBqg:Uuϵoknݭƶcа{LR _QFY R?6L5>Yl]쩊K..͞t\3a3D.soY(N13ØѢ,Q"Ե $>Z\by>)xf6HwSZU8wX=\ #R. 
}ukE+MǏ3U޻8cXݯ>> 'Lq|<4)Rg@2LN$K}f, 2\(!-\:1Dιu2ҏltUI S* ˬUw qYd.KTU^/tefXMzJvhbv}1=ƃt/g̗omK\pO.jsf!2*KO/5$v)ZWH\`!BнI ?.Y|4uӵDay]{vΕoH$Za67яtf2qO쾺½_M3i=3jh.3=4p}w;>˟ Zƺ6Ϥ}+\s|7.>7qT wNa^q5RJ:8)IE0 ̠k({P񃣅7[D Wx|͓/-YLR #_1k|W5n4x&+Kb)S%h- wo/^}ng~28ْv=ݗʯ?t61*]Zg{۳nl]lLL:aǴ6^{6W d+epLKL\DED:#LHRtϘ-4Z%S0,fwC: gkp qQoη|foi-dsz̩^BUT jÙ+Iѷ щștI`⚕Ȥ6H|wTg+q꟦5Yoɓ鲤~`kn7Ay,ToZ$X|w>_̮RMf6ɘcÍ ƫ]"TX^ݽɹ{r'-%䝴=9dM&}3̋OivrRЉIbFU(\#EKnә{!UD# SeL@OKF.hy6Ov{hS ldpxw;_5JqlՋ8I=cEZO=d`8 fYsΨ{Mӧ'pRA 7hio*dY!j: + M[rΐwI~[G%`a12"+a6!><"EII$Kbe6,2<"mXZ(VFLF`|ۨ{؝S9Ly3XMa^~To{C[Xg}D>}o'g܂PV"Tʄ@OV)C 晁Xt%eTr_nqlWUm9}mr*x]3scPAKN] Ug}~LьdlTsHF`e1UrxBY`Qa0wA.u\{)6 F.{ц!2Һ ukf7\^ݑջicN 4hg!:ZE"4AAe'Cf׹Ov~pvtAݻ4>:++CZk "`XրU5ǐw!eolRguzcNEZi;rlˌ*6 ۖ|Ɏ̯2i2Hi1]v5R90#f|=#upu= Z8p5z*oSHĽ*@\%'F|\F NkTF+jU(\ Z*Tͤy\4H-VUf^%'a䷑دܴvdc7{2ӝ%睸n!Iav4<4aA Z3G-/fu5s25sa\*' +.4UtE( q BhU ]Z{"fo3^_00'S'P=qPr/{ЕЪ/("+Qt_Kg:@WgIWBhJ+!e%bUm{JWR JJLBbr[ ]Z{"J tutR:c +t9}WV ]!Z`PVtu>tqLDWJZB}W;rPWHWkޭ8|UB3;WSnyI}vXbh^ MZkNӄ ip} }O^fM~٧ꍀ$o_jICclv.IJ/ftTMPzgU5qw7 UׁUV'ʅ7xDأ00N{AOXcҬTRčxo^N{U2k7F}õx_~󟞵um2v̦r/b[O(35L_LP6e*)e]wr>;9`Yfh7r<\A`U퐐${T-;kZ>CNkLʸJ*W> _D@r}DR3WPM Cp,%F7R!>ۂalʙCp]1̈J:CrJ^"+k+DQPk$!2Ϭzskĩ[+O`~?D~(ۻte:rɂ ~~p.f퇲y+ B%+ "tE(ΑV\;] ]QDfJ+B+dPA]#]ܸ  ++QW#Pf tu>t Ӭ`KU ]W͟r]ZNWRہΐ4ph; ԦkR‚+P(&TA4M_AwECӈsw&J4}4mDWXkQ ]!\SDQm:V0^RLU9Q ]ڗ^{p<]!Juut,U3ĀpAR SֹJc@W ]gV`ai^ۻsS'VW{p"uJѷGv]Z t;ɴ+ -tE( J8+;y&}btB`b3+܋蔮0b誁J+B{OW؁ΐ Bbbս q]!]QtAtE]9 vDWlP>9z#Ư[U|trfuQC rT%S MZNӄrPgIӴ=QkólO +X+`wگ\;bgnUð!M*٫:68+q*ZJ`{l{:Pm|a)i13pCh_z!6ôs WDW؊bbHZBo3]0={8>1]֞h^(YN^]zDWE)S ]\J+]!]7YSBSB+D,;]!J@WgHWRXmtAt^ ]Z`PZ>Ha^ak ;Ɗ++l)tEh;]Jk:CRR3c +̵.}W@WgHWZ:P%]!`'*-Bbϓ@=^*FB^q~%؅2q\rLKG]dkZzf-MN[ws^0㛥'iQ 4`2U)ys w7ݿ޾}VLOj܎c>QӕVVY]վYjptwL{$є.7:Yɛ8<͝'-VOxC?S ~ )N)}'|*LЋ8Azi;/_I'%7osO ~~Na>fW٦A$dăRԵT8b-CL 3ΘL> {ty? ;/RӇY>v/PYLѤmt>EMF ΐ5 ^sg AOmL'CuOz?ZyԷLK\:%be@6;ˤṋ6NX_?G7 }?zvӁzs˝Ez-fr--oGT |_^b/?`B/m^.R@|6mnH>J*]Qqg>\wW6O>bYBG?LOV"i4 `h4?"7|>!$@+TB䖟/6 ѽ[W-F?b%⍮˛R~ K0.قn,w-FsWCkTm ‹dURrD,aSR ݦi(~6վ`Z"`D1*妔wJPCzQ [YytCvp𦮔S5N~xdvHgsxy=6>6?v֯m59p1t"=z O}`)HWFkt;d"Ą>3We V9kd;#]1\S4?9>,,.MsmONjM5Lש>cIl:&3q:MGK{Wu]%iq["٦=ʯBe?zv,Xo3b)Σa{ Mzp˄OYV$ v߫_6 Cn-ZD~mM~LOj zql*`6Tͧ;qFm5M:ᣩ]Ř"*iAmq?gu({Ջ-6n}W)6y*,ݏO(QKdN.R*)eHZ(\LxrJ`}0>2t4@! DC zW^{$4LwԨ0kF܉ ͣVg=nN^iƽF|-6,ߩBѬWk1HILu+nWssǕ*L([SC *PsVW܇4WaP/:ߪN:ҽE)XB(Q! 
d`uB_, mvpIlӤS* lmK)ekL2G;靪XuviG)G2t0v\~O7:j%:X#s 99 =I')$0Gɹ^6ک&`t5z@vg+# jt#bVXlQ;󴣦wf Rx矫0|6*v`|my7q7Ύ;]?ƻ{|䲙jPz'DlqgoRu?i,ĐQ`9S^EC"d-DJ=j7Z 2LNQ[cuhR6f:[ "mM@Ϲf<͌iθ38 U\(nQٖ/CZ،4Wq<ϋ,v Lf'gXQnp -z@)'EBP.q2:D0M+3)DRG1 Y0롎/ؙ}ʈcF4#{xzYKnkH7$O!\C-;eD#06N $*mD&c,$472ـApcF ӌ:aUȋwm$;v?e v&XSZ"$˾=!EJ|#i^[ikfTWRY]Ėbo7c##Jb!*Qʄ#ϝyvY ɓEso=Lؼt&&W^#F_6xSTiEC\xGW~WBc6,*BE@\yJ*et6:lq t:,ς"hT*M]4R'8e!im9v3)F,knt ~|;!76/HW]mt;yOb!o=;ht}et58- 2Y_FV4r+ ~i+/<ƣцnf ]N!K\|˅Ka5eFzޅ.bԜlA՗1gNݞՉG5rdx|t3\kq;I H@7:̫<[ˢaGoC琺xL<yvKzG&tm)$n Ȝ )sX6)LAFiU;]pvw/ ma@eHYrcrYF\DrVsaˑ121.TzQRb~XZMhLg6{_>VYTYʊ?]W }){yl1(!mg<{P!KҰ$0=` I{pPr3{#n4 ^0\Ze{Y$%R|RAhC-0i EO,:㲓Bzk}kZX~.ktvW ǡ8CۄzS6;.dKRF(p3%DwU6p,E㽏f22!n(cYCL0f!eӛǛ}Z{O-v.~x8N@smǺ%=w ,"D>Royrb~l3!"Y6+ox7ɫMp:V瓎eח!6Y [ot3aeٕ'gDV 1̤O\BX^ή7z2C>hWZ/N~am JMxR_}h0Z2_Vo*²`|K q?~enE"[M|z M:ӣ8C.FoOsp~Fk3>_'Ik%VYӤĻ)@3E-.;;KE%{Iy>NДҁhGDV'W[B:JCUNO:oӇ<,?̢tn 66qPO1^csךr5;;Gg~/闟Hޘe nSocMN,B>[fy&DXY)e\&夜(m񶶲^nWohU}f҇ڎjӂ;>Z5  $:Wr4/~]sn)?}* RJZNIYWuxkJ}-_}Tؕ=eY>փݽv0h>J ֱig81<A>H48e8 H#vAu:E&VIΡU]aP%8% X`HbO+R#3^W ꖔ{U=1ɷa薎YȜICOf%UIȆ#J`I(6B9uC:6Nt{ݺs tmoUA}8H@v9CUn%+tVLU>YWy ]E"޹s[7k&lȘf- 33hƌ%0duNv{礵>Zdwwɐ1MDvR5pF)]8!@u5i}-yXˏ7=}4]U2-oNp ƕD͒2*}R!W[O6%!q$8~*80{BqcPAKB1flKzz<:z_oBniOSXr呐%PP]1Omp: P!QfґV#NnN%a MMO8mdV*R:dMXI탕1;)Bvv|$sks>Fo|takY·EqT4Q  T/Z dR6f&fcF'a9d9 dZS(9O!iBNX}rIiKO ZYw4dZTqqHFb"HUB:FKKV0HOLrk^gɁ֏̵i1s Yy]SfVHN8,^]koɕ+@2[[u% n&aH$ю^#;AjR4--jYlc-S=ܪbսH_lx٫RQgNJAsvZTR!Q\!>w0n[܂>gW>9?zŋFgL(S1х %`]T_ M&u(G)wp[vF,>+Wh~ QFxqF`03(4<|9|j荑fzJ:$&7{3=S遯YOg1ӓlōM&YDbĝW!e;  /9 ƛYagFdt:r, p T-e {/RTСڌ IRՌ ::G\aD\u>x0&JT.gK:3#hp ~ EJAp*H/X61pn}nCJ-z(DٔR1IM^%(=~#[ Fm%xןuyvT 6 ǽpܧgsS洲zǬ;rOc֖p{DkIvU{c"tvڥI[RGRkAۼy=`}mI/='ӻbJͅI 9ΨJ]|ܕ(NDw/fpbBFi~қU<9j~a1ٛ9Po3Cb(!q՝g/:OXe *_lHkq^0e9o>2gy-ޛ׌?c{,_)/Zt\ͮ^7iy6[ ٬bfv7_~ X Şf˘]-ʟqw3%_;YzW0/~96[sۍ>.]v#q.jv<Zje^b\Ͼ Y:yFjSsmsvzvQgݕKl7_trs}$;Qےmt _u-o{kau:J-cbmq}Oaz#O5NAiC.j?X?*7bzv{\7o"_I׳xuX~HxNJ k2tGJp _lւ/˻}0oav6yzoSF lMnpILnh2d:ϰ6(O4aưɽUgGD'V6uWt?]ShM^?qa]fTE6 k3^\]Rp4hںڵ_W|kc~?g_-|=/dInXҌ$u:3! #!X~mg=!f %&~\=ڽn(?g~-9vk$ភEy89 e1D^#=Y_feP*$_rR nֽ^jj ZFi]]vm5=nΙo7 5/gN./rY~]h_hH<ǡ~BEW)b·\;(jN>cL9+54ˎ;}7˞֖zQi(:g)ň^-c%r.9'.^r(kk^B%~|u_qM:}Ҵ6em}'C.e3i65J&Tb7v:%v&Nhc%vC)%s,t&n~>=o kC%ڤqk*jmi}rKgꥹ̿nKnwҀ\Mc>>Vﳏ}jZp勋.uek{O6i솾wy è <~1ˇej(In.;dUdz(/Ga%4cI7cv>645'}g?===IoT7h$MR!kyH<(yW;uF1*lr1#={7x䳻IN>N-3Fs_Nƹ]:4lt[\y*CYrBas Nk;%`o i ]tz (ymÁNWaӑbv ޿l;wco5DWۡ{mAWt]RL`o'CWv-]5]>Jt Jz]1~2t>2ZRNW b=+n]5MIOZ}HWZh4\%T誡{OW A]=K2Ym&DWFxr2tJOF]-/ۆ0v,銵Z[O[K+vBoƂ]vڼݖvSb1f?M7B S@Jc4 iڑ{l+NM\ ]5np4pBW^)U3# NLfFU{_7v<)ψnKVh}f[_nAW@W;w5Z)ѕ5t-r+k>sonu|wyӫK__f`kfk^~99=xI:p݋}wKh?|rjP_<BiW5.;pzzu}#58k?חѸpͯ/, Qo%Y0D, !\xNIs`g:棐^`]=sˀ[]ُgMO_DK¸#>Von{d#PQF\ dV(?mftn^`Mef{:x;CN^uxצra^\޼H>`scG_dW] JIj!Td"PTո@&QP |߽^BxU+[w bwGW%ݯ?-#]\u'繼( ᩚ칸-pdN&j B+&`;mZw|2GK0!Y I- 1fUL\&]#7dlR5rU&̶TM۵-}n_'іao UD40D aHB:6cR R2zIŜ I4#{[ILU:D :mmjȗj:(U "a9˪ VJJ^P#&Z rF0ИU'a5G(:Ik!c5@ǣ ̞V_,L*yɔ-Z(ɢU(Ko!dcEWa"&}-FTir l lt~)\=7B}{.@# ;Y\_T>lpօD2idȂ}GA#d¿Im(ysJdE!R$mQwb:b_KnL>Ltu7w8u . Ƒ֑%~D䫀I!1 X )$ZB_2<'R\dH-D@" XRŤRREQ Y*L6x'3h H H`T˒*B$B(%G S2]( 4`1-OhBg,|m>hЦ#)SR3 +%{nD9p,YCW&7Sa$(<*J!sk#Pޤ8KQ٠). E"_ :[5Þ1丘^V%FU`S;pkIi\5Ȇ9E; 1e$"{P2[2Z@pA4)zg[a':JwTb0V(Xp2ށ&L(Hpy qɏ d>rPAq TUҔ)EL#\V:X,2 % Wѫ3dW;&RffH57]?4 q;YX.J.2}4.0A|QHVGA)uR3<o&)`7]403i$! 
`Xjɑ~L}KO-u[51↥zy-Fmh3> ,:W% 91 G'GimF6bTrTI`0#ݭ|?F{驻ɘY56ѠGе2 c#0A]rwK y<IrC TFow!}Y߸ 8Blu&"+ Q^OކVAh72ƊvL ytѫ REmjvwUE9ubM#[-m4r|!($R@G Yy.nō+I@̤8<(mLC@;H-r,2*ÜkxC x戳FaA2֥'ZMv4oҽ,3ME֨df1[TRR-e- k<+۝`!-t7C&&=yp7$/JĐN92 ֢1w~ƭ&g: èig89ה$F`0umQ-npFL-FT Xl4Y:J35DQZ\1 9CV,4j;fc\`Pݭ}y)^eo :%xKٛC|0#GI{b]FTP=` B*}QZ;[nw+ɰ.ll+$OŃX1 "y>\!O.BvC^: ;2C!DI"r R5YiQ%Q֠cݣ5:H}[rHֈѳdSZFʈ0[HkTϜ\LA Is[jҸOkw׬=?rmj<>@ eM'A*&܍D8Z#w-kKLH-t4Ƌ6 WQX7@S16`3G|@ɄS:p2q˲ IUВJn%"`~7Xf1RhW5:_ !]l"FdžYЬԢ!#i!2.Xr߃*.ŀH56"׹dkT]w*9c70&o`j?.zdL拥Q@jFԪiBI.AUHN߼zu~(9K/X[k$H0|8u?t T?D|v$)4`[ QY.7jQvczx>8]\|л\fuAb2Jlpvƅ=e|Vpr9٬\i>m_moT5폺mp6u?y6(cW0޴ZPVBv @L+cz/a]ڗ/N 5Ld\9< )Z! Ғ@#}tTQ[3`i~Ofs˺yF̵.bS|z1jS.0C &_=Ko(}1fkb%D_T>9gw41\n*:s*2_7u+=%ݣZsNVndiЪYJT{UAg_ޣFoĠF,|eݐtc-M7˻BP>B@xf `zXpjA+?V-(ǪGXI]YX~ѭ-EWNjtCRѤ1ҕ^|+ /QtCv+yǦL}+'6\ -gi~(8,+ItЦ:e{DW\9mo p}+^^Xd JJ ]/tU~֏]#]ɲ]ժ7tU*h?tJ(UMo p-}VC0#+\eMxpvV+&B ?BzgGa `.pmo%?w:M,W[K "w n|3yYUyQ|EdIkYq}l<)|lʎIMѰ[3'{D% Jd Z#d JkddVѕ;\z줠JT W `'{CWj~#DW2t9;ǽ\ɟ' {Е"zhs'7=+R \eBWчNW%:FN+#{DW{CWa}J4ҕ,*_pvSu|ONކ><ֶ“zS_gkF6Eiuilfmۖ97*%fhg;yDa {tVoEՋ}_=qrk/Zг]O@"rс?,5,d]yEn3JueW!d)3Y|Z?*^d3J\Bi> rCW9YpeUvA9yUr]!zb^v! hϗgˌWM3lꠢ7pN*nB#jwc"kTݕ7W7qfQ7|1It.Ox>hϜv]pח;zpK}X2׆&~\~x荶کz97JRj|]k][ZWa=.'Mن!sg9hM#{SV9;ejgLЊ5tw\HC>ZCiUkQsH5Olᔶ/=~BSZ:]`P[~4[5Q [Efu;ܵ;r<2|x"Zzȼou.}>o\ϯwѵJPIo__H{ t;"P˝fg0};9ĸjP2\ kQ~P?_r- rC+t .]=q-Tq50i #Ѩhw9L^V&t8]@koFr% 4".t>_@Q\S>V"^R2grh}y:l?l/T5폪sn5NBɖo`[(aWR.))Xt\-Knd}I7$ \ٛ'9GGGH#S-S{FNC%oN@9U-&YTϫqG0wg`woUwxHwoC6jr:1$R,Tm˵jer3nY٦+{ٟnVIWY~?DXQ\vdvf/Gy# AhRDkޝq5qE׿2Pg;ùo[~6ƯǓwJ 7$;#LoNeTvs'컀^*7bu佳ؒ畬ntϓ~{_>-`}}/tl+T ]V[}i2Sř޾yOR/gg]&#TR=>AYe^'[75cRAǽ(@kz-?5v|*,&c$c} m2ZNK)c6'[DmORg -؄6'2?0!MPMPT ffuygގ~Z)R_܋Sn)G].n9gWdcoj}иT.>Hb>Ĕof/xOBQe]ziTmc-sjk(x]CpIu~&թ[urvhQ²ՙwmm_sgφvC`:Dz8ew],SqrvY)s@bȁF7ƉEŴ c,U5>V^ A6Ru]5Q52 B)gMU`֢MhC6M^1#rCW8gib,).N,5N31C#V򛢣] AFY{hH '#8H(L-CP:j7q*:_0[*s>NϮ^hkOmQOl<ڧrYis:(imNX;[=\~eO+~z'>/}]QP8D+ӂ>9H(&n Uf ſ8Y]PuEFC-ZRTxK.ٔ% )pۤTz#c7q#c?_vӌ}uBXx#QxZ;C9>=j~_khsٜxFv! n~|Gݳe~Xpi\[O8lgv1O<\khIq% ǒ$,3VO+ I:<[qrg#09PLm VP&Z.&U$ЊT$4 5DZQ9Mr;F&'uvzrLWwv4G_5sHJˢ~x͎h cn{9Nѣ!YEJ0Fc^e'){.ʦU{ 6Y(ɺhZ,>*ZP9"A3f!1z9 -u-JoMs| RY[YTJ." f!48[6 ֵRV)P ~nq6h1 tj_' C-DZ&G!*o,M 5)j@roZ(0ѸlLQ d*-.t:aA֣ lrNX+Y *dc#sa>V綵_ZXpC0h Q80(wp&h@ }6$uܐ9!{p *XmW_v?|jtˮPJ`9*j\j*dw ޴PY^zPߨ뜒QkRKDtf`eU,}AuNr̽ {ݳzԥ|<ӆ2pNrbov(~~LaY˷/r8yGow>L~wpjPY)\d9aUܟ_'^ !!'G5]AK* y0X+'N:-<թ'L0cA8~ ,~<Ŝ9\.[^»O/VȍOo^ SYE js=ͷLz3fr7ń2rbݧt{|;xSrvp~zaէc۷׼TEh~5nsB>vzyj{PzJ)@ Uti%ULQ۠ږխr>AK5a7P lT1k[tj6:v5J.T\|Q5N:d-R&ЉzMrcB7"~=үsWtkm!#o,`E;arOLVY&!alK-AW0\ xsb[Q1&<=kgB yBH`S;h,qmR.y;)*-Z1!YAE[ &4w=IOw'p|xcxVff1|1e2^ ed/*i"&VUI!d},@YkGd cZ]Cs ,7;\e$.d'7^z<|LaKV<52m\[TRbk ZS"?aqn{>-u֫s.9VY,#p`bjVH򊸮i wn aRя䑬JO+M OJ͐e|:_:U ty k}t-G#w~Ւ^lK{S{D]}<8-w$eRmg/S?Iq4u/e6oK+Lx+YC[ tG'N7r,-si!ӿ>:Mq%Sk_wHGW~<Mխs7zRi{9kH{PT4WYƇw~㷖Q޸<)TџK"#/}&/ox$rUu%򬎮_Yy}K ˋZ[圏/iӳ3<,ݮS{}F戼|b5%y9|[rؚ46FyP>b,C&I%X|*Sø+[%^w@{ҿ27:E%K19ku4fU-]iXN =9_T{:q0>Tbk|_1TE*UHA STY]ֲ--w"׬.Nʨ&+Z_4s7_(t}7|L+8PWfRnEI!rR1i(WTT3 m~m_0iS:z@2e#w_f]ygl|KKwu˼}-~/,e=-C@A-M60<-jK)z4suLu 5wzorh' F+oو?m_fn`<=Es\O`5捨=;w>lD)zI֩5v㿬5JPC_$SFH(%>4`7oX]ɗYkĜjXh`sDbZǾqdy"UanntD.q=^.lx# 񴏶Oz]X͘.eѹr, sw H:-+*@k񐌉:jV)drw?ms˗uyf]xIy-hEvkMTEKTreLc&czmc:[X1(,{WZC 'BB95b1W'(BN U1X7ث~ `0^yMqσn;=c;|Z>N;=Ax BBny;=q)9K_biFAE_aEAS+d2+?ɿZ|P1%boeAig<PuipI3km#9ЧcEE)Y,i)H-I"eRɖDcęTOw> D.(6 xTux@$X0^2<-C !sv#D%-sefΗ:H|S䞧F('sRZ #8EӋ^+ ,%g62N}_O>Z_)e;BHTCXN,) e #o@Nӿ*rnH7mѝg\6@m+Y m=33-́X?vt)ܽ#> ,)>TPt|}+,a,hdAU >tB;F8r]B\(s"*:CgvS !/ˋ Nng^8ZlZtcQ{vt_~:;\x8n0x.M1{EW/4 okGǃ\'٣둞3(@2m6"37bf7ʡiܔr. 
:dځxU`v]h4d 6ǴV:OwO d+e$v@53a駄:dkgeNқŝvr6ݲI%ZaCǘ$rmr7 hLe XpI4GoWɿM8V;n멘{Tyηɹ7'=N't,Q:j rQz-2 }ΡILКV*d={ x_j֚v܆T˧<]o ~}Pܬy 1*a2K&tL¦Mp\)rlE(]u6O+E}nH 藫hb4-nq'Hq;R_y`|&d>ͤs -"E'gIL3j;TU \ ̽::Q9 SΠ,E#seX͜-Row|HnDqzu=DKb!q.ʼPb2^_W6?5WfTQ߿~tu|oˤUj`fxoOo۶Avwdq4iџ/?y1ݩ:8ͮ k쀛S ^Jzv|zQ.Q fut՝`j 殌 Y,p,:p. p!ǜ$!<1TXHX 2rfCSl]W:>J3zor|=>73fmb=R!6fz֝U~x {%4H2 $$V<҄1f>DÒ:O{L@OsKR9:8p=L 99n:I8\efg}~<ySűo[:vGWyeuB&̕hBzSR> }m-fz~ٮ7~a11/Cf_҃0c0WcylOܮmjo߂?kt]f(%Ԕ[Rs^ &lt~_SNHw ܫg5}>nnL4m2ɨ}:y|YkCH)HqQFȵ29O͓p'4o8GM)M[JUh$za3NA?Tm1|uN M*2xI 6XC/p<X$6Y>t]ߠF5;&*ITUGWZp޲Kxv1perpegbW_fA=\sk J:\+-+<ʾpE1GW\0WkX)yW \IFU1,D \k9:\+{60~9z[]k7aI[OJn<}X(OBFm[~'jwG_y7u㽍,73oR*EϢez+n87\p8}IM4CkAS??qe9.&ۋ;\DI~wNwn㽟>5 (m!ܶ]ZX}gp\EzVF뽏2+[dB ܖt=FJ@t91 6`JYjZݬaooYm`]a!@h'II|W+$B2@Q.Ƨ c ]>BǬcvy̴q$Ce ˑf ZJ< !gENj.Ia0j5Rj$Yl&+T]! Ba+L op5!t_AF"IKdsIS$SGR˫> ѰhFKDĥ5jyXŎ& gEI/ L17 k| IC:[pXNg0%#&HFY aY #1%:mJ.%8&j-{J%.2m2GNסt^ʬ9[ߺuOo͎6^* I dRkL(^E̹+iVuԤ;ܐd>)&'G$VH-Hy BeGfÎޖ͘h)g>tzߌy$+T$d&[W Ք :^]/7ԧo8owo-Ncrھͥ6~Th2|~|/|{qpAbj]TNGťYTAE'~c4Yk˴>g%, gK$1ѥ9+gS4HǹV.HFjN,ߝи/Ĉ *;-#SZdUh#&nw1#H&IW~2+c{.="46V`BaRh2ɂگ:j0b0.ĨaFǮm*Qg '}6Q@!;u A+NҕdJC$ŀF_6+R *rQ&KXSd4D c֑ɺ0~^ b+"ʈh{D414#TFrE4Ψ,$.c@oPu|d*I+IX.U"P$íLHT:A,W]ԯ9[T>K՞pq̜:]q+"bD)uLHL<*01빓ljwY ɓTƑrs3\<\it nxPd9x ~Ecm]7gR6NY=MK7r*l}V;40xE2K,y5[w)߹Gn[-/C*X$CK` 2CR/%PUfDdrw9J!H&fB#z0LyROW#i A'<1%R/=Bd.p,Z>Y)y[L,˹C*+]j-޶)ҋś>հQSNҭYԥ>>:lXn^܁kkMھ{v5+i!CtDrȓUL9sq%"XFImZ~_YҮ[_ןRr.T|\ igi8y׿'X]69^M>즮p~a5v[dog|&e/?޻銖Ԭ7t`~N1iE5p礕kG~HЂ f:/! du{eLT1Tдېk_*J] 0f=@l|Ghr';\v{@ Ad'pBJj!qtZJBI+e>q?F(b7Anw$ e$@ *Utg#iQ*a2hS"9Cd&0p.39[f'{CJE`vψu}qWjěi`V'˛WXB><"|AJdL'`SNډf;YU'ԡ 1*GFD* Q[lW\pR䨻RmmTw@#v̄ecpҴDiI/oXHE$L łPY(d"O"qHkBUwT8NƱ`K9ڀmmTͥMxq(cO bQ[ urAb)>Xʗj C) 1$/|:|L j6rh'L T08Sgy}YH# ehL imCT,/!~h[SAqX}>;햹(Kڀ*@LH.8aE6iC=>Hq#*[9g?޺IKE @ڠL>')&D],~,6P;629$g.S[SfsX]~xt-q"R0n^r!g22ku(L&'P aFxB+`BTH>g )]jExŤzuqN M08{"]ISƣ)/*u)"edCY2Zhd,|VVa-j8|I~>)l7f(ޣPWV3L,%^Ѩ(ARᯀ6$[g0/ 4.)֫vb4tLՈt,{ !* %XS b ,ID +^ڂwH9z~[pN $k:>E/j>+Fl?]A).j*( XT'y:cPMdz?-ugueUx#rh9WGhw׵ϴxbX׏uí篭eptnۋ?? G[6rU dnw =6~Co7Zm>>s~ʒ[u@]#Sl<0zjt킞Úmt~:|MD_maeqOnҎJpnՓŧ=k  Q:,lj4Bi\)9hF,X|E=w7EQc0| $UEJmd%6"(B$UR%@DvJvQSDUQ]YM,Cj;8ʜaO)p:*^!6@c, r$,-%ja_@; J""Թ|>b2 ax: lbH^H٬RA>)$ B%DPԈ:K᱈"P9#!dpD}C}_=6.?zoKTwW4'6<EXHY!1>qڀ9*PeIydx5<4^3YfyZy'46>$SOrJ8 VuQt)[3W6O:?ݩ4 g/^A3+>rQHe͖. $ 9_!yWRm)}ۮE 8Tg! 2r i#^kc|K}f< O<+ԢAn b5d@hﳒ(6C%UJKl<WXl!*ksQzpU*Fg%2hKb.[@+z?Ktd#du=5g׾׺I_^K-Se=oΫ@3|0PbͿפUӞTD\}8/_/ӆ>`wIŷw_q̦oq#'Jmj,D:0vYz)̗YC(f^r,zOQŭKL(Ӓ v%S5yKǍRN~}lsnenfV cZOUUjVK|<o}J'}g[gW 9-yl6엓Ӯoh2,?dʺ CF{MY䓳6vq}yuy poQ/layۧ\Ӽ]Im?5Iso9{}lg6?_hNo E8v>>*_x|q x WZ5fնK T vSfewn u\y-l;VB|<[5Uח{[zR',X>;Iͧ+x_|vzİ-?:ݟi]. \Owp+{ٲN Qbu^T3mh'Р'v:#K/sLހ9>[F =灇,_lD4E&X^ TB餽 8]?A^|j}~E_;ڑ⊱oN V`;@Hv0q<rt~|0E.U!0U -Dv1x"YicVR Z6Ga*#Қ} ,hT\5tIMټ8wl˼¼6w E˷/ZӅ"! &<3+%Pm+w2 P.K QPJ(JiS8jWR{lt7m .nUǿ3d<&`15)}v2GS#S#*FQ$ZўW%L0ރd("ΐ`gɠ3WQtK{me!0((;SD w!i1dơ*;GDkǴu)s/V>lon^޽gꁻqWS{pkf%Ay;(,M'fW_5NnO JFʸX66*4DcZzȓ;յڍmp]R:vWCHUA;tNkmNhFݙd}.}#]*\v$.<=ݎ R - P0+D%I!!4G' lS8J{~=yWFJZ)1(1~` L5^?q-mWzN^{5UUUU~L,McBσ,J2FYY5ٕFWThr٣I,cFYwDЎe(K?H*S= +T bmNbX4Y2؛4!0(LwrdH1R n#Ěܞ3[5Husn\ԁ}5 Ja6)Kt$=88{5QKgW}dz$g.fJ.W[(0\/=UkN0Eg>P-ZR1*3ReFkOtӜ޿d w=PY֊AT+ՊAbP-E`+bPTT+ՊAbPm0bPT+ՊAbPT+ՊAՍW@ j;=T+ՊAbPmjŠrY+ՊAV jŠZ1V -̽œTm+ղbPT݊AbPTTOTM{1WMFb9Bʕ vBܥ2ZV ɾV jjŠZ1& A1[/Oϓ?~V(p3LN&:ř[ڛl3>z#l?i{:$@cL \SoF3iX60̑! ժ\ *(u,X40!j D pe"+EvUfvRӭga@rD,m-y07*5/ZgHq(Q1A09)&n2TU29e]Vtyy̪Ssۺ-C[$-Ğ)nq/[0tʢ`)*ȕb5s9. Q)#חW Nr?w_ %&]|tűoYQU#9XW,&03Jb_}%*f/]HS\-OſKxi.b]٤(CIobgP%= &,+~=8c@AT%\@4YdL6dUf %Qpk Oh."N!q=77Mg^mY_? 
Myé^ Nٿ;-$ب+QE?5Ms1MoVr{J٢b3*.~6Fw;":n@KB} 0'i.qKzyRx"qoG }c}t[:ghM !:J"D) 2&9sq`ѐc|}ϋ0Guw;#A|?k}|Wh3C ?xfL_Ijiu}GYLB=,u&߿ YZu&4]*liw~ ‘$= -RjךPyV3g@M& R|bWOVZ?٫ՔW9{B z[!A׷ ujWzr~l˙O]{MڦǏſ**ob!4JÙ(>E0 n- q!Knd(H5jXUO${ϊS2&f[ %z]>+pt@62V3g;2*հZ 2ʆϊKʽ]*xM~y{iG>g :7 _> #v,npre8Ta&hLAF0Y&V,EHpDMP:(Zب3i;fl&iU&#YЦVG-s#<[wڪ2jڍn MKEÁ* |H4F Ȝܠ$]9H,5(")ʈtU0.x\B&fe4aMJ$2Rq$# $vsT3g;~ b/"ʈ"6Dx8QDLiB`K!b ֫ )C4$ҒqB 8>TR\C2D42i:kC>^'\vuHkzɾh+ipq621Z+"@[ML{@ų 'ʑrs9.r;, 1]:]s?W,v':ޏ!`p&k>fOY( [;d$7A)!TJhz7sIb8 iz|gdrX]DJ * RrS8 3r1׭XIӠK;Hj ziyi#pF kICy hi>mInKRqyLh疑Bd.Cn"}ݢd*$PLvsFRdyFR.[\ȵòrqwnmk>9M 'e2DIWƬi>"&*RW4% )fζ1v@clB0?T}7kWױ,a1-r@'AT\a5$e3aqW%+ \g/t>|bCŻ6Ș 7,rL{I H0( C{UD*w<0飱&~ ڨgT&M kC%ܻY>A6<^k X}rK=Cr^o;z%vF1̩bUd"B :>KA"ÙWZn>-n^۾]NQ[NDjXkï%Ӕ`5B!$2%$6%XΓW(NtBn=|v`ZR4"O`FGQب#5ښ@1Q7&Ju4nd*Hdal'A)L DL"rT*p8mxE18;L rPj́FUs[+W0;_lmlz+ij>O9 5h;q ( I p>q\\{! aqIӢgGaZ{b@u\$u#Ի1ZpJ"RȨ&* IG%Eʇ tL00KmК3 əTPYr:1RE@Kإ73V'Xܖ@;o#lG˴fK}7";'8KfJ;kDtCJ V"}b"ZǘLJ0=4C<2ԳzzޘGk'6 g;b6g=V /"O҆6<0Nם_:"#3G絞 l#(>r)4٨%XIIp%Ǖ x2vi} ?[nN-&&8&EM[vMlh}]ܨs@Ag35b (QI %T89RGYfx[~-Jw-oinI.8h n" B+J1x=V1Ə@!zcy߫75[UD-4~>(}OjyJG?4|isDbr>cΓ`T-0{pKG@(^W`oʜ^kfËY n3V'8pQc^?Wi@3t(٨AizQA9BԄkryv aK%#(dr/J)*IȎYqtoPo]Nqo{m+[F5vY KQTշD 6u|#Ω*W='7o$YV wjK_| yڳm^a[}nv_Ԣn?xUtO/(|03]>2\wFP 6ya빪1 %z=?RDbedt~ 6/U!.j&gdZ.XWK*c³g9Uȋguď闇V~x[j0Ⱦ/xst2O'_$C40`# {^Gs\ 8tJNEWz=]TkFDWXwcWŀ vBtut v!ݡ+xW jvBVtut1vQ++Hg 2vBS+a@4vgvpyg *vB_:EVNͮ3 WY4U=]"])I^MS4pϦyhswdNiF@pE-o_Z8}$+kFtpYgVBj}4+˵`]ڸA;n1רĶ%b;v=#NW>1\KLWG]Rvѕ݃lOWv=B.v\h=Cvn{OWCWL!E ˣH]!J{:AjC;DWWt(mb$zzR n:DW)"+k ]azXOWCWR)t +B3+D+[v(uvut$gDS5b șC B֐=ꭸC4 O<nwPmiDٶsz~B' :k s # HՃBӹ"y%ߑ^\o޽~;W>ڔ ɋF'Us?ǟUɫ[]wytRDz4z)K ?MjΛI([֗ IUu8:Wc(ٍ<"@T6WKP$Soe ף?e,(ޫ0 ωG%JZ{gH Om*U g WB3W19,? V1wxs>W8#9(^٥/ |KäX.44^8Sp. I)S )X& K>U5vkɦ A KK`+ัl" KW~0LF?/qe9?D29 )U$lYˍXpr{}--o}n(]à,}]}:V( q<ο-?ʊe6E $t9_@`)g?@ JfX|4[/ ӗAY;XAU[ԬV%γ7B ˚YtOAMLOT=ZsrltgV/-OϷhh\vTe"ʱ\Q ƿ"ta3e@,'CㆈdU,/<0p{aaٻrWiHw;5[ujj|w;-9>l9vb=Zm99H4s~TU q&G3wskM瓞+k6[ܢ;WcfGES T3;!V՟T 3-S"A4%d(ɨHȘ25kDR%*Ě 2L 4/l8L`R]5{t4;mc)e)Rx}}Dd)6S,H( 1MpNg#GP?9_MJt-N7l'q} =3;^1+Qٽ.]\x5y"]Z'ҥ s/'B]:GxD'G*҂iWRoك #~g@WMN쵎:rI`7ϯ޾xVћ7],n6Z{1%.G0)`ĆddU5P F6*JVη}m#AC=iUT.?.zfqi}\qV ^eM^~{oM^9ȶGl-lP߸'s m6#Ѝl%EwuyT M:Jr]BbuIuC!nʷv9oY킺!6&~m3|!Ȍ^RhuHG8t[Ve(k/ڽf*.Wn3zE?ҺP|w<lIj'2&S`Jl ?̝ţvG>ʎ EZ% 6f]`<]nGb)HN6v bkzx`8^}[ݪnmޯI_^L_ N?):E Nk5_C0`,8u'c`,u'VZæDs)+R WsFjM,RE$;Nq,U{@]DPXURS8N8&(L y0&΁˜ay)Y8;ه!~N:찜нޗ:Wof>'ĽxO|'1;k,*@ǢTԣ5;Vu&śܸw0ա\)M60'0)%Q▕ 652$%ڊI5x85Amp٫կ_Br Xsbf/,q_QV zvqѾ.mUݥ'{Ϊug}[㯦P3. |',&G&LӟYi`F_ Gu;=[8j#Xz5&@V* .g^ z%$g{M>*TV s*ӭY *!<0SV1? y7z;O麫BߺGd秒r}s#Perx(W/V9 rZ^qJHq"ݫU㔥dLz:xyqsfœp >}I`*,\RFq#֐+xł9vv4;S =ݳ?ABO[M`]k. 
i3|!c7>B?>gJT.oX*;S}7u[+Hm3S.NME|̨L$'AG2uVGHp+*u'cBJ.dQ]@-p0 BQ@el(Do e,R3^)QkVBVqobHsٍns6L v~o?>Y>d@ʅ"ʅzX^5J.U&%ab7uUTA9dTVJ$vD0De_s`E&~EߗEZ7#ܟa]Ά<#._׍6Tעkje|?g{f|rY[6k jgRRK葬qS䫳̪JC%nP:$&xux?2u=A6NMJ/kjJaR іq8-82[B?l JԯcG[ ]>i|wuUneX\~^?ygإ-)ܰJ-kw=IχE]nX4?~c'{*b[}_gOPm~~?,U޻9QPST ~Y~Oz2q0MOI zLY 3eO!\򽡈}C5UZT8BWh%?Z 5aFׄQ緞Ûmv7S p&NV坬ӇԁhKo;ѓ^_=]=΋՞IoFn'ԑ$C>A3{=|۷YeSϛL|[C'.Rwo7C=.xx 6YKAcc7_GGѾ*0jvq1]hN}o?in˴ebWtq|a"zǾ5?޴[HFeFg/Xēs,GϷ gTt;RC bfb)2UbfB$>(TMN>3]t}Ci`)u|sd@u\B'@&C)5Ã9Б̩L~ޔNɣzdUǍ~,̀͛q ޝ)A;ۜo?#;R-s~ 0&a66Q40RB4l/bb|$l-“XolfA[RO-T[mkġ(޵6׿B`'wկ $׹q`l+us"%JHj,y/3NUMWUKtUhDA1(1U3R O >[r*itл(l@5iF6^q@;wڒ~O'kl\]۸W<׏,Ġ?S9E$퓶GYuh $uVČij!kKV؊@<I>PO/Gcks3\iNpDNy >)-YEU"68<0OsC:mr~#wozb#KC 'i(S` ͖Bq>re\n~0Rw-ztv]cǡ9Cρ):.3947^\-"M ց<$KTͱ:S-SQP,׬-Ԛ*ega4MK^K`vK`wg^-&$gXI3=Gǻ +˗DRP%Uw8E􉉙;yePs5%ZW\z#] ӂ 2Eqಢg*!\$f&_f7b(lifΰUVb775۾Yri/:l=ߛض:"#2䋠S'/D5Xg) f$I7.8X2&`Kӻҟ +(,PmҎGzmZEE(7}lj8 $0nV?^iwƶݩOS۷[/yqRkC#90W* 'TF/^+vt0gt<@t9ؑqcr2P&糩jשvٵjf앪2|p"8IJ%A+%t~^U%2~%r;KXh69_Vpbo0nVwXϸ_wLrϕM5+X;TQ%~:riH!*Zkgm{}P¶ytr]]^/v k\_U=$kϮ.d: 75+E;T5/hj*Ml5y.z8 MzAfR,*}#K'bvnoTlBƨ\R'O9uXK֬ACL2rqq z`jfoS!yE[Q!\oG}3Q;Gvk}ԷYza={;"`ZW7:aCtt%۶ژUkX誡jJ:(K?՞UkRiDt;DCUX誡}mr}9]5ttR$FDW w8p(jp ]-˝u+1~DtVU{h,t="+ܸZ Q'_ ތp:[tNr{VIM/&W7ffSMH %tV}{q{#MC; -Ű4mn?^OOLey_nSsH~-NkÆwWei]_u79kbo0_̓ ȁ F8o:}=OqOهEcڋԮlU ?֪;yr ܞ^ngEȎ[]rN]_Ȋȯ FWRMC]"5'l_VvŠޮ+klj|uPg-cbmq/p&gkFb0~4!vkXBl]J>3cݘbX37J߸i(WLNWbͥGDj#Z>fp'oPڱ]]mjDtk7jp-ZvjJ F]5!zpX hx骡\1Ry6P`4tNW 4CbXkңWX誡wP]#]i)wZ#BW NW C2YUڱUCPj:KҬ "~OJ̘hYǫlph`dvJ44#xD[ 5c@h骡wvHW^)'^JW,ȏ\9wv цK/Hc8]mؽuFp qfhߪVr3jǼ+]]m-ԫ ţX誡5z骡@W{HW1yW,ȍz΋誡U;OW %]!])#+J\cЮUCi@W@Wu~/ y} ]u)(%Er"(Ll"2"] $#l{ $-C/ګr<]|-~t⪛e 5DE5sq5ZfKdN&jH+Kbwȕ.Ge +V` EHJQ&6ĘeJV j)}{p}-Tdo'rU&̶TXsBX,c2TF1Ƅ^p IfHP $T:r _J* "$B,Bu46JӋS7tqv.U,ժuQ*c&!rU@ZExRXzh){e YzB6ʌeRiYu2sb'NT)@fk^f,7{,@+%YX݀6tʶX*H̐A+ Cu k`UZ\5j[1/ɕ9z)@xf 4=yޒՇr}yR?drօ$d # R>m\) ?kPf/Ѵ* K%V62JZ DjSUQ5$WAH@xn֡Z 䅮MB@vEqp\ìK& }h}K! bD-)+/GvQ)]L.R{+ Ȟ2> #Ԡv1c680,)dQCU6$cF ,u :D0!֑ 뒣 SXH 2Sx46l<)6¶<] <:Q[5_ zs(S ΨqAd /M4M%@2”G%Re89@[7)"]A٠y\Z6JjٲsL)eU\`DVA 8`I[ae\cvt(Ë \r C8ؒ1*C 2>/H;;$؊yoөH c[d,AMBP`r/p% ?$X9cv0DUExK^!UWb,p`Lz @:5Gvyt"3l3\5xo\I1~hxS^{(#!AY!u R_Y0$);\2"nQZ\^YT0Mrog|v-M7H Lll@jւUX&0_'ӂޜ]B錚%Lj  Q D3dAR n~BX'rAAN9" Ryte"LzK+Ƹ$"V_MhG; y=p!19LH. RrHnC"D-n3,tZx">&W])!4ߑѭ1HW,F%eYe1b)5$!pyO@vv;EN?ήU^kvrڊe,xTerCV5.0ANmg A&lZfXgz㺱,W zK&7LCwFp&#YJ픣*@R]ɏ{\$5eH&uh0<ҕb}Bxnb Ų{ _U2șI*ց`C#>bʸ+ & >7;P >2M0̢QV ZkWCV_%hxm '7k#۞4(}EpS"Gx+GB[h|A\^!(]?N)15U] Q@)zTG)؀ﴣ'Ái*>/&{hŸBDmgRz!DΣ!95h- F.}U E rםȣ8)SDG: Z4?=ZmLMMQt1s:BlRtAϭ? d@uQQAi|-uf%[yB0Y3FSms#'Ultir ?Aj o mBwT:4: 5(R9ik[)#kZj3`+$z`eE.d5bJa "؃9>d4bO{kz F!G`P#nQ[B0{pirXl 9x6[U8v4X:D|q!#WFR,wAm*H( ֤|8˯tη9&.1Gcg:B5?zFo^zv6D-F%v>,K_'.wHN_|s CnQqx?emDr4GݻzY]j]]_wOo_? 6׷oop7oR|wt_k} ~[ƂU4۫]ǸX7jz^?w]98h;v~?g~lܬ^ UAn?߹5o7_[sßэ`jWc&wfSuy2nzYlNS~O _%۴"=|{$ 89+;8O7\A%4\WÕp+5\J WjRÕp+5\J WjRÕp+5\J WjRÕp+5\J WjRÕp+5\J WjRÕp+5\J WjzRU$wf9+ň1\%[vUBp\=;5\J WjRÕp+5\J WjRÕp+5\J WjRÕp+5\J WjRÕp+5\J WjRÕp+5\J WjRÕp'5\@B ɘd:c*MpU,i5\p9KjRÕp+5\J WjRÕp+5\J WjRÕp+5\J WjRÕp+5\J WjRÕp+5\J WjRÕp+5\J WOiW ؑc\p29Wep+5\J WjRÕp+5\J WjRÕp+5\J WjRÕp+5\J WjRÕp+5\J WjRÕp+5\J WjRÕNjPopsqwr.np[(wuްh!(HApàÐ9>@x'W,8g1bHUdl:X%E q@p16uN XmUyB{Y%mFfB^O5%B]-շyȹwFȶ{[fOsvaUj{b˝lv> \\n j}qS]Y:~j@v翨P1?/WxgSo/_^yY}|tvT V{W]k_P{7H&㫣 {ի򡞚 \-qݮ~~sW.F/>۶P9/&0*63q7?,n~x?uGlPJ?3RӆTTZV-UK[|GUgܛ|DuγeY/}cʲuY+Yj9e'ѧқs^e~o>Uk}=|Ҧ`Fhi%lr*tGJIs1S ,W~v 7PIFgB8](N:3 A Xn Rp]4NWHq5C\c •Sz+W6NW2鶘/WU F0)C<#j\J#GWԺ >i S\=V •e+\d+V7 YXpbprt \AI 7d1b+LI9*H;+'d>x\ʘW3U 1L&gbn*>kH `Af7&YbZk1*I9b:!I 8z+\䧎+Vun1,)<ʉX.XīJ2. 
\AwrXRpjߌU-+?9_ \yaZ?0abc\ycަX+j+7KkjʩEWgec [#W,&)bc] Sjr/v+Z1ba*ĶVu+Wɋu^ Xm4n9X`MWro5\q%WUMD28> c{$;6dTW3UHi gb7p 00~bgʊV-IbMN XmST߀ \AptN X')bLW2f qJ! •6#W,)bib(f`I d(WP:Xej&xWg9-bA &OWjt5K\EK{vo+ketz^dRq` a0L6c8LS;Z4^1=CL']r}R 7[>ۘSgPa ӻ+?֒Z"rAvZsѠJ9i%KhPͦsfx܀;By*YTu ]sY_[]YO_}w`Vu?]Y|RgkS$gׁ:|^ hN[n82vhZNTBns9[:kE__l3. m⳽ [s̅R0~7ѡ-|uN{=o;=_{GT}}]0r}xEM0Bp1r6\+fٚպ[.Xe:8 b+V 9\AG jK< &/Ʀ2٦c$w?cW:3 TƉ)pUWzK \?b0vt5H퓟p1:({-n6*WIѠn Ƥ)F *ҒRKۖ^ 4_t$~ѯ ZEZ޹ m c8҈PWQ.MJݫl%jc|eCin7!ja Ε H3H0s) Q>piX?N@"1x%l5͛4EKW:Cn^Y9p8ך"%҅pT]BQbuE!@ZNAe;_P|P"tZ2&-ITPLrR yô˜Gf6\$#[y';UEE.?,/}72"Q5j IJJlL#-5|r7Qu[LhrI5LUcZmA]w6Բ )'t׭oN{|ȝ/Avjt~ëٱSvo1&"ݠF!\M&ں$ b4ԫyVglя07'33칗)CiA t\LY-)Ko()3\LO7óI.g/ȚQn`ڻv>pګܼ;.Ab &~mgA3>M6K}xA-[(7*z!wzA!A'mC!4uI"SAd+!9ed.~잮f;~K.ꌐ>,βKG ]\l gpU[O_QvW6lzxE.k{&Y m)%r0>}b"Z$n>e1v(xv<"EZs<4ɣ)Vj|TYq-88hdȋ2 ,( 95XN2Cl?bYU5<х␕RԵew#ئldGRB<3p[;q@AQS <7(OF%-pKp8h@JQʣٸ"0]+yoK:pvFG?9#e>t #F.&pqb&A)ڶ9c3\ '?zn5k tTJh/ D3R\5u)5?,wTiRhpT'F87Ǽ-OLۛwS1JHkzۦ{orlu]䟶IXڜ+/g4 1)rl*T aZ< SZwgSs1%ײy-'F{FiKDLV59[x̀a}O?54Lٔ-~Zn* }F OSvLϼ.|6&&I6=K`| RSP>9J63 ̪pt8/,CH2: uga^s MK:H#EKh\HY,(Q8'LXτJ(:zC:/J^(֚umwFKmk9Zbu=m_q|TEz˪g\ }P5OSm>VetV,7B J  y*cQ˜XS鈣+cJjTKS}TZN3pzYC3pxjfB y|c$(E*Hzx/d7\xrY)s}$H!Pp)FKK9s+OAC^4|hvZk]Kv>kFlS7-GlWo.׉ڴsN_X's€+\d"W)䏖,^pjV22a_~^v3ںݐIn)m˝l'3ebuVlt\(GuhR<&< yMwCnp&vAǫ%3'S5 oRh&0\ ڽ }4,KAjcLl*侰(GBPjJ1֭[`g\&s@D.r 3x%LZۛGhDѱ[3tfl<͘ Ert5 nh_ow/.6_ z],Kn:3ۄΚH@:D$ )*31ɐ2!pƪ\pjí;_u|0ڶ[V< .H@$c hJ5r8dP\fB:ˏA!ꋳ'f֭2@MF%@3\,g \0J+Τq` G/oH폵5mH^%N*Ǚ D+"8'APPsTØǜCWGFztFa.D,t8̄<_B e UxEhFdyƨO8llݑ2UCd/ JFō`:Ь%!iW&*LZ~4;13ةlّbqx6sk9yY|9[?WIB|Cmw=ɳiyݦiCͱ W}Mujgqz]mo\L&ògAQK(z5QػAklx2F<5_^}}%#']6=ל 0H"QXXwmH*^f2,0 &WiYr$-編-%ZAw+uT<}EVDIZ8gߘ>i^DHd˛ؘJU_t*hdq׬3""T-t?R'RCW(kvwhoLRKLx$ʤT9,#9'_jxdRE7 mFQeN%+) )dmhR͠x43*;"d s^VAxPXH#//aȚJ"dy2f=))^aD.9"`M1Z\,J9R<>U9RrҔȓM_ XwӜ7Gy*^q9}d^3L׸g}WG91x(_ _*G #vAisɐ!tEe!9CEkQ#r@ RQt~tj]nktB ,Xj(0$Bz!T*6AYY$G0U>Ua60O-wnO 8HS@l0>_-gl<9}գ=~>WЋjSŃ6>\  Dσvh݀Z"eD=>](WT1ҵL땘#Řփrݧd+B`ж |P_eG'[;-^,w:ulO(3PH)L6G! VP>cIs}&ǣJBn}򆹱I>9vp>AKFS"!l% }K;HjQ+dC0he_䝳ayoGNV_nrDo ~(n ~7Np?us}3̚jr;faSMx?@ʔ\L%u8ա Tt]kQ_(rc,&t^(h'[ګ`EG Yy(1dk =^8#e촕,Y3ilJ]%i-3Ro2~H.J.2(]HRj^;R"' 1Qlfg|LnDf(uEtKXv3,j0ȭ-BEi X?m[Gb+)SML,&:S_Rg jkl Lrq^D`rȄ>2#H{o(zĐ]%1 K+V(t (s}{O0Bw;QLVo%L]9Na\gBV>!)+QL[s(8gt>Xtqx5W<ٝMN9R)JV.D%;AIK=| Iyb m@P=/$t$shOٹhӉQE<㥾XlJ+Ƒ'Q@`-+@)#llsˁ;Fxrw\aљן~lxvdAvy4WWMLkѽJgZR?>ìmaG/ٔv5֝nxR%85]q7ork?#A:Pv9"\dKc}p/2?XP'foK9hwۘli3wʢ<մdٞ.V.΀B B-A⿊ѱo[(<}HuZ0}4 3.{X8w ﷻSz@܅twO~5{z4S|}wV~[%|хox5wI.nK |02b霷Cԅl=#$ HSA8tq 1z`cƒ0'0Z夤_hmKekCN,B6U %-l> MZ2gˏK\9'{z{\7j2n`5ߙWM6{b u+ưqq7lγ{y$H?d1<L1Jo43-jm߫qL#Gq%˖cxmR}_\uRՖxɔ*![O_/! q ~ʷ0"D\J^)da>Nq\Ì/8]pb gw:1t4.oVޛ,_ZveXê6&׳mՍ>bXd‚>U;ψ#S+TybS r־K;)fQKigY)/̸ZX15/Q}+sw^Fݒ.W9]U29gȕxgogkffUK_eN3ACx p!nqtoZ&# պWOK׫~o3Jk`z/³uP5jih.h mZ[7;嗾+N}E+m^Xe}ևw::' |HQ)ԅ`R$JJ]Yfd7ģ+p-Om8f!'"<{Sgkq .,q6 ob.F6BK%)8¶GPHZ#XqR\@ڐ/TH򨲷&{ p|nƣ/_1-1ob~XZM7x][o>\39_Y}*p E)E#c2@R0 "G+Yt ]u =""Ա<=0 M"bd A }6HH2RdDTdQCO^6i)E+ G']2+wA ٪AMߐ1!wW#4a&/.VT-6W[TݕXө~'9Xr$(mT!_w C-ɹRx QٻƑWi>:۱'<'&$mI )wxTEEHLj @e{̗w޶;u[kYvyzXrUBZnjFb?dqPGia))mg<|,CVŰ-ðF?x0_MExxkzy>};"wݩ1 2ȵNKBk(ܡW2!\?^AI<=-[jOьy5sCڽ\wmhq}YFMor XBUZl; ǎ-ּN<´mP)nsl}[IX֜ L/_hsB!{xi*4{W%LjnF'蓽L ]ys{OrޕkY| p54|#i^m(r٣ky0'_3O4W2*={,S7B jD2H͡[ls[Կ.:X Y1=r-=!hBlF ̢&*i2«Wx_x9-y`e)m2^jXrɀ\T^(2zWy3뛫s67*+s VąUϸQ*5ǓUF{ yXgTl|R+TK'oc*48(R"J`M0'jTKsjinV-};RAc_= PN0)=h).fZxrfE &jR_=|ZqVηX=>>~O26c,'*Ƅȴ(SIr!514g=:C]Û)!f^1{U}՚]p4Us<,N_Dm6&m}#r $R[|o|ӚR*zr5t>YcM(%Yjժ iC r5w*0!%F-P<"#S!9<2D)Z%O$STE:R .)؆*`Hi_M>Ϳ]୴8t3LC8O8\~e]L?aߦ9}ۣ=>ƟG[E&;aRLEM*-ZIۍc5 A6A h gOu`˼c+o=DJ}\ QJϙ. 
!EfdYKG&UNLs_ʵOh`ZOE)Յ9jn{STMŇo鴖$BuҌ 'D"q&= ܰ"^r}:;Go/vj -ʫrd |H N!2g@Z0LF@pPj.uW Lr=}W7.H &cWFp֌5Dݩf^WALWOW Jm6 ^R5gpKY;/T՗/54jK㱱26N8uB `#Xq+:cU`sVB@ /!V:81sS0.DvAc:WOA|6~5ǛzCWw v 3NFDE贖&$`7 xJ:ȧxP* #89d\F S jVg;yjr۳M!=N6sK=<Y.DMDU(ΣG&J֋>CN:fȜ&[+ r M~)LX1Bm*E޴9| hu8>?X ohyJ-} 5"S!5@s2 p)JMkqwWY{zWKZzk"ۢwwܽc-w)mqٚt]~gZG"̧$wuW\MpMbO[H<.|)ʖ)1ybYͮuUD0_KhzB.ciVUz- aE" e]Kp-JfM؃o@a 2Ps4\HR2e Xo`'Z Dc#9iIq. WA'e2W&+KDq˭0)0 ؔG(mE/ gDwV8+Ҟ}1`#dY{D\/q'!x@"0+](՞k %PsIp.Y$UP2Mwʃ" Ȗl}& }p$'!\Ha 2z = jo<-t^GnͮK̔^yvm(iNiw}9QjFlɣ#h}'j?es퉙1[X+*?B 9i/y":9XK".xȸ#/<7duK:fR.g%MN–BcGNQ@R}#co<*aao+ʞG,<(PTvu]4~Vˋu<Ӊދo}/T||] C@%7|jJiO+ZmEk3 Fa2&d/Hw,ƞN^`S(z6L36s D$ B/rFl?k&`4ؾcWV=Q{`7fH*r|H$AK$sd)q(ČZ(RRA{,v H2i\I3QDD*R c2v}6W& 0{]QzD7-٠%Ø-yTNaNsGf.(c^MQ|\,Li$8Iq( "j>L*)"@F$NwЦ,V{Eא|qɮh{E3∋m#B B+WJE!Mޢv61 4 ˂ H#+l9p,瞸c(xcO@MTXKo>#;kcRQ,<7ƨLmJgud.}@% "%-B-7H_&Z"N=iXd%kZR*T_eċ&8MWhyk1Bz1MOjNW 3meV/ \,(P`@+ƳR~jW 24aivPf?}y +%Vdk-q.rBwtI*U2}2e<9nw+6J+tVU\qf1pD1cA"SuzѧtffĽ#1>1!B% s[fYSBQGW\ZB%#\BZ U!vEJcBPCB[5Wpu3\mGzBGQkVT"alW0ծ[1+`hઐUVۡUWL#+"U!=tPrWWGW`#\XP8t*T^#\N˶mb2tEaKXT={ۻr'C>4uE}ȚE)RlТBs*Rɔ!'-fu)&굡,<˷P"`&??VƷ^J !Ļ7]VH_KX;X]9b<0 $cLl,h`S:&0fRZ5Z a C؇J`vțC>}%Vt}|ǠTc3{Tx5.TtD|vΘ*|:r^DP\̘"j+ *'gDcs٭;UN}۵dCkYO\1ZI[]?>o||L u%1Zreά0J5COQ*z}Hr ؆v{yz %SfY[lKڴ⮝rԝ_)KZ7PZ@&ga@ *y"(#I8!^e)4\j .% XXsQxcz:1B&Irj 9Z.~9-*()ÑkAdNI8eP-%\%(#*`ZyDL2'$M B4T=wWcd*$P:Lv@'ϽZ4&W,mf92StYF[ ʝfdfKൟF30IÌT(٘5GZLJMTd]ёđfΎ26 P UenV31B[mVnprϐ:_4LDrDZG,_#ۋNQcYPIyz{r2!:c*D&+BF&-Xp~NYv^.^Y C,cdabf 9f=ctv?OU0~ sGcM dFN6*f3*ZAĝ7>}=bH{*SEkÇpRMML/+j=`q4^Ƙ`CUaћNi1ht CNّ5}pz u?0 ؔG><3Ag9YHSsճf94ÝIR?[LE1(n`P)&IcEV!?e :wY=a7_J'\ZO&iyv9C"3z1?Y*ΈY:I94X:<>$= QY|Q!)X9Uus:1]-E܋F'2gy,Mǭ/ Z,HE!utV~'Es*'o(숬Z)ڐ7-*2ܻVI+2+*j"yEn4[7{G[lH)i{WWHs۫t'ۖS"ߪOXM6&Ļ4zܾ=6Kke :N_["7o?KW̚SY?) Xl=ht2Z<==΋ތlOYOH#+(}M̷y1a浑a>mouu5wynoa}OLd,\gtobnXsЮ OݎyN/1gǓ]50Xj&4ďWqˏxN֫_ =:yn~dlBX2,IڙC{/ Y ͊G]&Tmq #" //3   3. i!G)1qp2C),cN:@c*Ĉ:eFw/{p 6j-$\II3'HA 5WYg{e|$ĢbZMtbxv, =#|oȣCjgERmh;0f6+V3}dQdicd*O?Su ߟTǀ䅑襞9S"'QNb9$&EPMuĢ.;L2\[Xa@B_ ]բEf}(Ez-UOw"{җ"͠SЅ&;D!x0QY#K2@(:R"-O6J9;.!ӎ!B^,c:O_JxβÙtMl"aݷj5nL ^gxi~ݛִLvy Wzb䗫eBT㗥N~`'y>4)Py)fQNy6拓}+sb-dh%&Ý_9xobU3.Fbzxjo~\~Y{@{=WtknS(XK.\nؚRCD{jH-=-?t YJN=i8ӄcF6~hK⏭1Ms8+m~"\!8\2%6oʄ\7<ug&15Cr;\t &ɶYa{."TR,^$S1Tx 5J5YfH`[.Qתa:iߐý~56yݼe]ksγ-?9/f^&nvfE!}*;Pu:ESlgkP)#+á{ҞF/J}0%JΤCDT$IPz$ᜐm%,Aa'o~H0IOd0*09`:h#SAthl4FyHxo,Ckhx~{=:Lٲ]SO dS@łne fīτWk:;V):'XEfC:o v*aj 0VAXL@Vۈ]TԼC1fC¥"}Bc08=|+-U;%(Ĺpn&r.V-mqY~e^+(-6A8 M584%6PJd ЁI,`a# R&Y`K7~ g}z\ȔNW^~K_r;X64+FfLN«,_e 1+tJ3;YvL^ àp}ͳL:Z-o!qYQԤ;}:Leb O0 O? i4+aRµ<`%XzIBgD%ΆboO } PbFwr].|} 7鰷 >:j3g&?^Wm8<9/~Q뽓~7M^ɛ?Nm8Ѳ)hpݱZ'<?ע2lݑG;kakP^m-GɚS%B(|푂n9ShO)d`oeTѽTƒ\ u|;oِ0ًɃO))4XT`B㕩yVDlDk=&Z`fRɒQdݘ6~5 ؕT&*ixJ'?/n_ΣuZ>1\ȇxƐN"RgO&\15Vf|a*BgAFIuF"eCAs4:]fg dҨ,,3JΧqyfؾ>*DUA$K)MZ%Pg#Bbė3oҕJ=¿&6ꘂw}듋l 8C_;{&ǜF^|NO|ӪraxN[.J7fgd+TA]^CȠόunqD>82 ov)rO8pdy#L=~2>roܛW|7UzsGR8鮛y;G6xdzƍ:60m3nFś2   -3˝vXDm!%'X+QY/Ve(^uA Е~=;0u}3! imWn~}56[T۝wtTO&`koe5W^ap\Ȳ}H@\"01gd(xJB .haڋ\0 H\ ƧIasauoqx\l8dViB$o!U{, )1ҊhoQeנju7}Ɏ[y~[ߴ1^rb-v7o,H#ڷ<ݯߺ:uiz-ͻ>o@ldJIP.TŤdg|dIeu|{]҂Pe9cVhJUʈ$SVd e#.[Q"cF5P!6"kG/(G-+ ֝$H(YB@ut3q5}R*DKhldwj{! 
k&HJ ٠3 >hSE%-_m5 OOb;B:8Fs§ƊLNjqnưf]<>P64}@Wbڑ*M\%w'Uj}>__JV~ϗH9rF{)QŐu0YIDVqPh&ntVjsd|c21 508umf< lJl6J..vnnTKL3!Dc6RDpQ (fQ1s.97C{.> ʼ ϑj5e˺X?n~|Gj_z$d[AV퍻^Q'Ǔ7e[{y?%5p,`'&&*/VN30)h7(k߷Gj[7CyuzZ̓:}65Ztu&脶SC] QuWF"jC>eSO˦:M=Y.[ujpJP1ѢB"Lk@KLHŰ,$@Ye EK[[imE%zSYxDPZ4~| (QYu-yݜG~y9߽8ez룷0eGqʛFg>oy^0q <ǿ&Ǖv]0)Z Z^Y,Y$Rb Y?mۃ%:nxC_g7 +RY0MCWn.}1ˇy,߭r:l{۰vWu_6y˳ z?S㻝Ѻ*nZ9(3؝y3tHR\aMĮ(S]*:#yq`omn%z 5Ϡ&`^ Y뀒 b֙XKF^wPXM?ۺduuP:OZ/~_a1nkP˭ GfzOd+>Io۫wQ$Rym_/.D_6Z !t{e_^gtfz_<9^vn6sx]\zP!ߝKȱa?#V-MOw_ɉ䑚)| aNY/%;:$GUn/JL}){K_On-##PQǺܣ|x`~۽0,{qiU7YCEysvG&lCli)QۯUyiVOsؐo6́LR-R 񣔶1[H?cB} ջ*ms~Op7md;C<Ҭ9'c"yTh{3l3]Ok̮6 2=h14 zRkwv3J{ٴo_˃Ұ';1MGA9w#%2;4j=p̅1 n/ZaIK/|WKszzu_&rw@*0f5"{+j1=x,fLt.XT34-d uanHyZ {G*ڢ9;|ot9d# rumıІ`7ĎPM:+:1U`5Rm`<>g ǀcL3iL)1T!8>#n@#>O$_ʻI1f]#-U^R[cr2B_ϛ{!XYUF3zieϙVҊm3)'3Clkqƶ3 <6XٜÖݒyv%[IkmߡsXT0!m.f$XjDQ;Ӊ#%Br !wD9Zj={b=`iѾب5Jlz,6!;3M&f Vn)Ƭƀ*2` `ёJ:hPeXX@wN l)P4AZe6y4rI+z3bgwPQy@m>1h-Aa[.د8w E Ar:CPCć% 0l Jkkʡ}`uQ6X2_k|DSZfxsK'&ߦdh\*2 op2v9P& 4 ɃqKYRpP\{B;Bi:U`R7<4`-%D*zC Avekz TR +V;g2M!{O@C$%nEX0a혔?%0ֲ488)ԙ=hU r6љ(O_ [2:l*zc(@Xhq{ՠ"/:l";J` 'lBr!]VI{KNd^TuA]0v9̊&۪#lS %Dh 핈^OHܒ` d,Q"1hІ]viĈ\+I3SES ϏA{'[thF.M7Yv&g4'X@Q\sXN#=̻:`wolŇkyZ͎'/x &ްٶ L0c63eV&> :p~0}J_` JDkk% ZW!԰˓]@Oŋ`Z hduFu !x t,GѲ< =%0A%b+uKMANtmXyT͢Jrj@;3[r#V"xe;&{k _6H ju1r׷ݺKmcͦY,*VlBȈ ǡA]XjRXQr}\pGYi _I aʠvp.%|m9GڕZ1FcGY$ף;Pj^vem)E5/h3V稁&Ya:,l@GPF)nP:L/lM3QZC!ZP {8xFwp.-*Rvb`+:lA ډ;qPk|¿?X+;,s)&529 j3kETʽ^Nu3{5ED{BZfF[Hn([` Ap_SNt fĐ{imKF;w+n涀iqǹAI #xB`f^ #w`\wӪGŬ\kIٺ yhYDh4v/fc/Eȿ?>2#rIpݰ(a!/G|/vts]^`72V:0ё]r N VRkK{JyHX׷l3셍C`f+C %>-' 7^#Xɣ/ɛbCwLN\pNlQI\{(1# zh E\3㨋1յΘlF ImL15jxR3kѮU5}V(rdD[AL6@RmB>[{hl >T}l9 ynYvQP k k (b^tnKàekV3_,N M F;`%p]{:uXOJ(8qK !Iv[13zp4L6|XfK/Nn,P˩ՆXMԤ$@"&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@'Œ'( $%F~5I ˫y& ؓ@2&N0 ,kH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 tI GkJ9l$($$> $A $)&d4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@:$.6G/gx)Qo/^Pv3B hpxKn\\ .|% IK\|%wOW;-_R?ቆ-卜dA;>N7v#DW0~ 鵡0swWo^ @{Z3^JNX`ݿ_b{f[g׫۵JZ%p_@nZv &׳K,P/ݫOs~}u1~[Z$_g2޼?7=v9_>\`M:/hz z_?A 47v+PK>^-{ek?[|uZ>-[]vKTR6[pɯw~6p3y?7Ӊw#=d/kk/W4ȵ-y|VܵVkLa[1o\Y4ʦtW6zY3ِ\_w{ nd&Gϛ+~dc:nxY: -< EW|]Sc/xEt%zJf%%+gYZ]=Ŏ7rV'{tŔNHnc&3mo#7ǿ˴c@&f3y&ٱ6HL&|+Jܞj[&GLZj$*>Hubp*S Z TT\"$ KrIVP \SuZ;HـW+2@AJ /WIRp,WIeoDJ +,u9`\ZTJ*yIJK&jM%3}.q,堷{W^nhzvD) U1g xfXl cMMQb!Lc\bvA,eQLFA5zaԚ1B%EBTg$W8{h!Ŗ9 -+l$W8'V\ 4` \`T1"x)"qe% [\^1ōߺ$ɕ#֊ S3Õ+Uqܪ4Qҩq}^v=TZ%+NW\ \%F$\Z;J^qu*QN]\\\Z;H%斠Up% â+l9$U3JWIL*+NWI lqI.)WI>N*-W'+`6-А<%hhJ-]mP`-$b0eiR)Dٝ" +'[̌(]l,TW'+DUIJ\\T)Jj?w * 8NWrJّ`V \^Ve?#T[qJXY: =v5HgD#e2zts, \%(ɕ JUReJ*%:A\ &TQJG] ; z#ćq%$h$X(] \#KUR=TT\ lA"d1Jr+Rk9LW'+ FԂpEAc]%ږ+R˙WI%NJSĕj~f&L^HsؙDW+?s,$XrbvI./fF4Uψ&FTL et +ZX^J6Ӽ&)& ͠fsLB3:-ӵmP%ߊLv1 4qrV\V Tbg>Er-mI{$HpIHb$sUR3*JǕ9gA{0fEǃJ6a*siT\=AH. \%+?/s\%2{\%ZU\ @fJ !cqT :A\ ȂpE &JUR+lJ*T\ TpE2*X ZԹT**NW<4JIˠ\%Bv|RTZQqu ) W$X$c]%2URj$q譴iꦒAЯ\p}F.@:;ڽ;P/eA,JKg& L1Jru1֪qE*;Upe-CJM9b\[̖.kgWI\ޞ10v=ɕ0ajH3Õ+[qܪk_w Kq,WI-W16Va*s0_q*"Zr[`U\ZŲJ^SĕdZAΑ;\JURyJ*+NW Y`4䊱OOv-]T*]qu.ɺJgrBIRpj;JTW+m4@e8ax!3i-Hrx (%a[UN.;K0hsǴ͘>AL4Kk~gol|󕻞nKzͶ ]b.5IVG4پxs}VǏv_lRol{^ڢ{o7o}K_l^?ҏ 8}DXH) bzߟ_;iӶy16.y}~3}}wa #_ߛ~k|nvV29ͦp|E*aצ~1n+oV~]m3P"jtgwVNShP++ҭGJƂ ewndcIt؄|ᯛ5) tML(b$ 'E֡Vu\YCK PL#I"{K뵍4}py. 
n_ZmYB k?i{wn>z3 u~+n5]n7n}χvR21noGծ&L.7+Gӷ~/{ :[|{O9oP_g:UB|@ .h8)Z<: Bz}b^[?Y(C1Jt^Fz$Tޢ0#P=0 qCAQ%:Mvz0vg=S3:@G h8U^Vh/AEK/9hӶ_kh:# i@;- F]sN F#΁fȹ;V׎isJAtk3ޙȐ'Kj-`E*ZQ XgEƠZtfjgz4Yf{ȱ3:痋w@g;wt>, t[({r}:#[ CF>v`BwSɠ;Z1^T(*o_|7/uݽD=5̭z+ƺ<(p(Vy/ wu=%30^Kjc{l.Hؼi(6jw᛽~z[2KO7|75wLd .&fi]l/_'"ڔfɚitlYӽ']nq18I7\W`3F)IW7VF٪7?/&+f//MS!*[zͦTMIf챉IWUðG0)I#to;|߅#f;א_ikF󯙵LO 3=\fz$fz<ɹa:g@cK<}Ryp"?XhZgz6A`dzrPqk-Gh(rZΌ0Q;j[F)׭Yib 0 ^y; 5ggd@O@ 1psNHE@'ixUJ':fste ro:MޱY1-}^Youd]d Ω}vZ ҿt:u5bsuC.xZ5:Hn/$w3pܧ3507R`Β׬{/r^0fjl}2t``ȃW&fSÍ[/9{t}9op1 ?ݍy{c Wc@1-[jg h m4o6hA/=y 0vϜgU<Adž]2,ll&.|q?č5p6<4= ~[77ux7c^$>b=8Ȓ[rf|k+;5I^*e\n=l9yW3zrS|Kj69vr펶wFL5Q=F#ƒ:h(&8&&ja&QvԗaG=ڐv*׬:~U'O7jt}`tfeaFWXBt8Kld1$X,VpYNpc6o7FF'Vph`oѪP}gfj9_/.OM kQ6(,0eA6x`Zj)4L" (X{`d'Aر!Q$r 6#2R c䋵d$s2;yz1qegq.4KB:x;8B;8ocCK;`HUPqjfh櫓<6r%Ӛ=k=Ju6Kfmi6+o<H!'c1јG)#%48Af:hZ/ɱIu]_uW\07ҵiUM:ІNs}} ->mⳞgڷ,-Zd]ZLޞo#=+Q$N#gLe!j(q,Rd?@y\l*vi8AȆ.6=Ǐ=q7˻!hk+כ0-(BA:RƓ+$DTJh9fWEeTڃյS:pQHGZ; vс1+yM=Q; b;5`wu.$ ǧ+g9g [s)ԅVsFٻFWAYEm  @8o99XvF9>{K\q?QE?6ES+7]I wR.=5 Fm5͢FB.B&&.G?nLS}0Wia$T c(tłK!Ped_0x_yc? $VñyW):vs'K7e}]wr1q~X`0Tq( ?UZSTbd ?]q~=Nj_O'_2(¾k)GUJ0ґЅ(El>f#Hx q8 y'Yڃd]QD YK$;OK2벆"-$9I6mir9kUp9@/ BHə"$0;Nml欙8[5+:7&OǁioɆh FpǚBf:t9f2'\5TQ33:f (bf d;) %J6gY(~8}34rla]^Qtq5-mȦ[kޛS.m"|h(}MJ*)lYƨOU9VqH$r`F+xVjju'RBMde˱jT\dK!orBB/lZ[fl|fƶF[W[x+Q-1Zdmr.olcHų!ZJ+oReLrYh]WX(S(۬XXO@Y&j5>.+\& of[bE1ƃm:vں֣=صify E'ic9/CWN\b AN2jBS{X6>1BfPP ۚ ,fǑmC**AdPoL+*j"6[ZD""q݊=rƓHS*Q2i)o$, \tT.(C4 Abl!{Ldv&-e0mdP%k7b-jMJ-O3?vq-bVɮv5v]\lJQ,,BTZ'rpd`HV JI{FQ1r.rn:ؼc&6+l*{hx 4$1Y5SUt2ɳn9"|RĶӑrGJr^第N_耓՟VmǨHQ|f ;C^]& :#;vQC%#9h=d,CNJnKD0h*AC96[ JD,iƩVhMJ-^O5r:9?jG{߽*Y+M},!~ͮ) ~k{9VQ;CCtmLFb*d2ڪlsaz-Y8QY>g Ն< u60ɕO9#j(Aj eSdԔ~ț(͌{xnhP J8|(+l?7h ;|{+?V&tC]׭VUcf 09`}=m 2[ik[xE%a|pj;U0xX:&>nd@HJxȨl `ft[$Iutlc}a~ϭkF\ Áۄ/KIP+)V)urUDxJ缵.d뻠I+BEѯÇÇÇ7ÏQB Sb:&``!+2 dGk@!Y.`Ȇ$sIM"gur*ØZgNY}Rg}Xk 9+YonqrݻܗoGh;^#I2[T`=^,.wdzڼՋꀘz ݒj5vdѣq(lEgtV $9v.H@-P#)U."J: +!{:aU A3,@S)9k4.raچj'6+S\j OH)j" iBXcy>;Yl`-3 P_iµnⷣ_{VX[ML/xөn,6N9=IL>e~vFaO]gjY._,54OϷba1CnȎG*1v}2͓/C[|oPϷ:5o淢y7L쭾oU!]_ 6WiW1omgm{>3x^V>1׸y)T) Ε"1Hd@qOw&_Sw?w)gmn3JLe5&&"lhmV>l#&^|-$0i?4 †W+$uiIfoM#X qfl< >vtH-vh]hrjz1{<ǫ|጗/~62۱Z!(amODA `@*@5Rҁ\ E؋EkyG0w0 $T,L1H'TI$P@e)*W^45 PAS" % KƠT(.K"Iغl3qȭ )Pd;áHo-"ӓ:GlKTÕ빸VX9*xg)Ȣ|}R%gDHOi0&bK6K) O3] lOk@kkCCWj gd]Ģ\ʆ<]qRJ#ypZSBάʙԐa͖.k .:!dHQrcϭp@J+Cb@g?Z8$ԋ,lp⳽F*r)2'7Mx23ehpS(VCс 3\MJ.)5.)͞ӎ)v6jz΢_$ލ\|\V\j-jlX`k"iO7_ߏjзrx5'F!~;jzyYneGlz~TG+QӤkv'㳋2uNa@ }~ryŗI(VU==g'/KVSKMǗ_O28_dfiׄWCgWV //y~[. r<#_uܲ랖 ~y繽ƇۜCΡ9L54=twޢJ[jt၉lVå!O3yw=- }e7nZʑZO,SNW{?>y|,?ltzJ9PVyG/}Q咖U¤j=ܘ9u{a 5nH8q-˞t.gg7Kk<^ܳHjU ڪ&gxLھg?dYTزg5ŋN۰=`_h#5Fjwnsv63 IآbzYDtY@eU R:OɄM!Jqi8X2&k.]ݕ@De:ƾA"CeTY{p@J=U+$ I?&FXNy<ޕƮ+qJܝ<_xB?J&iI%$UˆTT U+HWG C}}?htЄWdh=;anj>6l{H|QPڜ޴E-qdARrRkL wm%G ܏ꗀv33E6&/ݠv$Q)A(ֵHy{ԩY^cK٢ї]L2pwszbjEF_eJ48H? ׃9P*/ʉV}?Qu<O,TO+4VC+FHWHWZ]%1bBW6}+tR銴4$uAb(th]wbAʐN8( ]1\mBWt(=]1J:.9D &(=U|,`/Jlj_]5^ ƋF}i G6Bk =ҵJt%tԮ.]1`C+kPzt(9ҕ!])'+Pњ+Fj7 ]1\bN;]1JtutE9DW 8ziBWGޞeG:@2^:DW z1bZ;]1JC+ktu,H33h> CiCӀ3fh{iQz:ҴSB="vkH׹}]np}2Z9ۓ*b(|J:jޒԼmDAHV6Wn ~ tE0L0\C!YFRlqdy89y\31ZP_%(8U`k΍5Ur WfCWjͮWR/?`j3/Fhze'{66+uvd]~0tp ]+Fitut/]f0tp_ ύʺ#]]BXJ5b 5NWқ#] ]Iѕ . 
Ɉ(C|Mtx,=S ~-Pm[~c07Qy`:QИQeya!J+I9|-GgR9S9KcaC+FzI8`"*AV9ɷ1<zqe7g_{Л,m.| ^e ff/lDuV"[)_vJׇM+55OKgv.:buGP=o:m r!R!u,^>3-~ۛX4m9syr^ofE`6pq|z gݹ\Bv2ѧgݻvx_$B؅ܩ{Wz1^T:N͒ڹwyE'`ZO'/ <~b-OLd$dd5oЬ n_ +4qD(tҲ|^E+r~m%okc+Ym+ZJN 5og+gV+7x̶$e_o,;1h!Axov+,:*$gA>㋣XaT:mma}˺WʯN\oކs>/}2m Ldͅl:/ Brܮ($J !|AHѤcUޗy`}qDe3n^!H=y\(痕P^6s! S%9g;qy3*5o 9ץ56\EÝf3h ZYw Z՗4T=ӡ*qVpgH;bbx;l&@e;Pctq}h0?4pl?O1M"Һre> XfcL8R Nϫ[GYMۓ5.e\#~& t1wZsyx5'LQFNJw*9spz) ǭmu(RuDEɢt?hVIpH?d:<#=|aNgr=&Jc4OΪ~txRGҸ _\mo3ȊˇXudfW:s^VqF? \ IbMލF!ё('D餇gd[=?U3I[^ ٌ[>cF1AlF׭{b ¨=9|G%uާbϨ`\!x.27%TtDdȺڈcA(sB:8T1NFXi9F05P4fz&jQ^Jv`m @D*& <y*53;$CH*)J#memuuu AƊHL Tc {h !,&( 0mS\6kݻV:ғzX(8oh j~QkV^{UT@3Z D9xod0JGØk壥R^!mZt8 bP$;corJaJ|"V2J'`ˋD޵槁TJO^V`~oaqlZO:)*{>VӑdFS1FȻߟ$08$/:EDL>OsA4I<_,o6u!38Z%' 3JHIfy?_xa)B%bV &WpYcuKLFyRJ`q_?tn39L)+ v=߾&˫ۅYm4z0]⪬?/ !~F''~mA}n"jtiڻ0.^?n?'% ?0>4s^aKZ)y$ ]>]˵L61.͇?p% w"F(/Gj'*툪j?A;TC蔮g8 !;!B.;:чk+;RC֝RUZ7P1hmUM逮U8A;TCWeh킮s)U9~ ]CvDx]=WLFQA'VYհHҪcjdcѲw9K"|d)L#렳Y,!_zΦg -R2@=(&PeQeE ]A凒&GEie4bo6\&\w^iiT!\[EU+~ 7mƊH-ppAa[p8spյbDS;f4"a|1C=/B *,jo"?E\tXdy.A0ENҢ OzʨSo>%ǘS8tk fL3,Z?&~!TmZiLvaf L ȼesEF+đ1\*#ab.2,uVm(IKi GNa"HU FAj aVLRE ۥFi RUq2lsnc`hLP 뉐0DL <` ř+$kYҝ3M,ID@"&CKZFp'|Er-I&dUtV.Mz߭?x!n7'W(ҁDYñP 0L"1g!(WIZj {͕QGEO+bF .~\,T0I X AEI=6ƷZߘD<+`Cϑ.`%lPZ`mS@?:0Ҝ"n@82 edR {zd@ZF.J*gU[ FJtFs s$($ƴQ 'A9ZC2$K5%k 6CZt B amMDP%76zvɠK[0X-6"h`% $=/; 3-K{W0 3P06D4 ȷa@&2*142m]MvwdNq}v]h`!U2`[WWүX`LdZ`V +c]Sp!irW f0 8QķVKa4lqt9N2ڨFV=T %*@(Ƙ_"vɺA/4YǬq-?_^MSbL7 [kk_+7\sڼra'^&,1zR0gv ݭ#w[KIQ{N0bk\<¼mf{𣄜b8Fx9E1m LJkoQx1X{JB'z_VBg?x{>3C,ބ?]&f6fkl3s>/M>\~>ﳿOb.ne+L8"_AJ/g(imݷl #NbfZ7N%, ľp9_#m% vcvz;^m`te##r{Q6$O3U.vVߺwxT:eS|jQf)\EfҩE6N)瘨hx ەIlγkٌHT(2\ώax#IT2tu=ti;6?rbT={%79E alg^vo;XEn uDŽ?ȉЈy[l`JQx{ʣ()^,)gAR,P8EV/.UF`%}i k"\NENQQ>t] ƒ]pw '-sN13JSSRON~eJf8IjOO?v)5y_g$]΁axt^hP-=)|)drCLIwh!:gdW`YhG~xˠ2U;:2]o_7rʊL<~Peetk}Ч҂Sg"t nj4c}YGYxxLj!W2f1;cvz1W j187cv-FW ]]Yv>h'5M{'FX\#-Zp*EHwm =ŹZNz'jt@( ńΓz[3T˻ vȐ`-f&nf_p-i֛U} 8:9oP7*-)0Lp>Z6u- TQF<$x u҈&̧&*Щ0]z$OU 8oDd,RHEV0&on*(,yAޤC ):Pa.x $ieeF\TC(LI]RƦکu$s'P1K,Q`vbJ:O5Lb b&H:2#mRKef1Nyܼh&fp{u>JU՜pOH_a٦l gxFl'kJ0Gܵsc屧 *Z܍-h8)C% Ud+2*ɩ"ЮUH_wCqB3~mG`eRJX:p|.&\הM#,*mĜH%I}nɔF2 :]f')8D`I8XwUNyӃWhݟT"JL_BGd 54ձl/0#hi#o&]alXs6? catTADI:$b 3\Af %UJ2}uR T.!nUf9;PW.]Cm R)B]M ,8t3, |hiWTKQ!L]6b?ת!5jlJ\bg!zm ܖ6Tݟ#=+"R9: \ 0C%11ȭ)Zpݣ:nyuWsI2wYu%BLm0b6]ҥS'k;o[I%?ۛ%rz4f (H2xTIIO X6NSsF]]G#]A^N{84M[`Ł4y0 ig;a(,iB2p9GL'b&0NjgoE>~\(₿{~I`|8dRJDY묶"q䞻d@<.|6_Kw͓ߢOOh1oQl)X٭d47%ꕢTYXhbQ #ҋ- yɶRIK oq`smf!E= `&_0RP֣/;֭k1\up`/K_z'\,֮aJປ``EIWRRWvkvo4TFfM[@D44YRL+C:JDzD-FcnEלPS>9% qY]~i 0ί)*Қ^4=p7OpIWXc9TQ̻ܕLn}|}q?Ȁ.kHW C-\@\QuΤ1OQ?F}-s"fBzyՃw~xݹG7Sy!Ky@3"_/^S. T5^ˣwqxu'IrtX_9s:&9[ uI[EZ 7m3&ƿ;(D_|.Sv-Y? Mh#F6Q O\Wf ޓj!Ljemv|Slh IU 1b5++eKnai%޾ni+bvHB5V>_n 2FrPVV؝±,Ke}l%9?uu+kՍRNǻ9ICNGO:-l_5x)gch.ƕbbDeyR<чlRLSW4X؆% ߆7̴fͽ28& ´qiͩK`OA~x,9!yF!=ADwngCwUfPd 6 #luî֬I^?"[66Fyjx\k ӶĞ@c?}*1`S7d`|H@(6;)&1֖}QˋJNV2s^p))%mgJbœbG':܋=V >La`!' o&7laS646µߔLR$􄵦nR!N/ws>#mLbL4t_e2ULbI&M,YB93*3Qg/F>y>.&N`450%)}F)Fʰڠwxl&K ;|AoR`8nzˏz =ӻ=XfPЧQX>/*osA4_^\TvjgovQeA8gdEJЄ?co^>N ޟd!9 W0wT8'˰Cz|g\G1b̼=8g]iݜ٪?+:#2wۧ/Lf8| XO ?Ph_x2]X3sB34w`r; ~~M>`9*{wWP|Q,?4)O8nx(ARC(9mUB5'MgoST.E{ 9Ų]C위/]<\By8WCTЬ+ ŋ8#+) *1kq.^EX":j,u= u)B2 Ao%{N#.j/ 1;JthZ4v9Fu[~ˑ\zH6s BpqԾg`qFgr#%M钭g+(Sk2O:WL1TJ&'gWΌ a}g9;A djޅ?B)%T",3R1~2pjZI?KZ^{kF\vm:Z܏z{7[A' 7%82p]4- qoE}}G%璶>a!׾vpQDES6W}$c=pќٔle6hB۳6gl9--?7z8;,|>i뜼ZjF29W~$\3MT+A!J=|[dNHi2uz&^ɨɨ4Z 2CXRgs6HkY&R,cx }URn5rGU] Z[QbmϭVq!k&XXI! 
u Ic 41tz`m˳]njhK= eh5o^W߂@*xZى6,r],)*";ScΨ;˰e“$Fhn}NoMRa݆ǟgn)9cђB2s-u=&h` 5uF 2 7ԸmV c> }$ǸLU]J[k+VPN[Lsۨk[xQS|jP3$_3^*J)-j63- j’&$SsĢdyOF) z=/MSB9𯌓J`G j aZ'j|*Zނ&%4q.8JRB8B )`Ejb)WX9\3,SA4G .IJ$F (``8`2b}@)43mtLA83|`!%mzijf x*TAfˌd %!0V iC* <2Ch< %SU$Y$@ 6HMT97<"L b_HnoGjDFaʋMRCV9sݖ;fa:T"rZb.b;7 i.(< JA:4Ha4(i]o8W bqMѴX\DQ[ZN۴J-zع-vASpC΃p7M *]]C݁S|Q皀](Y;@!F>9>֚9>?.k^ȜN/t"TcVy>wYzZ Aժpпފ[qȐsĖ>Vb3]-w-uw^Z9{5T- .ݹT ޳ ܜhۥn[-;:~LQ=xTLYSyAcJۻ aZ1%:{$LDm+zHq1z,)~|wjX~}N|{+q M[+:e{(JιJ@{vXir9u9$0e%S!!yBFpPLJJF6kû?5ŭUC5h蚅J|m@UsWD(tsV* 5NԏiѫU4tJ3x^DrjKVכUMyT\tQ)7%]x:gf|v3‘ eCVpN\A;B&q"V&jD52J ZQw!w_,gi{; cĥ>"LnJ2቗p1u, VE %.{8-ju(絟ޓ/-, i:x;rwuV56 FY=vAto7*#AKx=VRCwcERPB}htS?l LRLGu#Z([QoZ6d $q91ZSlmwj!&#_f1"ȝ\ JN3mVi[rmա,6xԡk*w:4J*[q" ޛ5ePl #zN{5RX>tJƈ;um~r>p"aq$ XDp%RƆ87G oy־Nnn/nl |t޳ G8v8ЩLTæG .yɐ̏/S0ŝ-8$y-d!M*KȠW n<+7lm%ؗMg5d/HKYjp̟D;FDdh&pp>"*J/dsw ;aؿ3X7>˿kmpDnv:D.}mjV_8甐~>P +,GO6fn֎Z1vuGE/WxAPzEm d.>b{~Oa[=kMk_l'U)\fvjә #NCnP ƓϟUo*7ƞdS흫Ͼ2WiURw6޽9:Lz-2]0u?k IRTY0N@Uŷgu@tM<E6<7LwNXΎ(K}ǚTDSm Is@nHZƞ b_f8s3uZxw;7|0Ɛ8jcL)B#O0(S\q~\7"Lw=Kl "PF0ŒP[b`aEC3 YaR1G[#Nn1Rf5bo{á3x6̫Re6ؘT{r,> j;yVe(05Vjegm {}-' UY_ATQ[ xED#EBeiCxBaPyRecj-X^QKyQqE:+D61:3c0崊=93Y:: 0@GDGIa%XIa-l!t$122VcAlh-")L-{ #K) 7%}; GqYQ^P=4ꂷ4g}Rp61,u |Yn.{tf!v:=;N{vPƕq8=q9&n sp>;ݟqfgbxHA T(Sw]H8$cr z2FGD ib6!:±NbռJ8Rtz{̡VkvbX SN5 ,>I4Daܤӓ؄0J*-m.O z}g)pϯyqY5W/_?_t4g}/?ӕg?Vŋ߮ΟzZߎa ȲӻW/7uulAwM_0\}}='?΢]ۯhMA|Vˋt6Y;W]ڝ]DExNų><f~ew)f; a.wޝV*9vwJVj|rmom?2(Pmϛ\gx|tJxGsG(ɫ.|@S*" {N0]҇OcXz,z~a+ɪ1 +&&ɷcl}:,>z>H5+;'49x2[|E/z1/c զ Q5eaV>$`8 `p~1|j$狉{^<} H o~^dz?Y}lOT`f4L 2&={+SZswO&ٷ`}/+޿z[ lm2[`ih7b:%^{xvX]%ygF.HH,Ve+8K]M`[kY F6PXҀ$*FszYtk` V˜m,^k~}3NsELVll{sflhQ8;Z\FM77/nbLy`kG ;HR4}®lx6q)r¯= @JvMG]gSi/tKwH. uw1St-#*/EiT O.»;x7u)g_ Iik{k{6oY(& Dn. }6L{Wx M֟TXkԛ%UoNaB kL74S BokL݄m̑Z!ܳ2͇3X'H>b8TB2^zAPoft%y\C֏e:-An`{T͆w/y5a\DōSNE Ռw[޺sfn& svHrnEִsFnkk}X,|XUSj*-'\]Isj(R?hf/VQѤN-anlP`J_HF&c+CdNl̕su(DR84A# (5i5JUx{u;KWLu^^e4\E_҅+]q]hӸV{"ou.Of^g!2S,IxD[UH(6TGD0WIL.,%2Q3+hK(~0ƑƑa$ * rQO& ;4u4ǒVU۱8#jPV+&tX i7R؀GOEE,S.0vWWYdp1n8qw"Զ YOVrf%"Q&G˚3KzHe4Rt3)TcͣX@|SRk?׋O`j=y¢#YKA9bY-rxP b|b=6pY ,eXm' x`I6 V f:V02u.0޺MZiE,%ϕQ1;؋a؛Lq@8@rWٻ޶dW,f%@Ivqpv1ff ,y-9M2%RSIQuWW$)%!$s/_c*Te>U"J Q&?F1TV]h|NYI⛠XM&z\<";PjGqWkdP;&K8Bvh fwa!8D"?ꌈY!~'iFw([*-7|"N!čgx96fhVgm*J[q㏑5i; JP.[oHEpUJ*b,Ri&nFN@H8 ^:^Zd6E/`A^V_YUI$A^LlŚ炴J,X&x9z1Z%:.\mgV֦h PqG4 wy4sfg^*CPܬ%|}u?[ʵ XA\4 Ɲ m($a!nѴ98ѡHNoAeh Mj'unv0Â2":| +^lOzUd'rл U@IK5*I Q'JI8C 5c@1ڱ QD| P/FOm(fJ\YuC"s$=xpeH DZˠyQ1J$?] q#ދ 3u6:Uxt2zĩM. .znvY Og"p<Ǧ䧞k'xRc@\#iw5-΍}@ ,vNv(Ȏ 5!AWN Ղ[12d%PriQ t iꗩ!8uC 4̜>&G\EӶ)!?OMW@7ly_alZSn~L],\-?Iٯ7!&dބכ|޺'@3zER1*K_c8(-Z NOSx'6[2'uz= měOoFSOl˝L󃇞8\rSgI<,OrzNr- ? 
`C&27yC&?mg\R4Tk !DL9eZ@no_8z<;bN")+#!HIk ( &~iEjc^XWE݌_^~w5~shS?z>-\^̌2wmcpP.,Us VdcH,)# CKL3HՈ*`jQhw ᫻kւO %sY$%F0Q΢s 6wFxS"2RIX*JsI1<<+%*+C Tқϗ;qܙ>X#xF&ЌfîG|>VUW5Y}w_Y魓ᇔҴ{ݚ3/kƢ..clWCh9xc霱0@Q睳<¤mpJ)+XCtnPiQy.bŮ(;ך ݚ;#^s;Wba18Ӛw+%stw8 3ι84BԌS 0buk$*@.8ɂPl=lnL@yz7Of,|O4NJWpL2/@?.=*ړs$$,0VP1!8  2qRF40u&ՊkYkY Q˺x8 r4hƄ*vAa`ùuTC=e> ׬ABU{b[%$U9P5էw*h1GǤD$UjtZńژ5UplNS_GA" H/0E̡Iȁ$\&X(dqbI48zGR'R3#5L1Ϲʤ mc}h5H%^T׾ >.Ч8MXG4ũw{X3FIDOˇ{$|'!ΝO7ҿLᏥW @XxoUe/_gJ_m(&mo`_(v.?~IXg|Z(jŁWOK i)hnE{_`J y&{=P._ν9{P5G\{7W!f`/(Ogx13x&q~?$/^6t؈9\ g$ @܇'55s\sf]!%X(t{} |ʑj.?K&p AiB-/PS)cXT.+=օZYcZ0ä@(_ Pvf{ZNI PEw67uz/n{awv#=>|O&._ E6gȍ=\:G2G: Vg @7 hM o ?=-A?ڍ$0 cU.["bOc=SckMx"DH: 倿h?>P ,kq#_P0U 5&6 hT!\w*0I!e7 ho&Lk$յ:wN Vrn&kj L)'" XlM 6Y,l=o-iJbDrtq9IoaS*J!hLudd*fhw?:d^5׆JO{9BH]B4cIj,K\*wρ BT"`="Ȭs)'T!A`!>p  ?qr ~9c ut J7iPD4M]7U HmNJY{قudlQD A٢x)$Ηʶ`2މbd!Sq_D1 txx㴂+Q\v"']99Gˁ!'<")) BmAڷds; +|@eۍE '' .{K]8xK//4Hћ =khz$9 }vlEd8J{O$Q}T#Xq2[B3vu^R&[F˟(cOePcfl;fK܌4 f5)suٺ墇HU[nsQczw;NK@&~BdC.!m`>5[}un.G]\teC7On[\jbʗ2W|Ř6,F]\bݠ,fW@뎹?sBvbnWRǪUPhM1 8T[Gp a%b)8@n dժ*aGr Ð$o}i0ULAMJN-Z` CP2cYgj{_sK|(A9E@RT㤶8Jbْ%Jr,h6,!g3$t')[qs!Z(\חv!Rb}SiOw8sS0PJəB$5hh$9rIʌN:"'>NJ0ZK3Ix@Vf|8-h Qt٫įG ,hzc4/}5R;|r[Z~> g˲XeYe8i"-!}rz`z!}EFt ݗo^ օ]'4>40|Wzǭndt -"zx[iDWO 4WߡqH);()RG3Tn+ -ƌ l42 !a , 1Ѡy1 vkϮGyK! ` 6}4i `RU˞4 t%Zߧ;0-)LԉM &ȨX'#MZqWޱ[- ƹҺ")P+6k)k B qJK&P9XGyp D$U*$ ǭ-R0gfիG; mC4ƒR`gr'{L:6::$hEa5D=٭HzNK2{.$Mݩ!Y9q*qϨsk9^ 8sZ9fNĂ< {+ h`Extr`rW%oSKvԚ?C6\ *@^5:uם^ .[a-ɪ嗂/YRRSϗ?tꭔ4 `BCb}4j/2w>5a{@za S$2B11+\d& "s Q T DyYxE{do.RR}^>Q)JToWOޒ$ܦf,9X" rZmʒbL@B"Y!B1ȧc?^bfHݏY*K !o"Q5 J!BrڀwӆX>AyPK!KicRQ?,ODwx=h(coݢ#&!Z̧K.bG-bLtEUW qPLakSLSn@fUdJ h N1jv";PE\w1B-e"~E/Ӊ`0?WwtLrU gt~}kD^"碜~q -_TٞYw8K|ыы:L W^{ukobUޣ7~hH멗M5$"g6I97ӋWD=P_X|AK}ko#Ʊ0ʹʃFoC&(}17R"몍 {C 5'?i=GK L,Xη'\STN@47E/A Ve\ZNَ\(/L[Ju./stb:c7ޱ>w/&1໏K_yn;'rGg+;Lzgi-$ ~,p8 Р,}u$ceq:S&A02 K(9:!E6H$+nw̨Bz26KԂ4۰i(,KӀbf}z_v6N}lər#bm4 (V)1 aEζO;9SfeCueH;ILg1@PP0*|M@Ҕ )LMsTk@,BU Kʳ-XqkLJn>[ AL].ǫnjތz آj_<'y߀ .~[ t%jq UB4B._즔^/ ()PJRy3WӷLߣ"=gڹ)H:!@[0P3}ۧ򪾔+إv}(Bϧ r`2[;~\݌Dz,)uJ fp`2jw(.C!p쭌(CfI)3ַpi$A1䌔N/Ae/Nm y4~p5CGU(zjI Vϐq+WP!.8bGd5e ~Uv*=Q{ly/Gt_ 5-ඳ6E ۠6OmoaÆp;St,O*(LQig̜S4߇0H:ժ'Ub0G~) ;g :zUar5p6ϐS ǐ] Mq9',; kNCu ! 
W*RGUc W6_g7|}t7YOf`+wpAكMl=/lWz:'D†'_ɗyC Dj';/nxm:QP `A'#twu/v{v [};ožV;ožV5޴hp!G֦Ri#m)HPbiQDpBT 1%~ J+GG;S& '(uܙFPi6Zcl LdA"85yGwu{^MwdGhfUf3EQN֢Y)arpU~aS̮'D7R[X`$eP: )rlBHk T[+;ݺsY8 D0 a-5qhHpH_M9% 9DHtJB:;a\U0\cY=[~qީIXbxƴdݺo;ƾo;οeXHFԙڸ%2La Y(wRIv'kLuZu*(XWp-VPY]^ \'Z*S*)V|sQX*%zdI+q8vL_@,0~*|O?~3l͇d2^_ܧ0^p/gߎMܭ9b:u{ѶP(X IV 5pVKGsM,c$$%r=Pa~I- sӽT]9O|R%v]Tc+u6 qPJ4ilT t,Q12q~q}va>Qbp'&)(`5w ηڼf-Iey}pHz mL>$J$!h]9ݤt~gR SG-21b]4sM[O7^xZFٍ[lh#7xh'x7J$ְظ1=AbH ȫ滹W>3q|w~vrdF`H48!B}2Lp,IڢjnSpN3$kț'iFHd&Iƚfu;5XN綠錐wd:SrYI[u&$3(}LhS/_hb]h)1V%4zSj0P \6%<ؚ`?0_D5cٶ u-_ehQR0V{zix#e}ͤ9uIe-ob<*0mS3!kı_nf5_R E9!q9Dn`UEa& :+z:/F?<: .b)\([d d&Qf#!߁ws)>o:7pЧ,n5ID=yV|8&M]<:ӟs.p{8"`F!}-Xs bB8Hi,e`))Lm I2햆#AfGIRJ ݲU$,LJ Ȕ(gOUf&djGv7݋mZw)%S=:>P1DL N`w߫J툂1ap硤m\B!B]Icl_mYa–bCBTE&)% xN>^7a2(Yr(vxEܞb3WMRyS"t!FP\q$kpfn7'&#É $@,ATq'F{ D {4INe\P.?)e,1fYՂ%"pU!݇M}hLp<Һu(Rm`H݃h^L( Udt>|*?$sgF6?-Œy%\ە {8$M}!!ل x "B iy4 ZJ3?dlXyfSTVI~ls\f=WYUY f6BcT C`A^ DV(fSBg?KΣ-t_7@_/>/-l,e]Q?גq$-yz?^sHeUhO~{W|yj Iv#e~,Kߢ-`v,dh8szu= "اU vX=i _kGrͯWʤ8c^+yy#[NU[dYEyq(TmQËQC W'D #@ /@3["k ),"3 {d!o6l\.8cM`*\-rwFMHJU< /+]a΅`PE{:Ѡmx˱WWY :i0 cX6~'+/mGݯ[]U+^>sZsj _j/UCF5d l9N@^l/psT-ӛW]Ǧ7Υy7$D5m_ho#ô`y6ag$q[d5[Ve5[Vm-|@^>:F3 tRB/fv%)=}ۡ mN%1/¦~D&|ܢ-|ܢݨm|J`fmGHᨌ 8Y IX[:aЉɗaL%EfTA1(;SYҭPREQ-w1{m A ~ՒA~*ǧ[W8ᝈ e-vHtYu'*ϙ eb'l)\vje;V>>-Z  | 1c!ͨ5U`"]R~ w!yGS} {L qN@7+Rɥx!3DKQMq2Y1[e0hAA`ڋSmD^Zk29dצɧ:{bG-{bGX> ^Xȃq|>z4 ~ `< _f}ũBiQTKsLJlRAQr_Z\pV[ =q'Y{ӧӽGCsl,_WJVꠕYMꝡDfE{n0k|9V~HQEy-.Ƅ_kG>o W=ocVMp5ų|ןOe> Y] wO,6ǃt/}/f_៼vH0']{ϗ0M.ԇJwr*C!@bbZ4"u J.RvUi MPsrL)d<:}9IꌖZ:\X/̙c'Cj̹$ֲ\f<~w+ź`zg<ȉVfkMx 0@,v:V>6uƏ3#7L]kbA#3= xg?;ZXsPyu[n+=wHP4f=Z^N8$-AUUp&1WU@ǀbWĉ=ŪpECav;=@V'y%89E9dW1DL!`˳P%rq@dw N15W"lT*.Q' d Uz ς([S\||/gOדw^5X?]5N&G7Aq"S~*9ރxPVڀvlV3D b*{c|O6-}z-ˇ ~-::~zxu=Sϗ'yyMiEw/8j}9gE.BdhǴi◧4߉=\m-ce.@k:ebONuDbBZjS3=D[D35Qnt|(7JǫѿY$Zk t5c柠0w4(ۇKsX6܎N?>x/#qiqjonB{䡛]7 \xͳ5Z^aix13]iVx %C Ewv//|Խ1ۖX@ZM|ʋEv{?i<ݍZyExhg֓ $N<}*0~[̟ﱑu5>{9m=iH+ƒi_F Y!Ĉ[>[# :Jƴ:m o[)PZs e+G`-poeD |yvޓy0&דNwU~t Ω3pʘ3B@UJ-.Th+~3{;X|[*O]!ZW0b$(%bTli2'a` DJWX-!> g@6hbhr"52pu<LK h*_%kaS j5 `2X%^q)YH&yꫠY}y |vH`#5w:JUV[MVz]FNZ6xNPD%`F3+D ][c'' kw C[ʵ- $ǩuS;()Yifl)jM*(9%A'; o ;췶p6"bX.O]N- dj, e62ZM&E( [BĢxs,Vdi-NtBT?mPԂl]VJP-ҹK B{Q^F7=rzM":٦gVj¾wƠ^i7nE=g@]ew;I6[K@c̓JUK5ۂJ`3Et/C\7QLcnNga0 wh2ڬS?lR~B20[wX@vg͙-f1~-~?=9/v.ѿOΦM$:*WJS #S|>;˔..cllL@fWPvt+`ȩ2Dщj5FQEgIl8!s,lx-/$\Ł҆CreZkO7g+Fjz |erZ),0?\m`eBdӻNN/s:xPm EtA-q+}frSőbԊV>_9a-[1?f _:a6"ŀsY@E1Z7bDQUD!3d{vQ0psFgq,q-unf\ rԮœVzAԶv%C^uJϽ}a݅_zLlwB{B欑E#BX/pw!sڠuנأA~)VPϼlմe-%eR5gbx{khGxͷf|bX:v0w[34Wf2E$:%ս@iػjaɻw$ZӁ^^ģf[kK^|*ӗKMMSگsg7G9hV;IY!ݤ݊IYQ=)m]bՑmȲ">%c Z| Mۜ\**[wEiQ$^Zk0$d1sG]OSjor/r]g ja4@ll$0?{ƍ K/d+p)'W|\F"r? % /CbphŖunt/oR/"ߕb@bt2剤 e)}ƛQXTK,*إâ-TGDm5##3yny i`DXMd.MdA$DAҺA|ž0t\1#Mly SD2 s|8QF`OѺ-.cK7o^'A ! 
7gw6i8#jr[YR'RQH`?}([ɦֈrE:KʹbwOn@fW4Ƙ* -iITnlO4 I5ȶrn2zg&wu9p-JźǿD!(n äLqq彽V֠]e/?qJ^tʢi@-q=llt\?7Lr_kLy{zaXqxA k5Rxz.Wyo%,u93{3cp"-|1,[!)q {'ͱC_ލnG_}=(ld (gz2jDz6RZ~s.&cGPIQ$Bs{ecR9ѴRVBCgz^m==׶H>HKĿmP-w<9w5 cP4S VFue.`2K[oܧQޙGW*;# N'VAr /x!~Nխ@A޳"_5cP!q_JUA]A!YVㅤ:+P8GK"*#0woN~rOTaq<`"֟ s*k~͢I6*a*(g;̦ZR3iMM+\ɟfn +NΡTKs8ksʞp(lyPb ?Ra<\~<,n0|`mLMۏTJ%dTV]j}q5T/=\ Fg?t _vQz!e~&6<|Q@aaSl)n4Ú1SM˧ReJiKuwhEVz֢;GVVouIAb/tZ)\E_Uڐunz̎{]E\?"&A 8LVc}jݤW}PyyXNmdW,^]st7t?\7nal'uJ/rfW)e՟ĎQR'¡T{V)O}EPi נ(Xm91`XUmpd}fe3O$!/\Dd m&v DtbhNyxg-bvK!!/\Ddglk7zab":c4n˜df7Qv_ *q{s$pMRG)z uy}F0ϡz͝}@FAttUEBDtS`*DtG`2qD"7іIeL %s2"-&6BKtVhqThM3grj^09Đ\SÉAdO֌hˇb=ݲ Q0^c-ykXE>3GK3|1Ǔ07_g1ek>K/'݊SPY_NX":ໞ]ˬ=\W|I7"ǭtkyXH}rn;̌2EkǤB;sy5W|uB:x-9p06/['ikgn򍠗!&$k]ңZj&5sɿbVNdwf?Ϧcʷ~NWъ,{s=Dѡ臺|h}8qhG\Iy)Ճ5!^¿,Y샘 o.<"5cS)i?ӻiϊr=@^iq$뢥qVkH ʝADen3o?DZ*n/ǓNN-tԋbp@[^kxl>-+UCC\ak2t#-A CBɐ15EӬIkbE ޡ'*!?{+/ĺj\ҹJ=0%J`-A| b6dCrc4_7Z!GX/kc!C|*W2NKŻJb\b!Tjq!/c'܀L* )"Eׂ?OF&tXtEAp-x?# ю.^\!z/~5kS?]_5kOl_,==\,8A8{.{N9I{WH躒ǖxLVB"ͥI hN=}I*5-Q/PnP(I~%zIA:J#-:88n\Na1=TC1g`)M܈ ,Yߐ7Ī9agp1dփ?*?1&F^dcā1| 7PԧqMZ$! ^h*u&=ljs#5Ჭ 5wSi߮N~- Yhz(Z+ ܛ;r_5EX2֗dzX[sle/r2y:ËUԄx JFy M`e;'*uV İ:UbMkhEhW#IUVEY&I*j,=Onxv27O@=Y峯S6(cX1̔pC&-jؐ9"(Sh&~QVm_&7>+3*O|uɴH\A KUFdιp@JSN]nsY:-DU.ɔ>ceJ2_^qgZVnڦßL^]|2(Y }gf5Mqʉ(_',-⌬-4ߙ H9ca!W^zG^ff4ݒVa-UE&"rqڸ*b[+!,Z jZq̔E4Lh"Q$[f \|CQR'Ğ hѕuѻR*l3 *AܚԎXВ(!da eÆ[ͮ|uP tbWW PdfThclzNmoҀPSg2 ,1!HsXSB "6pVfFbГa\_:Z!FpZ/ D5"mryQ2 :U` 줒h*)ᾤrQ6gL%uOB \^#&5S˪sGbOBޮh<^Wv5ۙX]i> k0]⢛OlF(z<#kKEk''K¢[>qI*D~&W{Lkqr#a7O^Tp̓Iك_ t}f&~g lG#ho E)AP<^ TgKA= 2.8]aH#q҇LDFf=R+@^@(0JyxƃED 6τ@UQM23%hՔFK=)l/YOZB$ԂcRBKg3O犃Q5h+FEhDcPr?ל1F % ]@h$)W]ګ>@ .yK~}4UT`Eseofi`'gWgv0TAQ@3VIGJ!$SaS!AHB)UȰ03Zlo`ƅ|Goj|Dw8jPI͋ knZ Aaީ%`֪}LUc&R (o'Sa s6D_m[hм:WOS 1uAQӶԒ {OƟ/DQ1){c_NYLٺ i?i~{dkǓ9|񣉝_{xb{>Ab2 ~]8vY$LbkٴuL`g'|1Ęi%n9 q#=l#H!`rVwE畃@au.(u0vtn a*=HBj@([ Ňޙ4qXo-"ⰑqYLJ;5kݗE "}`kA\;4m ĝƖ (+ƛ^v' Vױ»ylp,ItςFOf7`m  QU+^8?_ZVcFf?xVg!f!6][tlhymUԖ2OկUMaYʠ[.vOPWhh,\^h:#wа)X ֹ)n{X,ܢmP;k[gs5U.iD0iS(&RՉIFQP2 G݉4E? n Ơ5%L#P`$ ËFAO'#4Z+#ȧ&:.8;;s] μ, ;q٬p7s0Ւcq*rYmÑ2)ʉ~?rt҂tҾ Q=}{SXIY_Kt %i>A Y*-*El$R@7 J yՠЗ2b¾2[ sR`~7ԏ` G@@1cD&2AJA4C/QIU@Z ٬$O@#3 f07I9 vrdS[.T8? :6^y`O=q%խ{U/ $"9pjjkFzN,Tk^^9J!s2cJj; *(Qbie$jI/O[/|LŶd SE)u9sTbVZ0u0g afke lq=؛tigUH)/ta;f|e(9BURW݅vK\]O vU<`҄r9J6 %+Y;TVY5I\D+;#ڭDY[jEVEk7e ŲƂJ G>F\`~db&P)< ) QT7C`ȸ2@ݩ2 %'%I\`9$~ 5WKSG0j6eϕ`%TjjN+v/HyIudSk܎?$d f+O7*\#3˹a `W"iѽ*E4*E:Dk[ lɬ6q>x9?gkgz17;2Ȃ,(6;֭ex@!s2䒒 $)ѫ.;iU˖)):-G񊱕\F/Fq,8ϗ1> >L"R%2$e[rZϕ]j/5pzWںu&S,4j#Z I+̷+yP_%9^5 &15 u(8cٽLcoIDӨ(cӨ _cf%ܣCA=|V3"a*)oΑJ"#B(2>EH]0u`feUSDaP[&m 54iI_*B/b{2{j]8,V9nm4=}aTכT L:["AU({D xUkqä !!s$SRw޶n0V[U N1h#L^]ڭIAVEtCtA}L Ͻ¢%c+V_p~āsqz2d~荼hn 0a1߬*{FL`UZgU<&0+m:o>ނf^pwdJ=*1%;)mSµ2;HAn_*#ڠ{'kT5&0|lU}&do A1!#+9o?Wz(9m$Ddlk،1LW{6Wq+wGF!%IaMD^D782&rsXl;yRd$I <_ʶFH,ă쪓@>\# !3$g8 .˄tGЌB? 6pۜ *0ϩ'v*Q=g 5-Ɓ`rlz. 
^qv9ۙP?{A P7-<zNNĻd0ߚdw\9ͤԃY@Zz>4d>׮s)0JR\sb}nSᖡb/PH6ݫqF9pǰare!If%?fD2Ņ vg'؝`wve'آ0.ʋh(MቐDFjАzJj@G\FFJH>  'VYd06r( nM2 BbW daThMr+PKdM0Hep ?{V"G4e9fD_*a'Zwn2"A 2Re4" = oC$kdo`tΫOi(TYBlDMjk }^|D!>^tz|ŷ3)\v͉M/ yfz!?甴Yo/k)Z74S޳ىuwM1EyB}⛠KTî^\3Q}X;_sO!yև#+<#~2Ukq O#]&pH#W!&ѡrUVu(,vEN't LF!(cq$:᰻^+&2u^%:G7v>WӰ]/adk/%>Um^ۚ >_Ӈ5fw<}+bMߡio<H R02ʄASc!C:x^cxaa }"b37 w gVwE٧c%%'v<@P1|'CN Yl̖%SIN{G5Tz`ţEDrd?ܗ5(,Nz17TDD^`3':zs?OBL.nn]ݽ}ͫח+~`7ox^y{eշ^mc͊_ϱ/z=wﯯ”t)Q~[ 4p~NDbOB{XIb$8b/c~;o/.?p^ݼ)HR01Ο=0dS̒mE/g]γ^g:f?|;/X[omG>,Ǿ7=x`ADz0 `y9tO3 gg3N??cwOEϪhC%q|*N+f_% ϖKEwe.[.[?lju^Ϋy:Vjjvk${>\aR0Hp {w5χ}cIwb_e1\nd [8bؗTRӓ0QPNZMڼH+oS9e[ ./Vϲ͕:7/ xi>~ad™= &e̙Oe}vf8_8ef_4pd8!k3&]x)X>T4tv;sA+`ށxBgyk]ZgsYf Mp%^/~Ȼylзl.\p%OQ.f4$a}&q?{8shYӦYX/essI>.QӴiֱtw}\R3Ga(4G\^h:#wԒnGh⣛31=0ޕ6$ʐc{(&zʋIFa%[>Lu{Nx!l[_}:y<HK/=IDe$VN\ǹa `PDM]'.e`8\oy  vTӄOHe 3tמaCu)$o/^d(DjF"V4x,8 FŊE+ʘZz{$uI+q#I f%H3_fS"ܗIVb#lxW1BCRUy^QKt(N Gfb*E)g(W >2+6($U !`l gW<Ō iYesMǷ MnP}p,ҢOG8<d\C|̎6V_K'+g(mRۤ8IqnˊSfW%O3S-!8?1B i>M<+mdc0pŽ=UJE:\c.Ttatyq%1$8RzZ9R(~cf^ fQs qBf)㵎_H+CpRbt1C0e:muj@faƤ&a Wn"hEr]l/g:2r'My4`2K9|3䎈^>|Me@%iƎTŮ0$$ 'HFBS2jL9K8K3\ rڭ?:$~2Pm!DUN!9 ?JM-Vr6C"F Utg\ ׹HqyV;%ZKm_\R\,XgaJgB^@yn "%e#bO(0!i$TDJ9bD*W*!s\:`DiU%>E|<ݽvx~HcDhW9jٝ+9ݫ]!sWzxJMdwbx]9Υ X#QYZ̀y. itN6i^K9!3|^"kOeOcbZsrwF:z:PNxT}ߣ`QKZ2{sbTT#ǜS3gDXtZ?:Y+\5>,kVl~8sL[ }kg4-:R -g\sv [.#4=z9jqa4}iNO>I&&jߥ>]Ȍ1QNqٹH|b]{`A>h`ɘe];*M~]$A@#٠ 8ZSC{S^ȜP;.$ŞE!N8 f U)AmCB~_¢Mgi5+MT,|H4L0Ű`RSm%n@$TB48%Cp,FIUv*BZ)]Uj -g_\,hIܜFP;8fhblhlL_ARMɾ9Ψj0`/-a c`0!4@z_0ܴ Pf";Nv2`d/X{?F5Ig~aOk+Wt8Yׁ it7e6L&2q[fq*hD SNzψeÔP֛@Tp쭓 ,xx{;?įXϭucKotn?tCF" |wdHB;34w@V zq ALPT׶⒦I͐tKy .?*BSZV֪cFm|UpiyȞjpL4G290L҆jVN~xwv,'įǷZ"+j=F;'f*{PcvML/{z-ů ܥ!OVB%Pͬ3*S_sx9K^>ҁ`߼cid2pԻn(x)i@5לg}))tGRАiphpNڡ-*8fӀف,z -(%>k@ҌCӏ,A|U[DPq<_qZ1v#nY Yoe9Qζi)y Υ'WH͛ya+L[CRB' xKOk 62|}f*{TH qIC?\>9rm~ڎ鏟l_"mл7aw|{0g{߲1z}?1kyT u4NpRX֧HavלE Ym0/;q盙c=qc B(g iwY:U>٦uc) [.UT':t{I;Ѫ֭ U4K4:M&89떋Aɭu;ݮmii׶u?8^Ѻ5!߹f: & c`r1:uԱn;dhg-U[hNQ,nh\b!BZ$rrRl-UhŊA 308 1N@9R-wNSs.-B{N%Nt@vv>߾̃!=1.{}HH?*ZQQ8v$hk`Z0sk6a|+saU n\bRKtk$u fDkR4*HK!GT|7 A ϩ\ե{kB~3GU|Kj%Q Xmm4 jZ ]Ֆ"_ 3%!W+ Rښ^k`"8Qs,; =ftqu8b0)HĹQqn^6LZ_ 1N5\" +NL"xԌC{g D8Sǵ?k7 -#=OݵHuӘ8rAEvxG72J֌jYJzG4JM)g#V/ZY45J*cJǰjMQEd:" 8 ha)oܘksAbI$AbIYR4Ql &Lz+s<ђBcT &DOFaF3"pk*"m!Eb/,t3E,&r4gEXX&zB0͎3@bÿ{ſ7ğLc7zXfKk3բ>AUz%?6^l}= ]P; njJdMo],ZER.@ dq-Ń⧇VFT=N Ï`(C%1ֈK\pV(S:HڠνL i &]H5y?}.tg`/e"Ido񫻑}5U+%5;i|5}R®γgCD8a1$XF dRE;I>1#Y-Gq90RRSDub^˔ H"Ȣ J#Zp"9Yhi]Nܲ^<][0͇!#%y%H;#l 4j!:h ^"8' R"=Q6BfPQC% K9T~,{Ղ@0uQR!cga^mrFE5_riS[dMj|IjA[vƻT5(͈( HrŞE6LP)EFr3i])m`y) ZL[B)";4Q0(:B(Npa'Sүvk4<(d%[i J lߌݍm2&McݼQ6jf[Anl SEUg/i} `r!Cyw}GCP $ /?\4;P.]}]_ľ= C՟iݣ=KMꆫވ'×-!"Ɂ Vkw|Q"#fg†m0 z/{APFgާ˃wM@[Ѣ#89l c(9xoͺT^~m,!AcѤ~=ƿ} _ۆ|*SSygq^e12Z0[3W aѐ&1ZW聆>mhW!KC:n:ZK{52|%LCERvf3)ԐZyJƅ!A)J)vҙ7!u4݆t9qc?jBOA(}(h5\m5I7Cf ?k*P׻E4u,~@`t J{QeZW_]bh-V4H^ EnIJ"߽Mțu!TOϷ*\99W_Yz7C߬ZL6(`v^{w?OV|ǟ޻(Noa0D̫2P 9")D'|cIG9a lDJnjpu_Asͺ螷MMM(dɾL?Qg] FA)ֳ[xDn>԰HR\)a;Vw"XMYY6} e#m锹0xwoP6-bǖfDSv”p*e.lI:eNb''A^]N[6S 'L= bd{"#5'tWl:[3P~69M0~_/ӡ0}U|gKË煹mف?W_ РZ67g]7Y}o'vC)cbݫՒO c[kү\`28~үoA04=n f@W^k'ԄKq%3\цum6a:ۄa[g&'U>e(wIBcyq`r*'ݗS]N4}-y״ӿ< 0*x/q?&\1I?[oojw cB (r)Cl̕'NTO9&sb P~y_ _n|E^j]o|jeo2z臯 cu -m7,Dul"BӃ'd˲sD*:m(|@ɘxwk9&|6|=>z8xj\|;"*fӣȎ&(PU!7^FAS"pU 㺷v/Z|W$HC"eʩrM/I,G$wY*KT,#I4lI&K@ 6-@kڔa҂r)*MYHKri- 11R9j'Le,T Yu2A NJ-F_ mer9g69"D5<=a)5&*FkpԤh;S`)gWB֤id.ssT*G2AQ3͙Ti6FEO^J0̬U/%y~X[<.r1i?dS7H _\=ʟޡr~/xC/k0@~޼{Bb-x[B|ӛ $NrXKϣv>o?إ7.ξ];'/0Sѣ>U@]VwR8C[y?}i@r1ώ!/Q%y ZrC0eu8P')R2kx.y_ZJbVⶃg\7W CJsE+)I?#O;awu^D VxV\:M%4.@FDh"?7}j?4I>?,u8B6_C\ @|3ȫ=ѺMew]AXoyƐ!ɣBx#b*^Z׆3T xWS (}%y:{oWu7>h5 (9)ăƖU]MF¶F]x 
s&/"^.]Ѯ6"O_^[k돨Vcl)w74@5BNG̞Bߛl\CӤV0@?ͯz1ʿk¨Cs[K{'F'~zNo7w -0Ww y!dm}zT^g 5q hiThYXME \,rs~/k;ON޷?Ft:p_g5T%Z ap6R'0: =GEQfFPfEJ)8\ ^ŒrHŹuԁ[Y[+;`b)~[C #l``yJ F (RWizN[P6E`}pɜD.ߗŒ1gdqW>g8٦xO6Jf#;y-}e:w U:9a6b384k==אN1BM+=z0N1i!aÀNN[[hAwb*ֆ+G0\W 9gV|r/HO_h(]5o.&Ov'Y!oF}}7S? XtYb/n~^ϘKto?{< &95t5Uo٫ovDc% L"$:k=ks]R9nICr)?a T"hb1Qg4n AN Y聆Z64+W(fGEwăv(/]4+О$y2'Ulɓ:BC;{)yhpx fO.'$e$ϵ#"Da8~2c4wI]?̮ <}E0 z1tUyf?=m+l~ͫv>o b-pfZgѺ=i<^PԘ?"B;7D~u?|} CKu*dI9I'K-9ߕW0KPT>Vr.4ÕgHךʨ`^SFJg}ro7/_F^^z/h-I3W 5$Kh>Z+省0./39O dF*Fw΁PyY:~.ER c[-[ eẈ2A_ u W !+QVق0aSJ,(ukKRIf",eyj1D`)ܽ(>:KF($j )8'MF9!d%,5*YXaR$))*(:Jx20fqʘfJ/3˅PfR Amiwl 6ƻqfDO(8=XTTSpۜpJo>Tz"޹Fq(7=o.>qvxUϾ, -By %T 87czIӾoS'anm/gs1C˧xn#W?^|q7 fYsӏ_Fm{{w8{t~'ƾ}5?siҹ^`U|>Bx&3O> gLrQtmKp\RI1D(՚,Λٝj D" ,dF|ȼuҩA(Zk^J £c0 YpʓVH8 V62pͣsp C5o[:)TPyA24n B{ED#S^K^dWL1$?]ݩ6Rc+&3{ ?qj͵D4jft<-HPgzH*ѷL-iz~z{S/(}\'}}Z宎.]\˫ϱcӯ/~ayHsg?(cNyW~\ݏ&xڸ&5OM]/|&!Y0n>^>>I??rw'_QڻOW^_eh4+elblhO4zE]GTnSJOO򤃹a퉋pQώIT~8Ts# yzL(Fuĺib4nd] z.x6nzY[ΫZ[mW< 131!b^:PQeBr=D&Y ŏstINaR?h3 :(ʱZ;{>p>(HY*׊4Ig8Ty -pPhIWyH wQ pZQQ tqX~p?p7?톓E$r(ʣCĥI9C uEIB;伭vV#ENV3A2K5 M{t]c[SW #˂3Mc#;{uI&u?7~7+^ }R6ҽ;ߡqQMn%kP?ZDa\zޯpRh^TS4g }q雺sEHՐW։>5# }D*JPb)dM fm4{Ǡ[o'#&7?Ttw.JMNfn67-HnQ#ڟڼlqBLH>I&Tq7 bjs1kcoK,4k2aI:*D)&ZxԌסkjr̩SdF<90A3p3} јj&}uT Bd R;xT۸ otw*Dh#ZyUA F^E!S}&d %֧pLG0VDMHi'd:mS/wE5c@uFֈ#}_[0g=m|pIqZ#cT5#ZvlVgzKp+BCQGS| Wo> Nzdr"DM8sj" TzB-UΠVO<ꗟ?寰,n㍳w%@Hٻ yԝY#5RgGs&J`15>rv7',83pS)Bn?>8%{{v{♒:U))xq5u+5_wdTK`k23QJ1,K"bRHg#W UL9XuVcqVID3<16省%ڊQb̷r8x3L+)Ǜb&`ṷrJpC|Wԧ'ΟUFeې,Vۆ|h )`C:VKLd vfV˴y):bqY*f&ZɎt_ef-E T!:Եu#z5CЋ U}tH3,z*c]}!JKFPKq⿬ʓw V SN[+0p*.q-x,)\5~_bΝ4Z"*uBZU 6nQN ѵR+PqyaG ;ˁ!h\h˸(SgX:]Dő1 TTr&-;$zK@̸OedDG rhypB1 p!VhgAxUBہ4Q%!^VV%99`zFب2pDh`֐胐Ax:zJrWf3b, ;6gxm97d(1:UsXYtA1+\e7{񲪢"T:"opI|r귕BX";b+<+NP_"`@{Ge+Go]J- &l<Т9ڜ2uÜ̾Nj[>d2Ehpbg\+4q-{Lk^J $I1l%Iy&9RL%Alގ7Lx[Izq$9HD'dI6oG,\Oie u/!0diCJhFF ,Xa@Zl{g?zΦ>.pLFNZ&a g']>|z 9o!>Ymo w5Y`aãf4} {h#?=z\Ok}Bc![?=\OFWgL0-u 3a_Xϋȓ:v88E?Fή ;b{dg;7Hjwc+o&g T~].y/NS@| "OGqGN.҇9}.]ThY =UjIR"yb K-9[]Umq^5VvJ ZWk3cX e73rIzmcHAߎrܭ̾)EۏVې/Qc<)_ݢ}3 xxysۗ-FyZVz@G=2lI\|:o^9qB{:]w¯as^4+۰Pñp'k5 ,XY7.uQ>#+L918 0zpT|fsts=aqtw<7{֓1DE v(ґtBEPRv5k݊2j`H%;_Q,Q#jTvpvc[Uvns%Dm4{^׆I¹iYΙH8، hI~Sgͷ Tѵ-6.3*/o\/_Vrqu9ʝse 0 (sZ9+!V3{6=5wPC}x滟;pE@b IFcQ Y.7^\]W[0{qYq4y^dʨLH2ҵepӝ+?2L[q"Is7"a/lZ&ItΫ+E?c5c<- dU?W~\ W~x,EE1F0c^Z"I^W c܀ mjudz0WǬl q yQKa$I@3pbpbtM:LŎ;ݛ?+^LQwb.Y ݆hC3q]/$|Zd0a;] U()+je80`9ཪ`b؝hPЭӫWVpfkjp[P 0*]9[cYƕb X5&E10&'X [*\[)"\+צ)׀(ֲ,0YfR*UP~^M Eq⩨4T(c&CBPgĘ ђ` Ҹ8zH-|ZyQzB\q| *HٌؒKx2>D mo2VTX3 H%!X lF%cZ7JxmkF(! ~DA PKH"]rӮ:_ G Tګ5zXT$UK1*)h7 ]C_{R72´(3\43DslᮦDSNK&K>!Kݎ$&X譀_wՄ,iwxΚ 5c|AGƝazqim'?^}" Xct!6>]^$g]A#.ntZA;ۖWρ[;'An}Ξś)"~}ŋTru ?l`ƏZ&p*w&TiABIbA# *FoF۬45 p5y!39Ѱ. TLHz%m̺m1f k${hBXP>F<`kOC%=ׂ2A|XvE{$+ҖJЉbCtߞHcGivs01&t]is*\LXrcw03A~a`0'u]f4 z+.a/eJvQ_MQҚ2 fRZPmEiOj3. k!PhESE*/2fۀM{!>X}#nT>-W EoqDx4 @Xe8ނA??Zu9@>K7 V G'Ti0[ފߧ"uYA,L ɵ],NׅjnM~jk첀ߋ ;O1'07k!u·1]m_+0Gr5NSgJmZ{eyԬ;IWeNg}_c-=kQrO`doY\_ y"%S>x_1o% b11h޴[|9@BBFɔFwi7v Ht"h݆sO=I\EGjE4B8l2{ڍqXNMۀmS/#k_0Pg.eJHAM]5zfA`]j&/\?ުޮ:EstrGӰP ) G^(^՗Â\4|ۘ";>9="xޚOT([QY0_<:hS^k}pn6s^؛s60:[}>;9-N/fSTt3vɻ&nD %I3ˆp"8! % * #)D-3CHI !K . eql"eK3()"XDn1B,a e\KE%͸1FK-H!~Ty8 *US :F,7XbV 4H#BT^rϣ r$1DZ9 R FFaAf3P*e r[Ԗp kWG%"^3u6[ sW7w6302ajN3Ĉi2iT[ŦAfVbqnV7,ܣNO1ufX`:&HA&͹__́^oal^~oO4! 
n2cdRVR%ƹZ%Ss{(/,+aQ/8a3$R4 ߡfsXYclh($]%-.bR` AZKڬv1oN["Gqnuc"]6s6W?o*MZ\ Vo '`"<M&~z}2)j#ooӇ\.A%a_`EE\}.Y>BgTka*@`xءgL~V_z r8PITv)d!Cx8caäqDH ѣ@ Vd #\`CUV}UWpуAr}pF FEo!H0gݗ@ېmL^l@&p+&֝'U+c_ж\^ޙ^y=ռ(Dh`9]D60Y1 MSJ:eq[XaTe ZoJB+*  ^ Qh5*&HBlOn7*TBTAEьr aLJ3޺S !G4S_h4xUنPі-(%=&%&^=ƪ4VΠA!'~Ԑ:Jl s-Ŭ3hg#άCK!#|!}2uҶ~'6JMҠ뺕n~W݌oWڌ[m7Znm[ۡ &Ӭl3\B7?u,I{j dg]}t#QXJa)ײӽ30Q}ӚjǠ)b:c =!9V 廵E:`~ )8</!lLnR<ڻ;@CNSBZ  hMdEoz#~Q,mu}f!Gp|G"(<AH1.~{+?X77Pg.Q2ok7XREb$:c4nYk_K(P@BFɔd~i7A-щvУ<4[ y"^2n.Z 2,i&d1dM+>Y ^f_O .?+ͅddC OΏr[pٙ-O_.b=Zc(V̉ ' rIIDqJ95 .~t3! >݌`7yпLKuKi.s M*a-/慽uʖU_~trG'}ݼ%Wӌ )<FR"$ >˭2ypN)̖2%,ͳ?|W &.ʵ;Ы&o /ʼnm;VP {7NɌ+99"T)Aj9̈́$SGYA.|`J^fbe d..Kqs,&4- 9 ,]C޿;mb{aT,DfeG8 3b%(QJ\2xт יRQ`T:0DKB+ViEѢ3(R\IaL)caa3k%#2&M Vy%f) {h66lD^܁[J۱G7t{=`z#bͰQrTp(B⣡ )ew>q >n02YBӞPdC?X I($HW!v®kԛ:8>_|u/zSl^۔&JSC)3Js\a $s% MQ͒Vhc@5f(lN=d㔎{~˶ YMmiT&>~;\Ld8gZZVOb>` ͜16͋?¹#^ϯRL9 s@AHR;@QVT%] hM+\VDW-sd1K&i<7DS.t/G&nqhhRȌJV'Q8LMKPH~]_1֎$mEȺQDC!p\-kT͌!Md䆝3;?fv~Xggdp' %,5L#)PǙgX\2w^%#}'PjZ'M%D )=v.A)%ֵ]Y<8V;[f"R DdM*\PђAR&zRKiRפ7n1]nvhmCG'.Stvΐ0dxM"hL;EH"yrT>poN^X}{ ǜ=?}9t3\QptW2>`֯?[é훛g!^#Tǧh|0YZ wۇ34k1yurfhjs@ȫO;'v~_> l Y T\~ӣ)WPhsrtzcàdfZQh&:DKaw^xB1%6koJ1dYeKNozV*zA@@7 5A4PCnr?=iej84i O3PTE9.V83\忄O^n'"Sr:=h^sMeu'r374сXK^q!%X{wo|ܕG͊+x&P9cdbUM5(k` `e6;ZĤqHގRĖ]慞eȾ.D*%LmŪ+dr}_n 5/Ri6tiE&˾Q k5v~YXsE4xCH"YyܮNzavK1[Ҍ*q+=@N% dK~>rdhW~:Ւ0F݄G H0Ji~ jP#Ft-pܠ"F~ne ߁ GH9.r.X&lFFFjޢ`vkWMrm x_C6̱M(Cm)DKĺRDJ *txHĈeũIL,Ǯ;أ ݢ!:8*!VGWimuˤptw[dvN`:͝>")o&*4SƽH?W>K fEv (,~[Q:;& -#kb-Ϻ|Ye`_4w[F=,zu Pld$dh.z/0&7RL*{`J7."~SK_=k-C.7uw鶴`O` ~Q^Xqޓ*:̅gtf^FAtr%{/*,Pcl~jVw% I9oW+Ɖ]Щn 1~ZA 7ǍPc'4e<ƹ# Ri}0P nά  a; /x#'B ^ee8]Vs"7o#cAO%+"X9cdE ƩIx8 kJ&j8AOf2>W80_nyVƧzxI Qf9>? ޭJCv OVGG& IT? D˭}V 3' ImA@jult d'g̠ WP\51 FSmo`~kg,9WE yo&@M[Zo~wxvy:^ѿg?*nهD|:A_/'oȿYb5tkk4qm%jwuAFT~2#E^z ^n>XTY$ ~=#.՗TΙa%y=[M~AZY]] |zǯ?-y[/ǧ=R>긘_^s t4Oi9 ;)5X\&ڐpݷN\Ci$9;f/wŃ>B[g$!fCz rwKݒ謕=2-"q>ځw\FA\ZZI9QT^o{@sXYQPDe!"=; B"((\!)h{gr#6l666Cp@C 'm e 0Ag|vn@p\*!0"dԘ"l |F=L5!/\S 2! = !<\hݒ\o?L +o9esڌv! .~俏f(-ET-aV`Z%% H#CM{RH\tIRF+=l<niw 0QU@+1GR8b9C$ DIJ^q*s\\]x!/p-=&+4% R~wãN?dquz|w:Ć݅{pnEM9bG e2I0Y9Nh `PrM QL^JS r捠(k"Ldg4Ưa0 R`M)y/!pk-*ԣ5ڄkXBBQwP_5 ׆FeF@o^W5bl!ѝ MܣbrJT9t HIER2,ft3BCl; ghET7(l5:.t;1jXpQ&vަA` &!vI84K䜣 *b9A(wpSDn"n zR͚ mE EA#Eu4hmvx*k.b@u2ěB/F\_ ^Q0W_WtM{cJol|4l,~Iq;2^3E5O-|b_?R"[.˩GQja=?D{YsS ~? tga2ftv:O>-g8X~ A=Cx+8x Q['5L?q'a˗[֜Ipkՠ?G56B%15A29-{;t1<,|1Mz{{z: uTLd(6l" 4U.)*x$񑶢nڝ8I[@d2kH3_8NhջcZZC׿wDKY_$h>K<=oP ٻƍ$WYp8]v3IKFٴ-$Ov6jRZM5IQ-Qꧪ_)$q)Ѧy.בrB]+u?w)<a8L-[dݣy2 ) ,oa̿v 1h)Opw8&~ VGOݓ * =J7=&J3?}+krC3+ߨQSJȂucV#-gzp5jzup͞W*6]U}.}8JCw1h9/e>sqqvIpQ+u̕D4TrZw)ӓ`2>?{D>nk%B] $3H%̦Ȝ6H 2{"34MrZk8၀K0*`bHj Q[%.2 pZi+W:]vhǹVZ  MT8X!L*()ڠ\Ob+Ugׂ@6NAՆP~ g^JؐTIuiu쨤7cJ"5LJ}d0Y+NI3K8Rɵ7[h/POV-9!Rȹ4蕔OΌW*H#.7[26vh,.SZ(Gqp8O\0HG )J:*_JM%Ae4q8V,a/4s @GR38jhvUBhZcth| _e0SMðǷ)[`S \ww[Bi!>%kir=?:' AxW_5FZwDfZV2- ۧXҖ=\E[p#\u4D=ntӝ+pM=kTpӘk@;EE \KD2z;3Vᙔ!ˍ%`c)&;@k*aYB`5x*Mx//s$O30_k-1F+ƾ)y9,\!=3M /A[G*__;)U$Xj3vTjf1H\b Il&aK+bt8 P҅6̟_C[oJpn@/Fåƈ0M[ /a`4YclsbN\^:MJ%dU3T_bŒ=,jul+L.Ѐjf=A G醒bpGr'T#'}a-vɇ `ڔzX2>3& I4dUf.*a"8.dUmt 9 ZׂTc8^oiT!(Gv ҆ )¢ENٳ #rܣ %F#E w<<:p+"˭)3DLD@`n&MIUތ!bbQ9d&Ce0c ۊ"_|wL"c.v4UdNY(#ÁI$M&]O(XNvHMsR5k%3LĉL( d B H 3ɹr9ihcj%Fo0֐"ӎ7X 4{7FU,wccʧkKۓ IEҴyIgZfCu?B\͟Nj\] &ϋ3?n>,S0n. 
+udW@RM%NhBHP7dro&z0|K8EapkM`ќ G3 In~cp@L@f҆}>)rڴܧ[.DQ-BVT?Y/!Wga9ծmXWgX &%zZ)/bǺ$WW^al\̦~A௓9}~qZDcS7_+BIbX_/O'nˍ7[^MZyVS*{>m"eZ`a=y%U Y;7(ҲSǶw-Ӊmw;b8:-ݶݢ;ݚwnQ6U*Z|L[JZEϷ?|h1E y)&;vэtzEp0M& L;o˷uȁ8ѧK(װP^DgmY@1lR"H6pфs).{/]b=w tbۨݎ9Jeg-:BwkB޹FC'=$Doy<7:>¥:<9<'DN$VJ!ct?z[AɤJp P?L-dvTkfje J1dYCwD҄*+[$8=١ z _$J7׉mBFOE&TdC-.U'oe)g;_.KS\!xm%H1PRf.[T>c9?}~/Ǯ$ÕW~x~p'3ΙcD!eÅ՘!F}{*!w27f_&c/VH`I$'C.iH\"Y(_xY)䫫+51@ U#06I.*8uH堨{%p4TU/\t] |`+qKQg}@-#X͓1= Q[ kw%%dϾc?B;!9B-Or`1i6ޜiѳ :M[Z2uP\xv|m%{՗qeä\ۤ3\/CCO<4CCO<4,Rd+z.Z#4#9bITLE5$Ӊ!sZ*Q46[\up_'[,Qn&9⌬.#qq1 Лby"åź\!wiEdA;׉xfp?3 Q~(^/g B\E?e$IPŒsZ8Gwu?m&o\^_] @aGa}‚ Rc AcMS̨v"K!ܧo752Nyz 췻%cmiHOb bX>BǗ/S§ʾKn{y<`8)<"<,3?~p1Fb96?=ssawu/_R~fD<%PA}Q}Ih_t_*& aye=sXiζA0jŇ3~4d6}|pԼ25fv5%W˦|gm_yB6;#ov>輖wEtN! n]"fVr&# `Tn9Uz1=(Od|ݏ  Z|U8C3Na./)Y+K@lͻ"< $!dwWzF071ȥ d ޫB w|Jq7."ycna*YPL+/HJgPjGH{"Obٻm$Wrödu|KER3:ޝd1}[)RmcI#KŪ)c(/TZͶ8k~ DAqäFj46\hjXkt׃LsT Me+e@ZI9L@q1d(L$bVVB UJ]zC`ĩ3̆kPFSU5U$( K̈́ZTn¨Y0fbD^ GgQLlp,䅛hM)9t`n24\j=ѧKim4ڙZ+h@ g.,iMGCz/˿}'SE3!OG3!F4I(vh[#ɌM-U4fͷ)Њ9a!sNv}t9r]JcU Ϙ14\f xvI.L[ϲwZ<L?_]?v.>^\op/;2G=U`Db.J.I393klr=`Mǁ/ull y&aS7;xn~SW[TWitx!Y7M;hũ]q  r4#v"cZ\#QVR up𩵥ft"m Xp3Xpf,,T8hM-8΄8@!>S4X1Q0-L_$IO->*O9{W*ՏZxʏV2Is6z){hQ.Fyn[yP2)iBn#<!Lɒ3{@"Eˎ_ׄ豍?f/a6-E0O1e*XҢ£pX1Ur1_Ő_%8ښ*sb1L͛N,ARPЋu)b 2cHc@ 3CfGB4c!b ZFU) z cE"4%ѻʅ8xCϿgj1  rR fj'33vWq0ݴ (P$ܻ#m#:0 5sX2yk*3Þ 3/1/ܡyK iҰGi+1 ‹!{.+OaPΩ_hIKUOc߈HwdU3F+2?"Yۺ᳙, P8bMA&ݳ$Yx3N9CYI@AL} jf>+g,idL}xQ'v -xc'Tj6As=fETyk4%{(szAib7LFDQ8zxS>PQ< pRi~>Ӗa-+zzfEwle/Q \TvjŲo$CLr>$CLr>40o!)wx!P : I5_VH#_ߝT/+"azw!E⨞]ʜh!Y~ />ჍVDk1.(8gq'5w x%GZqTPi^TS yҜHB SpP/'+]y+¨-jxmAiwAg\r< 5釕V'UsŠs#'t$YTBroWhu]!A|LFB! CbNj"EM VB)R{+82j9:}f Sf+b1LqA0U;a"ITv,~zi`Sr9A/з|G~Qa/ϖE+ѫ@'n]Er2Jmr4/8(vB$6 I) L32cLOF|/:qY,:q_'NGvv!}}jTzCݗ6}I2ծ(TWe]{iғ{I%;DHօk|>bEƽq#Q!':fBWA,kTQSA1iAո}^.Hk*|^Zњb ]0^ ZjV;iyQT8E㽺^^n=jZ(4"[9WZ53(AiNJb-`1]- E۵ k "&rPoP΃3ъŐ]qAIzB#{1wZ д\kH;)VDOX٠}̀4N&+uBglK0p"ȚXpF1*ѻKD I ᔝLI-fK JJM\!PеFOmT!~\ɦ$ma:pd. ނS,o/$q v{I`ksyy9-#07̌! 7EԷpD&s+췅J12CDa[`C0LX`Qs謪חlԓSz"E-1B%\ S(iӑ>"#<ޜgb@`IRc-M'\B|, 4:|#j<|]ȝZDй6US9*kT_%ʉU>cKKqLp}uHUTQ2]qj HgL2cd)w"S5hW׉"ssjh%ZHaه>e!.el]bW"v> J1U+@)Dh*EkBker#}s>|9S :4\6ُ VPzmzw_Ez4 {Uj{w:6qb2ycSB$ Ɣ!?UZmNT|̀*Nlyde)f"uGֶ!>if~(]} r鷾ĖQZ8QB2rs{$v`&fĖնh-\7N>HSѐ o17\Iw{9cL5KczswnH'OqYC+d~=s3 9czD_ܗ4 Е[58hwl7]+;[f,|8smnu?\iOlZ.jxcm7Nh-EIv$!Hv~ n}j>\1><&ր SQ?/ZXCRl)'W`R5 gCwp@Wd0& A0~:PY!]N8xiP"(4 K @i\3̈́qL_~WC\ $?2A*Yh}yuŁH٘esxtq2;>r =*+w'2=7%J B \VO.TbsNVJ;Ș'!V-㠤dfTƤ&z~SMDQy0B9Ɔp J6NBY%_6;K'[d:W ; /; .CxqI?V<<9zip&43*Fd0GdʨPo%!f`Б}rԨ1ftl?m<\s0*%_x/uUe2fͼ:hh6\g1nn0u۳^)-Н:7Ο1/D:5>c-y|w:ܮ~\.gSbBB#ҍV>!֞cUYmuׅN/qa]?zV1,\WA Y 7,diw#z\ RL'}J1QL)[ y&cS̿6tL-}Fvz_,Oa2;ѻnY6lՏf"v trǻ:l2 JX 76~I)zFpK#: KK&h „QiUREwRhrP?ISrbHٱ9`K2]$Y:i#N1X*hNRP0[TaN.eXdrW Pn\o1.&F4 ~@S`XMiȌhVz#{M0v!`_3-ڊ6!/ii`QVcg&EA#J rm@ 娦ya)5t W0QGZB) >2ehE*`m"V:ᝯd TYR4ҺH}9ET$zLx08DSN -ZύVLPf . "dORH!Jy h9tgLѲAv MJ yլ2:C- ['fj P:-# Ɓ%hl AS(.< =q֛*rtb_l-HV wX8FB _Z:SFzܳO:"UDxag{eyK|I#zb@ɬ ; VRҖaj!$P1*[ZkZy[U𶲊H(2-tY1t6}q}8\Iִpom#+|9jX\c~I0h6cm4"i{߯Ԍ(CRFaσ.UsT SCƄRKhsKӜ' plEps0XѝĐ0! 
Zjα ޲A9ˮnV"P,I͸k>](ٵkҼ|Qus|4(M(u>uǗC&hHcMZvw ڳBJq+bCFi1cFq}X~!oNŧ*`ws*|F/X0i,[^?;_6sۣ9Wv}BiOZ W^B;|U;m jK[^_QV IJ;RhRVCp' ![Ju]k]ˇ~3WE[Cwh K x?Os@8_DK/{Kh5JT9n%XJ`*S(KU,t k9̝94w;_e:!Qpt;ClZ_s[kMƓ- +t+iPQLNԐDSwjD5' 3VK9R2}Z%J~@BV-!qhG r}с=$ϟ4bYqm<;{F3"iEޔ);%Pi22Od9 Xb/_9k=-dM(&_4 O?PJ!ǚu&(.$6s7RƑR))XLvRڭfe(2:o͋>mmc祫|d2h l &J< |A%$J0TX,@]X~3MoGō1 UXQ$h2C&8`00,uB9,HbY] _cR-<"ܤ1-D*1H\B pUT$ EFZYr0.!@:9XbJ "F8 c/1 FJw'pcr>AUEGsD ǥLAltâ -ϲ:DBC1ץm 7!,j[Q16C+/ J]19C$P}(շOi=Pv `?83;mRk@\0s6gUZ!x=p8[tT1?=&gJPz(ь2zCS6[,ywFа9Pb85Ϝi{hB7za59Ac.QD4Fbg042QbT̨)'ZkbIr)8"b2ihA ­Z֍.(ךi& EyB7 UFAGێJk\%?O#J[ycŃ-t.\cy7 gqݳ{' t7<דt\- + ;WNjuzD E:kQ^9{S<ْ~>ο̖Ǔ-(AJW]JRUwLk o;}3vY`:}̾q fdQ|ֿt7xxHEx{UP]R.:bعHca 01FWN!2Eiɸ5+pEX>|MY HiV@DMNt؞I.GFFLukˆ\@]7C(8u?ױ{{eW[1 seRAϩn:D\m0h)[cMrěΒ*_.^̪z-PmBaS2R߁ 2.)Hs>BnkF1lTbjJnSE8D4S1Sx4)wE<ǒPrNDKdx4_#7JARG엇 ,t'#ha)T49Ju/X}Br/~90yp+[lA֏fvSLIĵRs2tjHJgx6X0 C)5 ;$`Ԣo\ypEw Gel~)491.3|hOXkO Tqlg#bt'DNW]$_^unjGm0=p Y_^]'ɎxrݍD_C!!q'Rs8>0!u[~k=}u'ǀ.\P,sx!|W}S BR]nހ*ʍAV 8I$RS4l`c%;G8bބDјF!$I%0L)b'%*E[%!bE$V52i5x񕋦'*!dy= =C~O3}|&||-۶s+ ~DzA%v!@V v[221qԃd@d!1,muƭ/נΥ3SXZ[1Cc{dŹDng_Ie %P)-|bFb$w;sYjsc#,4O0LPG;x.S vδP+oG(h('-oXXTDƠ(I zC*Zy}kk,7 Urj,_b]T E"$-@`[RL I4'MtJ c*.3ׯqx Utj.Q$ DVLOHþq%mZ > \o=@K1U-;t}1rA ~8^9"M ӤeQ] qݪ[R,Ua,s}1k;y][uKd nv)`ho_$zVs5]g@x3&wZ@z9?̜͙CWƌr\j8n2ٞ8p~0?x㛯ߎ l-yNA:H\^t2LL%ȷȺf~S#XHy싧V)'zԧY^T+XI5Rx=wzxh/ڮ\!YjcdYӽ3}|A]?5^};OwH{ERёq*x9BqVx%We8xM`+K`da2(\>-|8XC`;w/sK;?,X 8#tSI!Sc ~>4#ܿНÎvӸ ڭ)!t:~ ޴[{ڭ y"H8>{;yWBڭ)!t:턾Ԣp#kavkCB^6FbOeSýGJ:M'ܦ sh:GmꄋS!DۦܦڐW.dpWBN^դ"lX ЍR k`Օ . 0C5NՉos%`9 ֫pZ\ Oܞgp=.Z6Gc^nv j B?;EH:|{:Υwi'\b=[?s'[jvpu%Îڐ! j]pV~W80L|n6 /;@MdFh2ѼRHRF'ن0Z;ᝡ1b5" p?GBiNcǓ˘%z+myB=?yK6k KiQö uq,O{\y(|r`1=pwY! NŊ-M"8_Zx]&)|ܞ O: O꓾˂bg3X'%L0Fbȧ],-)8&*"(g%[3,EEN]fA:jY!On-6 aqvj\j)c7.ÝP?N&=C4Gknn7fi@)Jbf4'Xl#z5#\x*I^n$Ka(r(xi|~t 2<\/5ԋE$( 1ҪE ŤBˊIר?IީWwHkCz;:_I +8F#2br(G$탰/)}+֣׎zY׳EE#a{luIEHݧ B?ƌC,VLQA}]9Wzt>^kZêEG^j9]SY =HƝ]ĘTnY ޫAQUnիu_=p^2P-}lBLDmCzdk[^ P!K1u3\ CPvo{+b~7cg|i`:}̾G5㛛Of~;ŷo]ͽ^ K_[C[X8bpi|U,S*Ȇ0m;T&Af^{Q)TҪE"gLQ T{ 7*&ѹ]9Sz{}"WGo(m)ϸ735ڠ<캶="VdIũG"U_{H3:q;5G3{9J];r Z; ]~r!# ((R{F_/Z"ӵaE A޹(dzsoH\hjZp 5W8Z8{(*e Q]S Ñ&dI 9>/Rw 6 ww\ lOBy.`^\/ p]79e@) XAͶLily}I CˋRE;G+K.~ztPq.XH'#k8h+T PD0Thzd&E[ÌTRk_C,# x5]jq]lg \B^fٔ;-ɓ(䗋At;:UH:<svG X 76Ͷa^{{t 4'^|c*,_omIʟmϖj7ӎ(0U;Ax$۔b"xQvGK +8PH$b9f2BiJ,DO;qKrVvL]Y%v5^ArCǔW!zte&OU7S8Y1dqaKhlRYǕIʷKmu1r*Ɇ^&Iπpy>] F3 gR*g$2|J# 7ؚE(-}rτ3~&!X}^54ClBʎc,!\",jۊXI; UOw Hp&hCZhW}qQxw BjƐMgB|m}J+cc,j0w͉C)-zw>kg،7ˏw_\xIʮ#ŒDs-˲RoeaJѨC1YcvJ 'xcAb{*ۜ7T:AQ?tJUqZt8ikRjQBZ29D_ο|9"R O[/ ˀ.]^}Eqޔ5k;G}8șAϼ9SJ XeNkCt4h?E~{Hi|:Au __n%)߀$pI٩99-]D]3PL76tR˝F)@ёn9^u8];}wPt jfsY9eDו`;zRv%+SKC0B+J Vb +Q5XJ?(y8H]0;ȩ2 qUj*Z;A xCÃwP4 'tGtYʔeES9$5P%PY.*38=PƪL4yfͧ22'?-FԎC]!PmVm)Xʕ @nk_1QK4q0'Xs gXXef%>׻SdQoz9[oijfxji^qy8f{p0Hi>H{|o'Sp_]ulQFqYamŽcS`ݭ}$[1i%X=haCktʠRL !}մ}EKJǧ7%ҖЛzILb*Aaq~olkŇtײz֏?Qr|-m{ %Qji{)ux`=z8Хh֛.V5>zlt >z1NYy4-1Mk]t䈽cNpʲ8ڞݠ0ZNF]Nɐ`<ՠUn`{K//ϐp٠dʉNHVWs=cy*ToB U5ro㙭ԊY[CH*sZ ~(J䬍N|„N9DqτَlۨdZRfXa)3k]|'11QL<煩GRZh.yM#S䳁{u χ"?zC ZE1.'|asGKo՗ƮY+ILDY{dE~V()Ɖpj LCH`7WkBW)bh6{cڴwi;A+p9nA%s jJ*Ly/ C&IծwsyQԎw4u#OlFHmG#>t*rS.ڊ xL:@d{L\zzChY3eL6R秠.]W;OFSnr h5GUH ^J~\ɟ\%F˖YTgI@cʐ3:]AFSpD]ᦹOmB1?_eݗB{@$bB X;"ˢLlOޕOo>z= Fab@@G~1J]Qख{&྇_bE &?|chV/Y[|w?{_]}, U@DYjh阮ߣFRFúz*K(#{TwL 7k27bde59^ eEPr^j呂T8ЮT#]O}`HEgwק~X& #Jf/=<9 @_#c2t0/~#P kAr@g+AFQ}9! 
5"R (J"fjL)X_Ѷ@Z, DQLlĴtQeX \ 2LR,S!-P5tDQ *;N5SqCl%eG˰@ &sUZ4.RVpLo0o'5^ VJX)2͂Lvfܪɇٺlۣ\Ϯ߾yC)ԼUF4 XMECYyDfCm*TU^; yX'Aq~WjC7pLtah݄0W)4inJLvcL{|r@jh}/ hwr=3/[zs;& #f$GOl(Ҧ QmG.DE[aI3`Vͻ9;.NWm@@x+؍F8/ߓxɲ-  +0R"+% ZxkUI]5,hieHU+z/5x.!^2`Z?7qB w'{!Faɴg{}{ªeIט6?O>>duVCoc/Vx:qW0x&?8;hMu밲YS"idu> gmJqt UKn96V8Żq*[.).mDF-ޭpͲ)mc+tvia iz@/$na3بl4{}P5$k!)*qS$Iq̙ƹ %b%:cZx`aˈWԉFn-Hw.eJ6-yqb[c#2aʭ%7JrGE8Q$ Hp!@a ZY+m%q)˜XƴtDSo|%G9n%seah!_jQ!6"hSbD[πbxhg|cy1 n ث`O>~yC\+]`u휹( [I¶a\$ĻD}Ǝ6_ cGwb<᫘ccGd;Z-S:N_eѓh{g "Rf8Vduƽ8Y^|<]yR%rG xr4$oؓ?o's-niɰt~iw4B^5CP{9ta-п)&$cҭ+!4El3*_|*̔!-u)G w͔ $+-3DiIJ:Q>(cs !0*PQphR+j˲ĄC}{-BKN{\ƂPc-K K^HL~Ճ>;X2A?d,ggZ2wcVD0i NF,%# ʨ»G" 8('E#B|ahc!dEI JhB1\SZJTZHUp 6UV[CKu'$"U :'FgP*S!!)ڙRl:T/-eb b>Y/ 3S)ȹ݇#栘dō__rRpڧ'#5-6%,ñ!Z‚ ҔGmݔs8}6gR~5.*l=2׀!<*`_fF ){`llv`)MYFE:6WVsR!vń Y]:л$" AERJý*1(oj,0?zs)u$ߚ/s{oeo22qu.މ=\9K)_?_?mV=Hufrq.?"oW'NgrvQǶSXbXG7?b+Hyy2/{Rϣ\AYanq{ <0.aR|y GU8łx|MCWn} Q]}DqhxחB?y}]a77})eIMX`Fޥp=9,<r ?y-099[̦gM`ͦ~}7U1aMŠABVJo?*r0؁,OgJ;UJbS5K"ٹX+uY0 \RA{z@S]k ҹi k ]2thn& \P ]u+ R|Z sc X XbP3&,]T'N.pkqI[U6)8 W& 2y0AZǹ8T{<!TFTTkPA6cԩ #O7%Qq.]WM7EVmCBsM)TO FtRǨr}Ȩ-=q+rukCBsm,SeנdͶ]mS&IhxDyDO?!Y7A,.o2va]?͝}_y65'g' ;y 6gO&dEԆ%͉U>y ['oOE}snz]~&|.S)U,L2m$0*lEa? zAxDϏ~Q/,# 2Qo8>qo |sAȅA8G>>Q11!5e,~o@7Ӽ1/{JuGؿh#kfUE\FWa=[jc;~YVR-[E:\AQŜk:nʹ\2{VyE|5v\LFwUٓqyM٧|z6.VCyzTj짓W'{8yu4| Z3Ğ7U= oɃmȇ=:) DYu-’\ TW-fvM+wfq:*LEB0H5 U㵫(w1\ȅJw^9qw9^Us9"Β*FU2_JkJn1IB>yMxJXA{'5k uP&R d` ZG8D@ gmCDvQ`(צ%bكFY!y}qxy5}\+^ͽ1ftsU.Bwn' F w&6yxo8463rȴ95x]3du"c(ʁo+BY@e X('C5(x)*r X*s.]ibĝE r`-QZj$v"Z W-h_ʂ~Lo`AE(X.X5Jp^r%D(>:ծVTcB(i B%Xi\R9Bk /J멃G1rlALِNO ;ψ7~Bܠbk0@C;ńLrw:u6)&$cҷӺ;<$b\ ()՘ض#ciq@CTvm#v!!߹Th%2(\=i|^Y)QnV1ޔأS>_g-XInYu.ٓ;'E/~^Ù'W¿-*|>9?;D"Ͻ%*{Gs3=[P>Wb]] ^vWfŝrr` ﲐjzDuկ"'փ {5s@eyȔ*) #9rU>yE;7[ 8skB!J壥brkޚ/* .7EUKyȿ{dK~\,3}s \AC_?_? nXU~'YXGK| |ez88!t^~g濬"߹[nq{ {b﫴G̘>UKa};Yx<+ 6A^4T܍)t|@fAZ;N.l0H9oڇ5.*lV[1-2 f2u$!w97zaNjTC^oz[.QF-= j,KIf /}Į̐GfvQ/)]x42f0sQAU}_t X!MP8yUŞ^CfvdM3M@jPK8:O-n sa~1T b,wc1. Ic*͸Ni\nCBsM)X|!nĈNuVo%O4vukCBsm.SBHy ͔bpeT+rE]m{1]ÈqLJkmyōc C0CF?XHWE7{KkA 9|hھMK`|`KCީ]@4)/a`[IY6l[H%A6:)"aS[sʹT]8|[*`$ѩ́QluFTc<ҝɶ| LqRI"|&S9~B\|񒙌~&XƭNHac<3+6ẽwKDŽzy{Qoi98* ݪ N Sа}ì(w*&}'G!\aϤ@/@?0ٓE7\"N;C;TO`<6ߒrU<ZdEY2ܶG{\p Zm3߃Q /g`BJ\h r8A*(8\'[+W咻*AC8Wcdt7\U=GQI>89Oj3}*v;e b|'Yز栙?! 
db,_R!` ²ʪ`^|*9UO'tچ{1+lJx 绯d}̥!3>d2 Y=L2Lr:KD$2sTj-d27҂f^RBFq;}9z>zs2GIԁG%ؓ(_6yƢ%т#&`*u*^OkYsG0ÈDi$GF`q\n0ips̔p4Tܗn/)gXM =io# {bQƖs$[lrh[͖판-vݬ"#hc28O}hRrp)GFq0bMk{ X' h]ؐDqbG=QVXڲ#&}RBjxJ;6qEA $v$5*X=p6.-#M[+G1N`< s)bآwPZb|{vl_=@3|JŸvU [qtI=9:?\`8j\`VOTA4DžlQA{J%9%6XTtܳVP,)*p!- ;-aտy4[vZ]u=03e P';VpBNQx++C:ZB۳è4#Y7.{X{m}z <0lZֻmiH+ѧy޳|SZ(ݛc1}F]"ʉ_(Ą8>A~.B`FPiTC,DȧT5GP =h\s`@ 3U3FID 7u^^z?V ɣDfsH )8>,^٨yRR)$4fV5+=Ξ7Sdz˒Xr 3e^]h/).>knه3|l*eYɍ=-K-^yXKdBˍ \ȫlPIMmv:wHEqm[nOvx4JÚ')MtA05wY3<.MLDTD>l%LuNڍ<(IX?1Sr|fl)g냴ѸۢQ9\K9k-4ki!N vR"gRbKXd;;j{jbSla\REThON%$>!o-p8}Q;W8zY 7dl͂e|~׭ 05P"SĠ"&zUF'ݭ1d^/>1f/̠zed+QheYYl*vPvu%RYfkלQ>QťfMZ-PjE:\L3aD850T'欐hKuc!R3&ekmv"n\PJCe0NizpKj4!P{6Cb<3/ft<=9%H'߷iYWq-zROwsgm}X|aƀn[ElkmB.PJ?粢)X,]F+@f(OHʔ6#wXPGƪQ`/"o.뺔-fkZ\J["gKՊX/4&gL~&)bYlxu*>>`Fy-w}UyqiDpLi)::~dCUTz\%Cwjh~ذFAG'H9'p M\EO,0 ơU!pE j)U߻g2x.?|0zbqxHFBאd1;ޟ[tonRVl&BR ~`UNZxJWr r|MWŽwuY)cU?g^2xq,]$Zwܳ𩍮Ź>˹\0i_ūncdRg둶:ۄ`j]$b:֦mR:&攗ԃi+aɭ'.c5W?05:Np'g}މT-t&=@^bxq6(BV/0?i),iЯ+'oreEC㵤\L%Yz=X:Hz?|K픴7ZkMߊɊa6~ H嫏ckP,3Pu3ޅ cVIGvW4]E^o8] j_P .‡Q 3e2ֻ'*cb͡__Amɐ?²-zVʖKɞq12RN4ḝP 8AG4%Esq7rXcgSD+rLjJf{q3n+=\QI_aͻmi1 lL薻?AR5,OiD c5n6~|>/bɇrFr>|Tς/%*{:!Ϻ Ӌ!*+i ..~h̚ XRYbSEĤ((˨(cYa2;&[g\jg:QME.?ߖ{so}B&Uq7-'"1KThmʱFkUJ$uXf\?=B!j[g={Ҕ F?_#_4أ7 ~aa{MgTJG=זqDZ\.MjZ>W<4pGoFA\JWFB~AK|+x%:R󌧸 8ō9#9CqQ>)%X:fQr8RFwZ^Jwq>KU+e`_$J^NI6cк9OQj~W14v|:ZӴ+{wZ# 4^lp nC1ad"fLX槂 /D?1 0^zrp3yW`BU;I-L]uҦ:R2Uo;+w$j;J2;q _5%-i!"iOl('D[+wN Ih (#Y $e p:_dye ڂv #>.wׯF9evNjFձ*zr8&@s|`ݕC^|PL]1R6BGܐT Lx Jɲa}\xʌ3`s?xu}86%zQ]c"\T NN_Eg\3pJ|YPiݢ~WY^Nw˳җj"S5Lzqe$T~e$K-0_:Z!&uВahJaF$eա@YVO ɢU-gmXbO+]TWrhȪ۽ܞ"p,1k]^hۏH-dz|q7rZ06 5h e^yƴ2g޶.R y(_?9}ݽG3} d$?bM("SdH_V8w<#ԏM§󟊛v9\]d_*FKrqSo&j&Nh:]ٳ05󃷜J31 jYga4fs=Z`p5 {VEh9T񩧎=vV밾s2Azo7ȩ2,{o枂dMFdm6JF1=&1+)GG"hݞF6B{o{Y&6$̠H30Q(SJa362Y!RQ4O~OmŸ&7JP~]}Ň[q |пBRh`dj֜2>+(QojZinba;[:qGDȑGbI$蜦ʹ1z2J碌 #{2y99^|l~/dY-Dz)%Q"o 3KaGO,Ȃ y7oV^Y#7f=HD5tC#b<,u*'Bح#2SEX>$ʮ>77o#g3:.v0e=c1.y WeF"jDFeIé Z` ˌǑx1XmHBDpCvIjn@?| otmWmp4-:\ʬN<|j *_cшx5TA%qPeݺȻr/F̀ǐ:Hj"ǑB䥄G<\d_#Zd8HqPy:gk$GJ vJ D&/4=| $`j}4O#"V Y'=f$\V4w[j>_88+'b1)D(YJb5!gXN) vJ$QiIqLQuJ194CG&(NQ3\ LpxeDY\AY'Ɂʤ"B'#7uT%÷(%rSp^ ]0,T2%`` CtKOG)`Y{Xdh?{Ƒl6l\6i,2IY.o5IK$C ـ$Jt7* ѦG41E.U&V$Ic$.TnN\EXѨW!I"R.SKg `$2gX"HEAzJ Q;YZ4Ӟ4 X(oy3Օ`Dqnt<OsnX,wjZSwd\^@A08 !)= Ii*dвxRFCw2B㇃~[q٧(V&.i˖*'r  V$qШT07歯XZD0PQ+4RA4&RT$F}emMXQ53ٰU1pE EKAr-8jʖ:>l2lyRB;MirP7.Lvh|c\輳LqsN%QtTBg}X_)W ؖBN-!HϹ: / gP!S^+,P{D];g޶Cxt@O#v9Wƍ9ʣC F-bC>OAh먮"^, - Z0TP49g D`X(Ip|̈́6Vẗ́4yy?‰D &@%&PDX3~oS1iϩ: Q /H)Dƒ.zTʆiU+~~KDpB EAU"a0-5E|OzǽXmgW:n-ͧ?~,ceZu穔,fgb`o)(#H*[aF Jou[s]mkR\m9UYZt*8 noϛUf/f(Wq* PC)46y\fi%yWV_.AU;}&t19cF!W]q|>Bgm7xԨmRF= C\2[ŏ\W3Uq7WNG<~?D,#x7C)10JI+'D/3qm$jc.W|/n1S&s;-UҺ&޴0}+SXL|ܜ>8O:Amס20s -U]==Ti"2Yn3㣎*KxlbS'X/PB]!_ xg>QuUx|f (9Ц ZG$*&uTa"*>.jsIFTd^ebi$h bCl,h Jo"=4 bB`Jm;toFq9O'$ZQ"p8x[}'G*%’VgYaBIۭqE!*HZ96ѺM{Ǖ7E&}]㺦IU'ι Ur( |v6uwәoYyR~ 3O*~ϊg=͡m|SZTgeo{`BPt~PStuEA@!YV;{qA@j6~)J9EGӂ\wGwWF Vv^DӷqМhks>Y5NPODPڒՎ\#w>ͳ+]|7^2k.8m.q2JCqN0K%$#I9IP+ըI=dCȁq(]Ge2KY+ӧV ߭2=PE7j4@A[H%(Q8c"*%Mzş%qx7eZ2hfa'{r)YnWn~mȿ-JW[ZvBm[[Q.Zˆ Ywl4[{(IђC2ihrx3gHTMF21qh7 7v7zm(pr1&aFb>\Y&`\xUBk)RNIJ/w4UD=sdgKN7R}2>,*uВ*R*v҇2ఴ|^9,r"֞,ѡɌMftegtXqinur !hKߋB&6GXc]ė0amLf@!Iy r_PbQR.DC2p9 Bd Zͨ fDw+njR 7 ֠hA/d)btB@!=爡C# BA8Q1GNj Vsc^K^Uq B)Z^4_.IFbKϟ|ڶ6%uk'QElR=^ְ42X٨{Q(%D@7ARb9whѵ3-rPὒ1K5T@@_z_˲ d4H1+tʫHϖϓ#b:c8[^P[V;׋ds`6+@Tt̒;?Kw:|vs'~ *$et=y5N3 _yy#{V4D2Lr(ׯz_ gBtoYUOpwޛYx_a]/3_Lt0BzY_g=)zo>,Ywv~6~JG}㷧v4:}3#j=<:C:vߟIN'/xL_EQOpzPgn;]o?t>͖FqF:vN~|sü=y1>w?o?>l'IV4ӒEIx#/ӄ;0ZWۿZ~. 
7vjbFvW~|c?ham1۩g;Yghu-Z]Ȣ9Bw%G#S~{^םTԓ<96I>_,lLR L@HMRN8|`+33 NR} $v7O@޿wkȞePԻb L|m\~@ʩZvMxZP ε|mJTyJr.(؟`s2}/~K $'s  bib&v[ZoaD:(QL+ À)ʐ%1yQUgr1;\І0*ame*jB^KfnHv/mٿ덙ye|ؽN%E[,˒L;rfbbw.\rR8 @7T`]"ºHEIPbJp\",@-$A \"ԄTSk9D7!ąq2+ua;Z, 0H`p XjJ#%S\k+q& #pN4 ȇj;X]b-č9|ML&Oa" R6Ryc;t!V鬤KL~S״MrO PJqC9HgKNJ !b&R"|{A_`(*c#" 8D b=F$kLև?^ds?]Hֻ[$MCՒ?hO}.$X bk%UIԔiË^2/>UkCgNU5ɴ[3(3Sd Wʊ j(-]on \2Z-M:HDnثdӄQK"jM-($V2~%/i8XgS`BMԘro[(6mc$1` bR[igk`bj-U 1!1h`S#Vb!mKJb곂V"R|`Q)': H`H\`i%XBz,$6N Y=Q`f0#Lkv#UJ[b0RcbNwq& :Y˥>?i?KUF8&Onj3#PEod=3*K@4L,_̂(<1# )u,%2`"&v(ˀ2hD[%]H^ă.jYi#(Q3$%QDa`0%,uQ ;D`7`9N< ar?WкC03ޕ(ƅ"}bg6.)PjCxVgQpEh ŚET쪑Z[ho9΋2bKbTd7B@V.D2OoxMt*YD]wF\'H+"7ݏ]ϧ]yID B~l4 ?uwgb"{,urŵ'v1&hхvYSN.>#\<^Tl|Dz$wDrgӑ̚ӑ(h'OduUeMqu}/c\ (Bǂ16Y1m!HH]\P!I=\ iTCG=clr軠M7tA9ӛ(esn oJ'lL:[\+!.cD)3\F+Mz`Q"u2Zm e+*$[QƋ^h2 VY'x_:/Op6n}ٲM^I^y 6{֎E,*VjuX/"\5t )q \B#Wq6:wsUs\%%ܗ6VHƮK=$8$ iPE&I%6W 簍NPħbfir\2*WwW""<f_?#Ւ`UPJ}aە5*ip'\vN!=)t\1>=|.n򴀿4 w)6f'&¨5VQ7ʻn:,ݬh>]rznɩ\5-X!څ:uK:e:J+#$Ts+G+ZsK)cl" H:Z6z|o±0;z/_{sjN^eLq-m}y}{յ{ \ *2_ƌ9kDb`e`blHdcJ!j> PAh%tuT~=>>8;cR kItsNppXJs=ЀKo~s5jw޶R2з,8㝯a٫Ow2ic1`&i%oA 3'O/:~|GO]c۩1zj >vU3}:;ڭepnoK@~l_Ҡuփͻ5r! S Zeð4y` x.xwphNp0v] < ߥG< C~Ao `ã;Ъ~a/A6A.LFo^A?_^9Qh̪'LyGćqNE@K|~勗σ~ }3S첡(e"gyyxQuMy?7K"8hi2 a ܏.71~,vGZ7/:n.sg;`Pr_nc_K7Mmʩ}?2~%;ѿrCb.3d:]K[a_*SE Y n4|Lݳ [oOg+5=c?໦@ '5[ Єf~cۇ_O@#[ݷGտ{9tj+3fgj7A^}7}*W/Zm߹djsMw̳J~rwGouɧCO obZtۛ2zvBg}Auo v7~z>[}wyzu:k ;lKo֭3ŕ'L}~Φ4k xھ/ZByhg-Fݞn%ȿ=sfO[n2J[`Y3Of=} *{/;yFDmV̥i矎]FzGowby;lvι{.u~*0ag?0yu=~SzYվ&\::JVǂ7 [.iкϺw_w{<~Y;|| DL&k2FWnDyW~p {Ӵ0%%g&2*MV' -ǜL}{CG<p#^,d`.T3^sWw;^vFG0^;Osi -SW'y4yܫAf7H?z={mwIN|t;K*7 ߿Qw.\.B")z˷x˳Z|1z7c1뉏31}yL :)?{^{~|?_?MOև`|ѽa/zG?^}E{ы"[܄qH1#!f!ŭe}Fn'F(J<38-K/x|M/B_X`f5na5|oaQByNVmE'kVJVDYU7X(lY#NAKT(B$ v42(Ql*g6, o›Fu6, o#,NfDXx2}L|X)?6,BX86, q& ?0چ 'gqo{jކovNĹ85&Bm t;Ωhհ00ṪF넅 8ȯD 霻c-; Z0iT"#LEaW*\&{o ۤ'`q{V>#mmx6?6ވ+MݶAmmncV=x!q!xn*omwnlfۆnrfHyb{!o`k8Fg6 ml]9L$!+&ʉ2C(O(ܘT/vw֊ǝ&"$P"!(&U=s~ˤqiacQ.뗰XRnpAXA ʥ}#eD߰ED=_'F$="B Ʀ%^MԛƵU_C{sTxw5cgL禎v*ꉯVX}BN_$&eXm.džh.(zecu/<{Phr''= 0jo͌E|kյ콥_x&,8"JBj4TD!>TG ݹ/ӝgݙ~+=y" M%q0`;F![̈́4ETc`lno=% ӹ* eR$Չd Ucg#0P*l,bcKEZ 1]ms[+}4śgi37i'mkTDʵP(Jte%ࡤC@ ǃ3!VS2=/E1  WqưBWG- -!r$Rc pIDQ$6 K0^z0rhFZ/32#R%oMTc ERjb?Ly>j;o!e_b.eBƛQPܜ˃V@8O@#LnSKnSW濩Q`3B$3G%HcS)fDߘR%G4vF)$1D\G(p%9vg%}PʖFERFk .1َH< A H2w.-9bIr$h QܞizjC<!Y <;R^JC, /Ðn'0qҶN LoV] _K[>"O-eR^e { I#[uZ@%S!Gom`Ԧ+rIڨ/UVBaJ%h-CΖa.WvIW18iO2EhNvycYb9=iV)Z' nqjd0 K|_VVBw|V;ٸj.f+^VjMٰl NmP!LlA lUV:`R6RGgr^YZݞ=7G7`zhlvns:ζaB@ݴڲS(D&Mp݇9"5вA`$T2=Մ`%4蒝+ ] inAizBi^Hi էt2eۭ>[Ipmeno'y_K6)?&]9g2]|բY-wؖauEdJ6UѿWY~`l^* 7j5ݒBˤ S,Z}tlk҄1BXC@@;j{&?߃EqJ.,z_qB^L.wQ!jfmѻr4~Wktr]٧En0=3%26-iibF^eBdmJκ 96L]f>H:x s|hU\[sIřq\JM<lԸ*)Den Ԏ1Vdc G@}6-P2U[8CT|QI#A&:BQyd(*d  QC)FTɲ&g"Y S\ 2҉C'LuQ 1W(MSk>SV_#Ͼӡ +^9KNj@U\VZ1ooCS[1eYw owx_8.ڰ< /F1SxrU$lUv?Q쎕x4VVkl%ѵeT Ę8pmAOWAfA$ubŠ*r mzϬ.<i735CCiѫ` MM&1l`31ǔVjsQ4Ǒu[,%d-IY'f1\}RH!Z@hs/[jH`? 7iyQ~g3*o0C\ f(~. 
R6)Сq YB"XUTF_"#{ MϪfe A,*<3QՔHR- C44v5g},YySN\ [򆔭 b %u):rNFBP&'>'!`n x}hlP YKJŴ]pjJ!)-*n%ҖcI9Mh$L@~,ٲx+ 76D[RJ&Ͻ=&klD"ꀾ>k.#_rNޚhJhm,?˯e8T5ePf3a3V0P=b5TbTI1>;צ)V;9&š `Z># Q޻ԬƹԞ6j'~Tg[ ý%CQNZy$רA%ì WPh{(j6\5UԐ%o;ö9- Ia<S4k#NvKM{ B {%8{0 #p9FxsFi`hDZOgO"vWZq"TU9+(6&&O𥚔j;&Xx,f)*$vJdxJdί8pDIYr5 ɪj=^g]4bi-NS_߽9^u>2T/vITVi%(a5Ĕ azCy66=l7#"$ %N:}bũ_"S[hXWTnh&{j@VګM)8{?ͫ2D8G0e |:-Oc*:ȉ#[Ni|؈(LFS4hM/:䇒غjr1 񤴹5Om]5##E;vW'0ϐCT5k's O뭳m%]%r e}iid?E "^t{͹Vџzjhǿ Q uFzn]P~xxs\m76gG<ZHks@3ޡ=h9cö 6(=/y{ t0;/ӺMZ}Yߤ]7iݍZ&1/nuLyJ+-I8 A{ ۨȅb(OFW,)d.A2vC.{i]xM}z 2>13@FVGW*>Z&EsX!ˑ:FjMHsN+,i@V)iz$2;go!Cy:[Sg=8w'I]k؍vZFOYZ;Z6ْQߞHٺwaF{"\LwPnvfBp>[bgx{% 6۽,cxsDl.mk3]L׬隝xǖ5yF5kBrEK{̉KQ#0ʂV u-_7 70%gy$EMAjijA]؇db6壇 0Lmh N'i*QᄱA}:5D".6}j;WQ㓮f *1 Mwm{NDZ^xfvb9咯cVOIjEHÇ$('G i7_ՠPTsf-;a6.߄a \i-*',,q30t\" /l6.ߌ0HM] 7|Rjovg瞾X.Ήgܷ)wbJDTW vx}Dcٺh7~~/{J2t>U`$Řu.}RH`z9dSbV]޽H1R%83(Xx==C8Ə4$t:'Ε4c} Q#9]8<Lz3QHK O&̎sG\f?qM(ER6 ~X%_k8ѽ;5i<~^7S]wL(gG^n,Ѳuy(&µݗݺ&6{y٭oWjjxfm`ݾEtyF џ]Xᚼe'Fpyƫ`ٺwy7XL'oѻ ݿ"Kҟleލn@La{Jt++?S^9B IZ'EVkػj=[xBJ_př>g*КJY'Exi"<҇VD(P>Acnwt,a0m;K)8p B"ڟ/w L?Kw ܶBZW^;l~RD@n)&$cwpxWN%S-SqD48 8_ވ.7|W݅d>r-9_tk8NûݝeT%Xx2]>3_b!Ƿ ,8;`W@jmEh%`Qlj]Q2o^>@gPzůxs/TӪ*lAA÷sΘ˻g ϋwW}qY:h^ EqTFPv%ӂ`n2k2 ^xR`y X4P9(뜆dua.&H~ǃa8s%oDI}J~9]n3Ŝg+rr]RP(V,Bn:TSf0tN1X0 ^}ƀ5^+ 2$2z.$ > [% zEX;nT`%hIvN& dNi2HrKV?N:'xbXwb5K|!XW$a1 Dr&^+8TQ T tA=˽'T{ v#T 7 HrjL؃Ϫ g-ɮF*0sUV#ڋZ6W0T!MA VB wAq[י+FTcRh nkW'zF:95l(*ɔjw84+.@`pogNB2n׆axLFFa4_OlVyrEFww\>+#7~>)޿xst*ۋ+sBz-Ӂy֛SzwU"\%#4#rVO#$2)s %;k#Z:}"\#dwCpyW,jVi4Mx667I޽)LӈwSBU?biVb)ϋ?heaJb?6M現9m|4ɚ׻+~05˺ZE8"r [<:kA4WBtf3!bՎe}X/cP-r@7U ;nu_i~әa.'C<8X/?1zx* ===SR=+rxGkLxuQ_/\Yp\cy) 'H@0IJ@`zgWǃj|uQ)+??'oJV/ƣپwOv 禎]B/"Jᅴ52!9m:C2ZM %ྒE 7a2^n ,0! %GpKe1Ks5XXX[pڣhEtieD h'}Av%ϣ?Si2)+dJB녎84#^0u'LhcK #!(DROfԆ Zt+) ; sr>|. usiNAX\e k4DeCs(yh!_޾~}Kq.ܴ#`cGnKys&xUn) ˅r,هY,qikdfe/G0oB;, "|Q ?E|cvxya cIEwk@jyIDU@\C<}X-~@ed}?WF?3 '&SmǸ|5?'upb kv ~Ɖ=sdɭȺRD)7T ^JqjQg{kB2m d]h?>ns fK:{}uA+'!2q74q~T]sv2[rRTxn"&R0oU5فU-b91A!T(=r6JGS/"Aq)bjcu᯸y=h@^lkW HUn):b0i^)A Y4v1 3q$#Q1`6ucjTr(u Cصb1*ޗRz[ۦ[쏮; lڽz#7_]jm!#;"@)}T9^hҞ=s3vRRN.r _pN:yAd܁:h >dȑp-"5!NN3`wd.9X)\L0# ԎZmZY1aԞd^+%Y{3xov+в^JV3\}9Zcz#_)em@ؽ뱱SZh]( fʺEfVj~hK$E0c]RFW6}* 0rI]Kx>x_/~: cgPva /yVVdPE*>J?7X87X>"$a֚$Ks>D7 @ ^|Zjŧ%`4pT[ utbIOkpuʁި1}kCC7UeAO784~?]=#xz=7v5_\dyUc$KhpӸ BϥU(`ggl.nL3z_y,S[2[-6t ur"hJJҷtϩJn%8䅳hOg`H&|>\^tJ'fSff){ZgWz[EPBvGlƝ, ?]|Cs?esI/sJ.t֘hJmfMk]FHUcB mL6mҚqࠐ J`螄q9[ knnypA1T;qfL\2*ˉӃez2D*(4%]DƵZ  C)|k]cmTIO|EHh֮yʧ#1cAL͙jb+Px3g&9+ޙ{r-xnXz`Jj*kk_]o5J &8/3V<`Zq7*i9YKK {J+y̱VvAV^0X;iB~Ϲ=3n@zn_4Aרh1cSDkIϪOЇ${uJ˵9WjvjҔ0h@X2ll1SֱOxmkl?Vl0nc4^M~~}zf*_Mo~'3xN*MxxNY}:͚,<ۥP#h qK pK\9h/<YC^8tM*9開A EhZ)}K셦JY41k8E632ʜ"LbfĢ*VQ`K2ʫ-89(,6{rh5]~ksNS- w== O(v}7[Q!_ŢkIGa*[c}}e;tG߽uMz5`@YrʇD֩* kGn61£ [QcTc5:Ps0^K107>Z?P_p*2"?8o׮W v)\)IS |Su+vj{R(+ЖjZOnVJc,ѸrG %ԉ-]l,M1~ؖ^h0{iܔs$Gӑ$ L)WxtpsgrRj؎fLg3hf6C#uL]-SOt56I13h-)b naܚ~<>f݁FB؟3F%ϓQ.7+w5Mwh0{6kF=tfXdO:77FĩUT$z]H r{ ;zpѷ{R.F-v4Y9`%D+. vk H候fOW ehE&bhK$X!b),w3Ƣ#]ҡeD0!n]lp<=3h>-(|d*vw.GgTg2>1fcңq v"78"U{pBEWxtpc }fdL$܌K' &U [{u9s3 +rݸnfCPWz0+S{#´S_cU(FI8SrFbPC8Jl9>>nzxQ#⍓poWL8B;ǥ3~xu8;nCvhi\٦'e·&_]0\^=|X>@S׎{R3S LhmXFbx!F-hvS?];dޢ0S)*cog˰/s^'3 %6W9o&ſNNb"ZaC9>鹶BrFI  NYc=i\? 
rò->AG6蘰;IlxGKli뺩* F-QaKv]@ 5!8}bUlzg̚>#¯X4~0ncdg4cF"00 `SKB P܆ݪۺKP:Kqإ`Q㷇s2;~b)5iv ^"+(l"i‡cIpm h.PeWT2 a!Y=C[NHm7JPM9Ъ0`,h+$-Imq56EMJdC+Mu('eUp:JGyJAl`ȦMc`R׈旕RzK8aRYS&F(J:6Wx={ h>IrQanh,'KDI}G-=1DV tc?)!9e j#cLʐZp`Ndqev'O\p,22yz(y,3JCJ`۪+7%5wÍFl_7[(ߟ3zYwBϥДt/Cp7K?ka~e8䅳hOjU $D[.);F6 M9`-{ҭp)NH<}DV޵KR4RҒ@`dl_y@ت &3fͰh'ޝsĕ)@`-!mLgT>r1HctQ>(rZ |'*%8䅳hO1G.nK7+8t ur"ΨMpUBS[ y,Sª♜^H,%)DfFǙe+eΘS2f F'믝CPj*)oiUN{|v'nIyg?QbHB$b76_{3䔡I{Ȝw[HVd\ݘuTunkTߙɮҋho2k+r:kiҷH 9АV ф#~Q!:W~i[͇.iܼ5M}_ jY7ZSV80@A⓿!oӿܼS ֭|~/_M͕Z9T?O@vb76EQ$9SCRI_Nz`-E)~H>`z?Y` GKkp>`=R:PTklLR{8/N- RK ԡvԡ@B墄 |$Hȥۥ͖x9r|X"ͱ1"4[ڔ$ZԦ|r(]'R`)_ox.ʊOj$|Hji_ D%=^\\#IBf;B͐5FT- ԝ ŭɾS.PIbxcĘg|]5}MIH#6By1\,T/-~`j.=USh.#S[4H{&XD[HyN4<1F 9S->d[i9BCHQK?U[c$Fn5/28*ўWVAX$ᰮURC9nl1`k֗"]_ T~.\U+%?U1<՞1'j2nzj[%T_ ;Fx$Rm?J^TA:w瘣nؽb"z۟/ z坒M-9"->NհΕUD}ʷKDG[#zxp\)`Ŕ R9'ro!7IXcH{5ZR'Ղ:Iό؞!ʴX> 12T.$ېvl9;H!pYc+yIdcEmG޼~L{oGcM?w\oCv­aR&א,\@ow$_ȓyyt@$G%'EzxQOjՇV"7{Ƒ f_ em"@r^6 7%)'A~IQ؜陜lT}U]]]U-bG}l fb*V˞?iи:aP@0Gs'׈'Y滧weW.dԯ'lwEvO:͖q2Ɛ֜Փhl1'cb.9ֱT/yt$SBErv)j^M9#gFB*&q#7.uzZ9,8Y=Uj+\[ s9;ZN%)u,/<M{!UTj[΋ ejeeezagE2޺*ztA0QqǪQq=bJd#\)q{MKN4GRszm%Qu;LqTć9\jɧԩ'n͍zR &8oOE$W)c0}aίJ/)R!9~AY4Ehɀnz"MI0UI]߄(5 2RNKt"ZO/ERLP1P_L=җbJ*Q\V医Eb:9ieדHe u! z< h.G冏Q`1f/cA6&E~ߠM*iƓKבGo|J%F2ٙ˻;%ɨLDb5}Ƨ4ف*hf%CWjK%ۿ}8ͰpP.KzN֍YS!5,.R x?/>/RCwˏr7rAS v1E BRSd4o@etyPP]x}eхG(&xJ1d/Ƴ^d9)hJ",7"Ϛj֜q㹚Q4\F˫+s'9P B"S5YeT̊<>'(W[9Ay%pׂ6.M{ikG;c6Y=UڝI4)5CC^`2rŸ)$g^hgNRgB}R0Ĭϴ  "͉Vu!QD*A;9=c!z*9`?SDZQ|H͠ݝ ;|6(zdzX'[f%O\AI5kOZ|\mq#Nj>:i*2cB9`t}8/0Ys1au\{ߪKK+.H܋VwO3]:6H{􃂣hf YA`3%Kh^;fU #X.EPXex-n6(vãSJxp1+CJEaU3w_I넔Wd霮)n?t}3>*ᓥ?~g ,%yv[3惰r.){LNRM&WnjP2'63Xmff9 z}.<`<)#Յzw\E.&J잾cĘpnp$xflL%cOH t(@zW5F&Uwt58#.ȽVl4|V 8i[TQhՀqU1۱˄>"Ve±gC'T= tJg.V.Jɔ鑼ғ2c>W~TPilHgMA a>gg%/t$e6Ѕ Pt1kMČ4/ ;Xӝ|[5"yFsFyջ=arO=iA c4Q`VTÕS |xb$sX90V3g j'c bpBxₔ!$1B}K>0enDpzi$ƞa>grU,WLz$ޤBD3}o%hV}[NÇǰz>sdmyGR 23`Kҿ1fL}#c?YY3/ l|ΌoH#VB\;M$"$W k ?U\`qOYdaqѬJ" $O'f xN-#R!%vhP1C D$2IxϮ:Y۫zjsΚvbFzsD$r1p9ɤqS}*+l)-Oxpdъ sG֬Gn+ 2RBԠj G3D(FXOp< bۊ.Ab;=̞MOю~H14[0RCMhno|5.Oo2G;Yb}-JJYTsX6`FZ)\1` ꈗt,d4*PPmAg"0^6)K@? zնA4`S'z1i%@ +J ʶo};+&,Fl D_4v46_; 5歄 B ̄Р|`'aN9ŕGF`P@'cNA4VIW "G:5c=g+W(ۗQ^̄hĄ8"GQHIn ɡ2AL)- (Ts{y?PBFd}/Z@VRUOdQy6JHる'0ߺgZY%JڅUsq{H5#X*M(M i.`9/jo{_✅ˉw m_#JDD$Hyƥa(1T "Nꃙ.ZrѷigqBZC͉ vu z(S5tT}B5k-=lvH_Z=^}B-`>;g/~ۯϋfJ.(ynO;LծsMA1*[m_alOwy;ws~Gz~h\Etu,mwRQXiF'B=8;Ռ(;d1y V" ]A) ր0(\(T ,$xXfcާu M#9,*S**`Zc#cMՊ ͺQ k3锤$}/3K P Qa%,H[b+9"E2V.Tm"`DQLM{MQ)k8GsܥwK5_:Ze6 VjO_~>ɬhА> 4D0?uYhxt!O _ǐQĺ#K#c Ij9`fIy`XوPMF4Njpat[YnH)R~7~ՅygD83Ղb{ $Zu$S-ryoE.!1Nߔ#S`9Sْݽ밾)nWvss _m tYJ#/x_?UPTɻHo|K1)}%[㹽ICo53؇UHvh 0@l ֭)}%mDOdJYjLևUN1eގO)/?QiLhVaLFsƛcܸ" x xb } ]` lf"iu^oQLV;hū T!Mtq͍bLŸ)79'3kwv[4Lw)N¾,ѾiZ(2ʢCلtcsJԡmH+J5\y{Vw)o&jm 绐}{-,, BKosZa-_? 
_DP"̱jIDQ+Qp]s`[׃L2Nx>}hG]_/C|L=(p@00RN~OJO\C2~>83gWD/hPo _Xtw C8'{U 0^LeEg`ӢҒ&߫׼6]po+Z0Naa'SpSigRl*Z,n)x ЙPt~9?p}y fh-rDkcy* 2ԡʁ; nJ^:nZEEP {^j[5k$mÐ)/+I*B^BR\ppD (^f $̐(p D4EɊ2N`j4!WuU!ƥK$KKR`xE<\KTu~N94#W$d6yߺ `R1Q6X3׽2nn94#W:t3GܺpZvFrS?6Wpy=1űF6t-1Zӵ4]nnз`(V.̑Qxh ҽLוdLXg èɴ9y7}gf.4!PeZ/"Aㅢݪyj_Yfxj2wy wU`yolڲyeH |gŦOQbԧQpX\ϻڕ-4wNvu~@scJ8e8R|jUǑ~"j TH|jgiPu~m3Nk|Ӄsh8ǯx}T|[zV8MŖz!xTv .ÃB+LS ;>6zU}ęIK8^Bu\bӚ/XQihF&B"ͭRѼҽp700_3a|_z[rΤYezY3Lcdv^;p%=1DR1˜l,-V靪ڻSHD:QE%ҏJ(7NP"'eq GΠkzt<}*vUM;%qԯSVkI=aђ~ܒ~%T5 +g$RI"Fדʗk?=xthu=YކoK.#0~6 /| ے ]T^~j#$T_O^M9C!e#F}ymfoF%(ϛhx2,qu7]/ʩqCAȨsoyфoI Q/ד1|o|Ӹ["1Hag_wϟڼҿOU:}ѨEXM1A'GP͵dSjS` `]y^Ǟ E?|fޅ qU:b_9ffXcRyqtWӺPX+7f'Ҍ^ ln!հ~X)!AX=˜r'E؏}uM n~z]pqCK\T3LZaZ2q8D'vnzS15m'h%܃ceT.0"#`]ub5C`s0iRBS1$ԐcR yq $|L 9X9`Ɩ{^hQR4.A'QC3DX.*ͣ!añ7%r0EEzKuHRÔ1>>r [Ռ`Rx ּ\qJ15dVGvNGƤ]( 0W<%` " cYa&D!|6a*1D('" сK7[҃aD{dJ5,~(xFxnͬANG0H%P84H; CflBNfyLclKp Ux :ik,sc7 V|"Asq|GG|ҟwu[hUEz\+Y d P+֒9%W֒Fh֒79ge-qH_'C= I:5v,硖Q!NT_ڿ=B9D銙v{%)ء%CG*w^E5Fz`2kџСauwoK3t1)WR+*1DUpL[#CxYUv.x`e^,s._J>Ɛ!tdN)]˪j4ҼjavU;{=)(lнb1*N߉}H=,85[x [8#-Wk( $TfkCP}O%}aBd'w S8AYONP<~I>ם~;K(k)۱q(~F#bC}kw'si8դMQˬeS̢ YI4zXy8 Swvw!In}gw rKjp"Ij?sEQJ.8FѪ#Z\2j,sLq9tLЀ4vw^3n}kȐDDtT.}[ ɇ$^{^D |~Y㍯&ʯe\-w5xJVL[簶:fYɤ4b $b+@)JB3K3i=-,XY_>鋋z(}$#^< W;PKJX'VSV[)8r -)50)WDaD* 7/׹'HJdx ;0,aDpsZ%-A#14-9Ή L`сZz)(bЬ!{kw!ee&Jҧ LLAMbA&li<`Ӑb`jTz-xR;YIZQ&%^TPӢFr}j^ۂѭCG0;99n^8{y(O˘8;v Ԟ2D2l'|%=kϽT'3NIDXD2| scY9p,!%7k,v . AƇ|SRK,8göDuJuLRY#C FޫV3g"I5|Dx`t XJE((+XWIRvAlCAe"260!UZ`'h1kK=UKJ6aL9SY=h~s7+=n'KYϟ}NG7\ۋ70,NIl4z(FP1$i> [>مC>^cR]:( 6=qawQ5y3Zg .|l2|󣧣,ky 7wn4Ym~I$>[-IeSX< 2Tn|')^W_0sVN7}ijb¾hq7 9ӿ62*-CJ{V[&JhGI:U{֍``R1Q6X3A?rMҩiuKgokhr6d%LR#iL-!JlI%8prGK2!t^nκCzg%aZlWT bijMV͟ S{[I"WhN'jFb:m4ng\gͺ%4ֺА\Et-)uTn-7i4ݫ͛7f i@}_l CBh؃L [-.GOPOW-xҕ>h҉ߐ X.M`H"8S)tiEӟ/wNW3 ?޼5ݕ`5\QUx$sBP"?b =n_'+a7qP|zi\yVpރb\P UEEPi=&`hq :2{޿b9JmG~/S=S+rCC>rmSr]/4~( $ Myݴ#i%RB1zF QŠ/Zg\兟]/V*v7#Xn߀rqX/c) zTH _C9P!۝/h+蝙,_S_g_]Ot'*aMI| Z "1ҋۢ53;?{Ǎ m~19'f#I4nEb7tF*./׫ª&ᥓG2W1]am<%:xgLBu\X\] Q ʧpi}mbm#4i@DŽ"$jtѩyA? 
5K|z>~K3}'n^ K_%oqvhp֬$B$ա4*\)J:xE֬8^mSŰy λLTKEi`vU{i?t.%ZBo^g G\篇֍xLI%!N ݕ[Nܕ ɡq 505ӆ)R&f˃pO`tobU#>g7'1b}acX<+ptbޏgrU113s(\ZD1Ig.WUp?S]ڞ1<ٻw>̐J;*BxPD{WpB#ٷ"g4MY拲;=.Ta_^<| J(E`&EoJ^刱e(1.~IN\m,W3^)¼db!.O\DvHM]%!PJrZVdJkh"GZ¡xO3д%xLGa*Č1}|5jdwQc(hָAց\ ?6LzWGm\3w3!n$wަΓ,Pk W-E=UIt0)jk(~vffd{ЂT=.,ÃXB/9P}O&0pra#Cl}]1KHmv俟!% )R\Hٴ["G3UWWOW}W^L7]Ͷ'xd|\v'/vW(%DQ7s_i$'fmO*MN RGJb OEI8$[e m$2 : qvB;.?>_&>~o7O-r^=^{[0>WuZ{SHULTZhHsmʥ4 $ ʵe& 3NMgA z0ڼr:HfOO֧#/ڪc7 *?-T'%Iu/(,**qœ*T4)zϽ LJpXhdzM!(ҞBc9C&IK"N /)Ih8Wɣ*;&Ӻꪱ£ʗ4o_]ɶ:Ay'rF*iE#"H +Q6)"))XW`hP8&t2`ZHsbj O̍>ҭ| c"pB?xL\a_&a4 Rb;9=,%O,=?[\Eٴ8tbkbNo ]gwu{UC@C3=̬<0 >p4nh0x y'>h MT3AuРPy' @YbE$"Xiѥ4&vH{`Aoe4YRQI7 bT\ݬAO_ t92\ J>Jq@zш3hl[6ᒪi6jSl }i*R@w7j)9͝]|b/w~evֵ^1\5xQ3{K3ܘޘ 7@.-fbJņKf'%{.IԌkgTD}z k00KkEo\)9n%gZtkIB۹)YsrهOQ-fj%rJ%;T(F`dtÕX;su:A^@Nuqtϕ{6AT%95p=5=ìt& i.CVÁOgC*~^t8ݳN@;I?˶~m p Rl{'8Y@0x fÍP]ʃnYTzmUb '/{UGK>U]oȯl/svY tĥ/ogeJ9(W ^YȯD[X:I-ܠ@O }F'|\ӫw鄻_nTdqJ*؎뽹x-Y%뚃׸yQm̜strzl%Q^/ejV| w_>F3Y/K3IbQ6.ǔja8~Y../)Sc+vɶrEߍ6(v Jy"~y*<|Jɪ'R=.7}xT@k*{ gd&9[ ua̒"k|!2C@HSeཻ8wY⋝NByYyqE@nd&CSpw׷vf?JP?b3YF\"'- &ɲ8Jym6>OF?1bivgaޡ e?u{WhFSrP~%aXʯѴQɀ&ǗWH<:u%>#oVU4;x :11amCSr}mb~s}7\:/46` '{z2w9xe%9X,FE}i%/y?F~n|YmdܽϦ(Jԡ[, .M w@;gAI1Kp1.Xpw,zy9=*YR b2nydP qnd#_1~1/^Q9Zwx4lԯ_WM85 ť.*E"[(*7!1ל9uc cB(k 6>jDlUsdͺayk&t̀wX7k:-ڪwl\\ZaS@vϓBt?++22/z[ i}hKʫir?{3Kj>(w]9 &IYw-G^fQ^2[ȩȩ$-a$ਭ @% #qR"Q&0QHK6zԠ'GSZ|WƜfپ&+` `V\H&tjVb8e#s#Dr6'qFL`(:)JsӠ?˓ݾ)~Rxws_&b1mu$*%4WPε{5k*YTwڽnkc!Oh<66zFZ I2Av6n$ +2y.kU,TuI} 4FZj[I/zB`T" 5^w=qhZO7oq8#6q_;;D6QFX1ǭ$tC6X+G~`STL26Q[A5.XiDHՉ6&2+\+ wL*rJ$dy?y᳦߷yon~`5?n :N['4̙\e`n]>FL)K QM{?l ԚtdGo(238~SaY?4=!Cj:]\9%P!7^NBm OSzƀ|%ߡ@;9Yrb3vG֯?*Iׂn3uWx/=.jf*߯[A_,_JPr 3}8c oǴCqܟ7J$iϡi# k~Df#|A;g!?Nj'+<׵Fr* Pğ 8,d̸!\QkR^r|Gc|:u՘@tɠk0L٨{ۇlB!:̂?̾7s:LOmkfOs:_ZZ &}Dev[,%Ls$Q ZToYI>\rAgSx9,M^. Wˁ ~A.QDs(>Q8OISvOq@1uK9Z yI٘3fDk)ջ󦂣J&Q" _fuӁPT"k)#)2Fpǜe@{ v[j]Sj=&T U#N}mӍd&i9 @H%,rWZhVSo$xL mV< yɄT+tOW6ImP(:g8#1б$HM"E F"i-ts$>$`a '4RL>` FԆl \Z|].bJaY2EL>0^:t"jqЅB å($,U^#VԸŪEǹE͎ ,%LvP5}|n}e%G-E%ƫC@0w/t $njJ ^EI#|åVd# W5kp*k @<*M05,qU12Gp!T:%ےzRS2ƀCPҾtomnG>MߞLQ3ish&b8| #{atqK2d]Sb8[ulop_Zc{]TpSDXaŋ??{WɑJA/ޙ*qQއe`01c^-r[eקQ*'ԶmO^<q쒲A QǑ&KMSu՝danҡp0S}oe!rIQJj9CekѲR(\b2,k'P_Jʤ>k:@ cǕlܥwu߇P[w<\)RE7D-] )dLlJz%~5׉9k}οS>^VW_]3ZwKdb!8cIBYj\@W *G(sXˏ3£ 39#PhTkqg"Y8Sbϼ|28ϗ_@VM~0*M5;+q3˛ߦ_)~dK98OfodoV[6G}W3Q3mv#'CXH}CdA=$TͶ [=IS-!!gBdWi9m' Z[7&VKcA7X;WdIqFhɆoF^znR\&`f H [3Q!Z˸pkɷ[WEwܔ $(u䡡Ñ+":ЊpgB-JܟjBzpN?դp>X-W 4fnTggUj79<xLխ0@Dʼn$ms8tLoή?ypf?on0c s*ΡsPhV00ڳP[SՕ>Jk&o҉ρ-vc(=y&__=2@d/vusΫȬMR5XQKVEeE3#PrPwzQ$ $]?Nn]~?-y?57쇳f?F_^W7?eUxM]ܽzߋ;,^v79k~{|\z,o+j `TaБ4jvʋ8\|Q=EtF 6&V:( U0I))iv' .a;tZyn=ٽ 6Sm[jN ;E9s3+EʨZ4TBIp =SmU\k )X? 
>7 bM&ɣ]u?yFm?)~>8dɗ~]T77?~xTnmkŲ|AsPl dB?UV1W] (:a [LRAHA(ͤg(.np&'D;0joXg!ﷇ{F28qS:8ȓ!H6uYdg= H,pTX>S65.nM_*P.7uZ%'`2WBm ɩVàChcp&4frbI /Ihr>U@ӕ &DxIYTNR>!ٚM)ޕv.oz˨fC~ \dbWzHcT蘳R%+9PHe J@j#Vx yfdyx SnH *$&Ji$LNZ*^ĨWxh yhgt秨XZS(T4غ˹c;xynXpn HhdKW֖:?7)[YbS#,6U{S(&mhM0CxT;j2&Y%AԖ3b?2e0 1Jҭ9A8Ѱw!,YŕyKh.m;PnL;_6½ęNݑo]oM,gݟ|}>y ׸/Xk!Uđ tx-=[&H6%"T7T;:E?cID6ۗ Rd;oA9%v26[);lCT3pR(;d8aA:܂V3fOÁV°$mY{jC^雉=@v{wc\9@E\/zxG]`w@9{H´Ґ;ncȄghw 5dέz2F$Q(BackRp`Nq `T|+),](?=RC ;*t9CT{;MfG,kiNcю_@-.@=˽7ylշЊT+i^Q;Jo5a>wBugPt.hOUtN&E^nA AQ&qM6#h+WA:G+͉V{m(JTg;X}" ܙu~к!\E{vd@@JFX Uaj9XU؛dKdcՅKT){Z LuR:EsYҧXPD $0JbќnOC1t+F72&akpr?/σu (k4Y12ω ɃE46"6PIqPiKK eQ@t0^2`kNH y/iW.xkKF0yYR{)/Bq{Oi ԰m<ѼgV_Ҵl<gn|qѱ8r0q5[#vjfaL j1F6 O63&jΘx+ø/uB;rV F tJ2HդIKXK8%Qr\5Y{gXM'-=n-uZfΑu=+ 'M㟒2:P*R1Y7I׵#ӟjپ@W]8=j*yH^:&TT*@PVvƵ/HtJY *(mcyWAeLMҎx `dHnYCQ aǞbjۑ1YyOvn І!p[~O1!bX[1#`jhYWmSX# {mEb>D kDB`VZB1bdIP`t}.l'pFqKJD !)@JVƋimcVb|&ֵY -H+?`ןmy"Amx i,5HDyXxAu{2@ yzX8 FƵJ|IqD<٥N2QRYǽI'Eh;7鶐^;CTCߐf%j8c!HH] ԚIVQr[5:om@gf]Y@` l*ss!<%W0V .AEr>;T2ʧ(F\EU J0I-%I.8+B\Q5ui}Z[KG!{q ; e6%˵q7X$0+ Cڙ땆Dz}ޅ%+^eNd`yg,(MbْZ 'tmlȩp 4C4yC_&U*a c%hV9&_|u.&j7$f7WLݏS_L;|m˷Y/߶r1@8I:cUMrVBkiN Y QcDkmDy658Oucס/:,ڠM2ۢ]Oieʬ X[ECYUG%Ht(WDpQI{F= ޜ.x>Xybk˂TF34 hQ7Ń.nLL#oX؜aƟj_DTZiTtK8:0ACq֬L~%*CYE9 Q۱;< wk|)EEiM թҺ+ڢU%;n-B3ە7ΔJzg-CM $a>/^Y(J)3>wGL]gW#B{,HlĪ7U \p)>w`+iaq3m-6Ty?]鋳Iw]E^J7,9țQ*;oցŒy Ruuod 2Ezj30L(ɑTg5hg*( ,I̱2d|j#{4mk#_{%@> bw)4Bԉ=,濿$cǔHJilF:7>ǽf~k{W07 b 7J8?V)D.{xf,=}369PX!~wGDuWz#ވj ´jg=^xws ݧk~.FP)W{;DPj&\ǧSp"e!O&_}]t>$~]Eηs*<̌d#Ja<fWGo&7{8>ۨz?&֍AHҮiѽkq7Oi^*i5%w cm׻ FϺW#m G^f7-ֽzk_M?'Ba@mv*ך 3!09AǪ=[=j>QEp ɎNPTz֎貿TS!ao@/$WXKiODd)}&c8 yE5uj旑xz>ϭY0.~.Yn2b4쟯F@2( ]Yn̲tJf%=eﵟVUZ *{]ow xyr}q>UMդNd!߸FTPCM<+Ӊ}GvT޼[@Bq)61d~ U ӻE t"ȻmuZ;ܻECzhMq@Hazo7Enh-$nhﴃo7T{n2nhQ,@->y?-vt~դCj29}GcNX->-xBq) tbĻ/%uHo-.лoDlJ ?ػ4-RA c;nK 7-D[4uhBqmmSr 1nR7A7IH |Em8h>Mxe8oXT~DdJR+%xV@z}Kxgma _1 @$t|2]d7*0rɮ6fQTG!KCʙ?#v~/be $D ס8960D3sBi< iB5L0x3=M|aTrڌ`CԷƪ(f> ǁczRzbzrj 9$J WOoV\m;&_1&vnHX´!-4wɯK0` ?BǹO/+]3d[ ұeȓKY!l ?|XЏ1cIyzvH1{9?u A,B!fP ,U9<JqcHB{Pw^pl:F}USL&`j6拙uf!' a4)q\8,GcZH}*w(_SJ,PICk$U ǘ'IUB@TR;x՜*+z=JԠK$3V~`ȇ∟-ֵ)١Lh5hncPC3Fv;T_FKGv߁5jQx2+5,Gh~>nI܆My=z#}`H7޼~[L`1:݉(_د~-[UOA /K̪a#TϹqQ!EF&YK{C*/8̶|-|@r5XᐷC ^mm )ph y$!?M3a|49ۭP68MjvJ&JEGj:E!t=Nܾdn42VBis:it)hL]LP_`rIRR>R2T ^i,bU- F@Q),2D7}i's뾰r_cɆ"h]: l7+?by9}T@d &9j̥!DݧKm Z*bd #GEEc0Ĵ|+d@(͗n;!i#_ Qu,o}1j,,Z7> j{vg4Uӯ7 o( _ҵv{mxsT6l؀)NNո$4i2vI-ʝ#' hF -\=Hm&ZOї-ك[4 Pog4)oiQ[nû*o9\ H~]!F0A9 ۡ$d2$C(1Ă)u3ʈ:˥`9x8 ;!h-;?nLbQkC+9 ZT R}(-5aLN nnIf4nvm{hw9ĠVPB)3"pG!|PPcs, -L @+C!DB0L(T@|3}G+ ḑf)ol2)̮oAlK6גyd-ۙC'!\ݎf>o.*Hq3=Ygs٣eYCe`׿$#"K]E3g`OsA#-9d HVhut,% ;p?P}yQM{lqRݎ'k Rf%KԎQ;q'A ;Xua!t Is %yKU`Bij =-ST &u0 {H>-@)UAV_p15iyjVJ Ves֥W@ѫO\$ T#0@Z߽35 O~R0GcfyUy\[_n_]*OC7Zɹ2!JEh7kbCQ1"e c,Zs"$RșP! ŊXK(9z9$N@F *! G.x! /]lj'Ȃ m)s 8<LZS on>d=ҷ#\l]1& 5z3yr~ S~ڸ\X櫆j,VNU{Č3ۣ˿r(# Ngi\}d z']"k!u,&Stpt#!3ЩXbiXַPXdTȼVq2oQ6µ?&bq56aѩv8<ЩJQkK@;rƲ]LOZku59zDtc=q48MF}@;NVGDT{"4{uoF!R?m1@pO1HXJ١`3P^4-}m'_HWnW&% . *jD(Lmn:z/'I" ׯN*>؝Ʒ\h(<[CshR1Hu"Ɣ V&(vdO|cr9&(,tHXK!nZvj#Hү^ǦUu8Q+#pM!^BN$/A# IROPJW<(^'\e1"e9eBZAi_\ ʈ(KUq9RAs j3O}}鳒LWM 23>\ G !/N|ҁDXάϟ{/nXW|X\O! ig-%=*:i}8}Xei݇v|9y O4֚m QOaa.m4l>ִw3ʅ433.e+9#4\(Q ȥ*ri!j (xnxz)hU_@r),ge%WZBT(2tfJd4@CKA+С.uV!' G!9+fZLBF@ʢ*44'PzPΥ`ٗE/ R t_܆<KDê@sNuD"H0<M.#9.Ҵ#Z=V1mtIQuPZx©-` F&pk{®@5eRZrD*lAQ@m$#Ϧ{읜IA{sB>g 9&vB{ ɩDh#vA g (`AakP'X7GKn^6-wy_?=EvW=9q+͛w@7u7t{?o˪eEIUYd2I]e}!\|?K2o\qeX¹HQU=ol9ov?rY.''sl?WpX?*orCa*@`xՍՠx#ZJ[) M/agFĜK)9Ky ܢ! JJEA1*;SyncT-y Zϓ^v@[>ӯu@z&HhYwk(i{7<;HP.QD&H޾9-Mr0}O/hwJv3*F\} пϖ(K(X:3hZ9XvT:LKqay手 C( !

7Nl Wz7ubҚ{y`Vl ZǟZLoY//TKbrW Ƿͺ,|&eS~>n6nbuc:n]S.zn<[ M4ʦ>d1Aۀ<2ow8z"+ޭznZ~+7%@]t"[<]s[%_?񏟫rkw7M=ܙv,7,^3~RN?4}]&+U\PbGs/[4jo:}ڸ~Kw/3/+uh*Rx#xZ)́j[3sS$]h_}w+`ofʑT>cԹ$ݙyUGMP.snm>YZ)f|gBv]<;zHQ S)Az(B9N=qv_?l8P`I<*; :w|qwT*m^wAO W7/3+Is25#-iNwXdW*IX I~Y.[ܿNmoKOPi-?9-Ov!"̯x|*٨uHýa7?k;-pА\HqgP , FoFjmXvD='Ǚpvbn&*T^@=4XrG3Zs8x(<v9{ɕ˰+TR7ۢ^W>ZuB pTYV>< lel?e}Qif̬4Ue_eZv}-\w8 Sizu ٗ]Mv h^JU*닫|y^*%J\Kfz>oFػKUdžT/v$,dG7D[ܙgKlS=|]tߴEB# tʹI3 i@i#+I4$gZ AEbH&G=p mʥn;Mb_H92ԯ]͉rn,\1i*ngVc2@ͳBT*cR1h(h)r.$<z,DB+T9,׎`YPX+̪d:+0~h(K/B=O{[%kN稉% zm߸ְ[dEA;hrīm}BI;CSsΐ"ڶ?`֫G/{RIF(S9΅-|*nj)S˾ϴ2=600 +eJk'lҋRATKפݗj|e[)x^:p 6cMudg3,XRI͈nOVzVjJ(5ovWnӹ Cy۵ PH`'NzsAhvy6#hTo4QmŁ߉Y[^Ҵ9AE) q#\%nɝmԋ/JR:y=13^0J#3^{f<;zHŒ( 9TǨ1"-C-%3^NȸwEW0vЩvd܀AzV׸3دrumɬx +IfQq/ 94&-lIQb8uon$ o}0 #y$ԘA4j!;< #T<"K·f V)>(5S Y*]r q.ᨙWP!2>+Ә ZYV12*ARN|)FGoO-x~QvϿPlN b*$(5[D3|8Ը։ds_8DQtnTԝ40$sPGwKuoA8݆ېVL]ݼ' +KfeJR;= U`Asn%($%-PV52 zjQK.p2sZ8Ee2@.Uݱs#ANYm\r"e}8R&6R)Yo|}&Aٳ=R r}oj [Ή).:{M%,zp}m44k P3.6X?.lƸGqw6~?pHEs O"̝,|VљvZ@v 'dvOISZl$ |>,ö"G[?wK? PD`]Qj' 44E<]L! j7>=L{2R7 G ti<#$"n= .3Z7׌CKy0(  +]+*AGJPg:GEoIhi̘"`TN%gv5)KWe%@Yl*UQ0yB)X_> QEw罗he{ \ J+edA%8gXE SI fr,Jt&J*(rcu⇼s^,fW_sptl$F̐%BAw&q;b1N7Ѽ#(J5-d܋Ž ϰ+ GV"(^f!4!*bÒiMDf+-/ †)XVG+gZs5 e!(rHvZ>>zɄ gKal*l^ )(J$'ZJ; U$8V1ʇ' -\sZp{2-D) q@/pfe"% N4;-4 %=bN4FҐyW])%q`8h6r+ٿ"K, "q`7{In;c{%yIdKcZM6eؖ"Yu b:!7Mk_**_UYr<-?? W/꾺}BKr%sAeHIXn2nb,R%jPSd_>?VHfعloW3$" u^Z]/oG QBK]^VSa!i ̜H. cJjC3)-pBR!siV`VcK Ȗ^bKMejt }RIV&+ 9!eM-*!dst?`, .|TOKř@.l^D4Ɏ'caILhFQI(:YP eR5 gNRACY'[4: p5PI'Q@Q(B(ɭT=P4@SNdDԯ"aJ[>U@7M [m*LYY Vd~8 G̽TilA$!!l]xB5 |.8# pO G7$%<Ϸ%P7Ńu>S)糊5 t=p61i˟!3R'M[d'q#0/GYf>?fVJ,_}Қ.dTpo|Aj9W!5 ԸgqΨ;2~U3 &GA*"xѧ[6[`񿯪5yndf\cg5WCOS.yHVyT7|y5={69p}RZsZ;{"ju9MxnR\⶚k',c\! E$tWmo|Or}*C¨ߴ?\&Aj\B^dž2 N%8gݕk=u1 $AopNv5@s| )kPqS&TQt;IL> Mϑ܍J&b X|nXV3ρ![cky== GC$H q=|a;^ \ۮG)?% $A>Fh4~/ -IE}̢(#DQ%mb㯱0/n|npt5"8_`J UwuZ"Ajz"( hoK'+5 wN穋,q7j5T{(L(2|:V9pQz^Ա*(+2:!҄B|vgBVQYȧHAL60B`ɞrHFl/ry'kFfvFNfo-H1흪;6W|.s[:-Y闳]Su(*w_Ŧ$xj~MoΚi~={񩾑6&gŭ]Urj=c*-Za بe?bW>-U~[ =i|⎎<3w$Otۛ4z [ B\'1xT`[:Dօ|.StgsvsϮutqvKA뤾#fvqS:P,pvC>smS0n -յ5{f˻hO!'3/߼`oW' A7oNQEj쪝@.@QnBo_i5eA872)#2'rZ<˙$wr@ JSkcTk`Ci='L/2jsriUrM`݂rt1-"GQ:sa9x\qH$ ât+Ҷt>ìԒʖ澏&w43DYxwj W0F6tܬ5֦p]z*nyT™{U*B_ju_[oq7ϰvmu6ׯo/k{5////ꦯ:D^T{0t`s ~r8 |k@PzS̓_kGT<I0 PBOc/`Lz^'T#ɀ?>ETd*wAKV2鲠9sV PpJ.l9 :G$gaR£F> \:h* RRkC3cZ("Pu J3OZm$s˞ޣrᶁZy~P:T $D{ nD( COPn|" $?*=T, {*w?&T eL= 49x䐁m SB?Eǐ&/5UՂg>xn_kBB!>,E>>>>ULLдq@b8ÅsT[2Dۘ9OOa1:!CLK$^9𫮬`%(t9/n488"BX70ԂnyoyM ktVWp_eA %οԑhJݲb"R˝(,Kp5j d!/Ipvr#)5Ș5K6*I|GThTj#К<3wb/`ޟݸDR B\'1xz=얞8u!&#ν Hxg-!m]F>g٭ ]S:ЧBFk痢>oJ>~əq^ Cpn”LLb>%Zi! C It7y閽 9楜kԜ3p?UFBA2"(F;743:/"BQ^H*d. a ̟3T:__oWok:wk\X d6. 
3$ HB@W6n eTyu㱿]~|.aes xC?g^-?3{vlĻ,<&G1es{w٥o7ikrWt֥b6w[޻-:4RmhX\ Q֧F7=- L-=D%8GpJ8YVvcxLe"=p,.}:RZ8jnZ@΋1f|-W\` ʠ2$,7FXT@)5 YJ(ˌiB6 BQ~x牵/}z<\5;KQNRl^ٜQ!e_?\ 9~_ڧEYQ|h@?%ySfNj{scm(0kn>Zb<^ Do,BI:W}!:?Zݼj2>xVx$T#j*6_sV v Qz Hګ ԚA #A (nEnĆ xG`*ڡc΂3_'AH}V^&f…_ wwI7{@?VMm7aG߼}́yn?:EdJ!߽67oWvݥ:W2Mq{ۣa6~Uw.-–z=7 >, F\ ^7M9Qy?UrWޜtр qs需Q3$הe2bW y]GKJ;- kW$P9A_ZepdesnGT7{H]9z 'lRijr jQQ PrOwSsN R*$B#\yp@HZu iB+PFA*\rD^D.+ M Րc\+Ze asD)܂+< 9#!E)U^e@knaʚXRYl(eBfKK4Z^P C,M*:GY*K:rmx'£0y4K19!jB*] _8,L∹,VAyk A@ΣӐ ŀ=lٕi|DbñtӪIDװy*+Ɓ:CIO7u".PJ6MbMҔx+)7Om7U]s{-+'/A]ۣ?/?w 5/ꮹ}@F-tI}sQaEY0 @e0e#9--E1kcl9& vg/:m| F_l!/mU Tc]~+*>шW,MBeFdyVKj^] :N\W$K;Z+wAoo۟a{skٿ~}3tCY9r$·Kѥtk*1Q hVdT14(u4՛͝3r٨-ՓlÝyK[c$)Y(QۧP9aɎQǚ@".T5nmCI@=K䧥>oH S+}L1Kqh7]P]S';H1‹ܝMBt&⁦ ^Tc蓍urBt#ʇ%'nѶjv5{E5WD7gӳ9e@` b`' m)σ>sX`zE_{El>=8zWD=Pk+b8غ qJ8u+b8 !\ 9';NX(Jol%7Ñ[սYQ&|=GXV%ǚR[DvI1:j)_ʹPV҂;KMe79%Rj :Ii5*)X$3`h9 4\0v64gǯ*f%cw)n+T0j+Hy59J>Y׺gY]>7t&6wO1n|{ץ6&NSAyǫRR D$֊a‰8h h4Uq,QlrZ7m9-LQ\]H!tR?NS#ٻ涑WTz˩[EU~*TN}H^rJ57ڕ%-EOHK DJBPK{/6 q'WS"oq B+  5Qju>4 n ^2H b';_J~*իW~HygX .0bWe*Ee,uuS-)Oh6Ou",wgb G^ \7'W-Z,LCY¥N: Z% "XBxpK-8%5AsNھ،p&/=UjHVC:r[|E/EjRmޒ3iKraIdS@VoQRwM ~溟? p؞mLi\A]SBG ^ff\H2 p`6JwLZQ`<_/&obktDRn*3Jg7H{va<>y7,pC HYX'Cp}J\[Qfb>ǥHa0Gq{TdFhb8+i0%'=Jџ+=TO<׸$_2T=};*vڙ@1AT|ڣ=dj:dCe> eo<`QD}J:EXJQJamh< Rc5ƄvRfAցrc1FqAHs&a&TԠ19#arQ{w+v&gf8f/d{g)<̼G,o02=JLn'kVsu.VGT>mRْ])k .7s ކ# 8p#HLXˣw%.RKxdn/5dnG{̭z4Fc=͢ظntG$F)F Q4 5Ȼ-7(apFd/Rzd1Ձ3MSrY:{E}yFEHhOgq犢*GOa\^ R˖atT}/[>>V?n9H֦N>j*T8Gquv5(W/z!!eL:WލZ![Gd hv_^Y{f{Fȟ|vSLwC6 YC3Zy%Lݺ*z!!RSFF^eGnr_K33V^5V:2M6Χdq}Y:k`?Ͽ׏'dnISɽΞ ޒ!5#TSaQVAF):Qԗ !RFRqAX(M4ךÉU9;sqfj]Mh"iJ1c2UMV q mF!@{N6z2Qe] Ubc&gMt0ӳr9[>y:'ܜlDcu;X=m IsfKD3%Du;j}mbL1[_ 䲵՛ z֗]wTZ~}fK8Κߖ!uqz#rgT}~ ښ4og:)GO󺮦{&ߒ!5KN3|،B4s䯛y0o|ٔZs:bH}Zp(cѕKaQ z|=>(U>Li2iuUx '$-S0)ƺ"5U5=D!ñ8ৃ*7m+3nRPȠP kP[Cvw@mGSl OJ[xu5DE#~j8ю2Z-#I]:tu0kI|-=Bn;ၔKK,v|Hj¦= ZT*${&ٛM&S46⪃=ň/) pUOӧD0]OK$&%\ ܜjLaI>W)6Ư^B3VsQ  <T ˒k`Rr(gMTl*jY,3юYjCIć&A09!įO.LQﭗ8(EK]J h` Kpj`m=֓$QqBDݳr?-{\G3*u#wLfwGfPC.%ps5K"m0`Zh[t|,yTPe4{JU3zy++SQgL)Ve+Lb̲$5Di(+IRR7N{YsRNl0 J@HOL!shbR~H!]\-Q\{ZVu) ws0umwe5 E\nD4ӔtB46)ׇ ɤ6ڐqu@To֠Є̔ >kgƹB_ߏe""` (IB~5(ւ(YwMqڃfdA f ժBͬ)ٗ(liA>f!/3%KliO)uJ!M| U{'kJ=Ѫ~hsx/{Ī}hW9XL(z#f^!P2sWbRQ&*^AJovaJL =ƃϲ8INSa8\Am+D JѫemZ[@KUG5cx&]-C3CgLca{*ڔCjv֮riϩ sJ͌N1֯ЉK&Î\Ӌ;<>X=^ g<-?K\\>\甩B+矣]_8 7l6pz~c:0*݇o:9Uټz˟+Vĩ竟a,kz絰ǧg9M,xU/*٦2y㲸'.럨F Elvmoiv(jjl$OT3.T]'PS=:h`Rn:r^ep: WvNK Ӕ7 h&%mŤ!-9\1WYz/0W M x b3~ye˖75DLJF.*h{a7<=xኲڌj4a=͎XZ&\?' a`6lrgB(y[!;9&jTmz~EDAHV__uqȅx_]~jM-Bc n_ș.9+wT9 " \ iK?,ei5W ꂋ<#MR2XbbEiaQ!Rh*eZiŭVRF,1v2T~#BZX J/- ^TxA{0FJu dAP#(x9H ]u[ QZ/ "u-KO~ghU!9e5y'hx6!g7[eg~4J畞f!h+J<:o&xʓQYwM#ZQ3|uJ&(BtOׅn>m.ͭI+j*S zSr5;w @gY`" J&D[ tV[>7oZK,3@H#&$۝,Ͽ?9⺴\'_rqG\ B484-4PXf2Rys2ÇﯢHɩN[{zT}8-ˢoU5GuV[Ң*ָ."YHյtBk #~OUigm@ u`m7M!f()ay\p2qvn0 bD৔R~^Q5 Z!W S! 0߸EzDW5%ڑ쬢B ]|wR <+eQZ_$vQ踒7nei:#6GksP#Pgw,-\ ,Utڙr޼KX 8e;+j; w%>:#={2FSP(B,gnqV hǛ{ ]irE7- !-,p[8"w|^ǽ-;k'fBڭb%_"A$;׸X(boE ]XonpCWEGeJo( BEA967zX!U L ??=J9!/!E_8pFA7U+1faЪl쬍amķ=`e4 %8Ģp^"uϛ몛84t,$ VU޵#bݼ``Y`gg^4xX#Ɏ_Ŗ,i)GY$HnUB܁T1 T볥I)'i5Ζ<8"tUxfPy],W|狖ehjOm @onGSX](G9:nZ৔r"w-"٨a: UtmQ^\h* -fxJ!Q*5:3cJR`/E }ZtwCn~)Zŕh-nlF*\il)9XSDfJ$WUFCz \ `̖fet%dmWu.gD4J, `}^|wfw5峲Ӆ}+2$r ƅ$.M{8VO>QA!v5z͍ ڧ^F[JT䡒cX_&+CKRUUI1f+0H}oQ$#am>eTZȳ}uEK*RitE@$^caA_+%tZ$766$X0ZY&-Ae=^J/x6_Ln37ǝV0NdN?n5uZst !@d6)?pE֢.VJ~]R\+C!Ufvx&ZOc$c5Cem$唵zlW=<5<ܮ&1m1I#3FF]@ G 0͜OL7_ψ+S7[C/h1(;E* JiYShz-*hMLc}x *9 _FEjFѠm$h YEq`{bx8 KMQU@gn_ˢt 5MY̜@+c]~T8jkcZL4a~Dr-*9(l I9$`X:%a7'#d)*7z;~DƨG݃UU3KETY[/i0<1G+Yu6369}@aY,]%^BDOY jGO&d:mcKI);!2ܶ~̕\Y9Jp/2o~uwX @-~':0^9O!QM}1OZ ܷUdDxxҴhxYJƖ!9O! 
L2ݴH U(uThѳfCtYcDĕD%g@%kvVX{ 7bv5u>b^^Ғn.thv7v,`liۊ@9pK"řX(q oTA;%;:dߌgJ^?MhhDr*pN3H$Az50RM'=a&kSABVƣgV&h0bN$E>`e1kQ6F+um'kdž|}&فDuBu:ek+ZI]#A*P6;;jA i |:il釔.1EU1fW0}HP)Ȟrj?gYQDž 4Soq-k>ܴjU %nj lz<@j4N)Ѡ0kK.}vo$ZhM6 YGfT<!sLra=H(J-Pu: *$0t*/BU:ٿJgceU勊TrZjjU,1QVhJOS RZc#Z웇S$*ثLg醼:U+ is0.?Eن0X5(^LjEek[ ]ΩH()rJPQ"ޣO)E S)+\&fh\R8 9ٞ PPPBL EQ'U&V[qmU20g",c>I(U4 qMmn~d.5e]a~5+쪥ϳ#ZpM$҈JY2k{,]_U9w~$ H!jdaE!mHqnS&9̓~}v *{*ucGMFpNx8Oxw{bѰj3*lq*f5sf:4RaIZYD^Db9WUpih`URSd֧1ltY>[L&|sqDs!ݝƮ4~Ƚܾ́. Aɧ}+OHS 1gaAeqk6^pcGw+Z8PN/9U']<͟Ne*u^o[*-O'>)#?inD{z?CrkMr5 gH^( {LjTk;8biG%x34;Y^xiD*-֯"5;:m9ЋZSs?m6{[OO~=|7WsnIÝ_ⓣ+m6bф1/&ץ$bwIW~~GnP2ա%|TTw?ⲻ{O *UXogMJjl6'V>۸Z{'Yi_˻q0 6-TJR%->32W.r83@+*DMpE#Z_lCBrޙX"Uhbd5V-0s2!Ы27D]. =mi=R5E3PkE3I~1fH5S6 u*830^rL:eV#z@AYSgciMˡ]> Cۻ`{QGis9~=ƷZYi@пWѪ{ c$;tg8|]1řb)/ 9BU =x`11.ov="{x^,M2>8Ma3p0j) fZHBKGK eq}O,iIʲ}=C=C %{wGN$2rB2Ɉ2k\̺baFb*V6<Ȩ}gsaL")Xnrb$WX+mrO]k#3 |:%cc؄3bwys7zKUcykԏPzLK|p&爜2N%RO"WvKaj?vuAz \|aBIIh EߊrDص/P/'WyBȨӑX|ezc N$SRg+kMo2:?`3\zƍ:D'ŃGϠ7/җ*xAIK>]\(*]@|.^F^Rv>*dWkj,fpKC? U3s棤gF`U@Eͽ.h! ڱ5eXߌ1yb F\aOAŮ/yWc$m`Z=*H >Xf"ό l)ض̙ C5OWڷj=_<ёL/xjNo@_'gP.ѺUg6U//n>b, ^|AeG}(Zd0+*VT)؎D>`(ՅNQDyG-35zȼfT XFeHj+C,:[r>4"DHqHS6 #]u^|Ӑ|3FsYHW ^J`(Gji6/ |OРw.}D%wo>~Ϯjp08q!2O>Pv6SJ٠ZOuv.4 o7w'`>9;=eeΌt1'g8qy,?QU\0W9MvVe,_/ڀlj'x~+tU%:RAImcS&O_D4^((n-Q@ג; Z5n/99kZdZ*!{"ߝ\>r^OnT|A-H2V[^'SCk_ʥT:/꣚q=,><\aQ.mj"Ӂ)jbxЯUQ ʣ,imZO~D{Oyz5؋%m(0m3`-8+gJ3k%}wxSԥd]ukKv>h- hJ.ְKWBk2./ {$*C`n11Dqˍz ˄̒l `d?ʏGn0ћ!YPڥQ(P*!u1y" aZ2ӎJc @%*,aZLSVki ԢKٕ-ь"iX\7#ips6dIhM44{L3\GOB܄@!Pi L-گ(KBeā#ZS-8+q9RqzIdzψI.<-y&G-tzKԽQs4'zO>|GTDW(ʢU*f,zZńQJӌwi.4 9̸5U 8XzA YBumGrx5Q3Li LtTvnϣy>uw_a A _f&(+p﫯aGRtE$*B#J qz}*?]_EcASRj 4ܡT\iNF4h F(C5喢k ׽j ]W, Z]"+8O~̗~m_Օ*ZDA=='e/ ؋bʹ{j!{gLbH6maSlp״]XɵTU9Olmf jTHeƣ&w4d]{.Ǫvɋ7PSz$GژtGFoQ3*8Tf6sqb?NZ\x&7Th^uo{a'Z΅A6ۮoZ!7,vZmhH }@{bؒBԪ옲 CHQ6-:}A%U޵0]Np?;E˴?A"GE"GE"GE"GuAN P#ɜ$T*SYLVh0q<Qĵ&9h^v6x?^W1qJ P@g~;-wS5Wi4n[`QF-nQ^h5mrb#b( @A1V;!%+:gLmT2l ѕ.>TJnnI ZC*C!z(Ai&}< $T*k{d0wAiᅈ>rB;|J CɆJ%Kքo܇A5a2ȇR>ЪQ!1Q]@1$Au^QeIsB2CE-Gk#͇hOK[2Hۊ-,MHWЮ6M1Pr'fCA *އsLO!ʎ+m~zM!e${~ u/Ռ2fa(W60e0Asqũm|s}hK?CV~iڗ-~Xa[oXEu90E)g70Ҿ$8(Hwpr&)'U<8g}]nP籦rS.Ci(ga^JԌp P2?TR Z.Or"O)eC9RonQ]&4C2J$I#ZNNOQ\|0oEr~ntC"J B 7v{]#a∵7?ʐ/nN3cNRȞ@p'EB?IdݏӋWK7KrE:||9k~шsR#rrV{pp3;g`t%o'cG+?yS]MU;7Vu]:cqo[pjV27kzOrjάxI ֐6.9m"UU X'uEdC٥,)2RLYe gni(aN >.b6tg5F[;XƀgÂi+Glp>e dU+(U:'/н9q9zm姎ξocNy&M]5ez¶F~P`Hۣz,W~كpU/apkZImnK~ŲaH*:/|xy3P]W<.zGbZHr7?2{aK_ D,U^'[ZԚ{]q^,B6\l\0x0*AQ1V=Cཉ4{:DZ׎h}ii _\5\ (6pmۜ \3[}aؿ+Fh ?^ ZBeABv^#̆_]u-)k?қ@Y^Ksc^z_{W&g(nךM[hh-)jCEs{,V}ϋUuWUի.esw:T'{%[[ No^BJ8vk߭vBB~"J }3& ڭ-MD7n/E+oݺ߹Ȕn(SQ55o+ k da7FT1vxXNu 9%EFzF3p ,A8m+5ʆ%i13`U pl)5.54?TT?:HDjFk YdHVlq^z_m P5$FhC+ 3gB#.  "b& okM;]+:$C2f䃿ˑ1"h;4Mk Kp Pfyɯ>.&L^sv]TgDcܐƓs=#R{ MPf4x&_IB>j[юx9HԡHVyQ݆`-M(t埶Ҙz&*?tf,63?jUJwem$G ❰ +Pf%ñvb5cFGD.f 6̄MF}Y_VWej<"#w[4h33:˭>eS̯ʶ_\;"`5#S90F #v8;0d]nߦ5dA٨Tm4$+e!Qq"yL1e `YǐܺN!լuiQu)Uln͛_*Z#zkQA ̄ rǝZBH]gTD""%HIlRVe0PeH qYt>1$e{QY8-ͯ>X VE*'-e,p4"21'|ٞXVNn*e^J*H H6yd]Jc!?f0}e2Ct\cz2E4)i&z]vCnz#U N AȓٗRmIblBmʵn)23"?Ujt`D$/N1%D}IeeUi1㑬v|46ؔƤYI7 +66*騒(P&Gb>JY@2}.ݚeabg.r t)\7> *AuEH@u9d26p1;KƀD 5j1 uDQf`A,:, ١09r2(1"rdi˩A]Ζ呅4͕G! 
H~\PZR⍱١V3E'0*] 50(f~9Q,] tK B#!k>M>W0~J T$hs&]LA3E0\ώUlA-x(ybLd4EYel&urzn #1NjߗFj}9y0mr8&vR2#q<7MsTLɒR #Mk[ZfOm̥{:ە-+F#uULmH䀼LP,c 4i!^DFHUrםڢ> ЪF}&_gYzL1 $R*v=8[!Nl1Niܰ6@_3Ύ餿?:1r" 2PBcҠ !P 464r5`,_;֡3DXX?DDWc朞C+?ar]rJop?~*܅˫e6.FhE W9rV-;nFEoZhr#ht?85N/Ҽpbo"k:PJ'UD:yb;T"wW7ĵRjOƞP)m@PQ˺w+Ԫ~tA׊௧_Z wh^) '亐,vGG˪ƭ?NU~ۊ?LM{kwJ)v6{Wwf/)ּ&Lb A4 1k"&Dɐ~cih$kԢ*Xt aFٸk^;~7w]jd0OǰIpߖ29*ۻ~$u[?mYkju{si ~UޭcFtu-q2`.jS&4fw79H{)H۹?V%pǕ><1 Q[׋ޛRG~Q%~K:.H*ê2|c߲ЌY_4f&hGwK}<ybQKϩ 169ʒ NRj6G(=å9k鵵6ҳZ͎ _~{ST&@g|6v蠯;xgsfX!Xme,v2/.d5KPgHZ1v4I@x"t ([Cd'([%C16osrן&ח y.M KUmCFl2l{o v}ƿ-^B[RP.\}+q0N*jTY)Ϳo΃q+r: U)y8-ޅ=;(ǟZ[*nR[Rgi8pTPZ"/שt}|זZ8OV*4R]ӧG ?ݮs{0:^6Iro8)_~YvDZ=i-ZZ >; *dMs#a {uc)XwAB<dCuoPV _^ċŗNZz8N9:/yY3ڎ?ׄ+Lւ(ip*`5DYV,ўEg`̳ \^ÿ -s7p@}Fo܊FgZН:EbŮG~?3dէ%6 A8.Tta-9T Fzzt1o-U[fVqO_]bK$^,vY^e[>Bn6(?s='`Lgk+X؝d_6b{]=j,~\鍯oÌVO%+'@' f^DNٰY@,*l6fs1PGRV1Qu%mH0A j4G J=;3vؼ~r]_53}VVZ6g+=G+e2Z iFfk8/=o+EJ.V*k{ZQ5>hm7R^D|Q59⟹6R7xGK`]8:凉凉Hkn 2uݧ/X~dAT/=߫b^%TVKtnE @,͖ o Xz>JT2hCY|R?rt`Ԣ+P̠A?pv\kǖe䃿@7YV`,P6E$[:Ơ!h裬΍|nR cj-ͳ')j1S=&y%rdS&fÉAyr/5aVS}֒xabPKubxAb@gѬ7w.n#z *B Dـ[KnPSXt[|-~|W2"}c @?}TN$H2YeuQͬ+%AXc*%<Ϙ fs N@&IaH2KJr)+*aI^ЫmV%c$$puFRD;¼9DѴ?He &iH*9)eH=( ^ICEW tz+x@CBj3|Io`ً&>_;<}ӸUV |HW$R$˜Fo|i,O"c)iNԻP>i TiA<+3##\x ɋ51 T>~UjJ&Bk'y' [P0:P2)i&mX SFfam0(cJpÑ{e*9j^K̈DN1wcJ9_QiK}L|p ñ^{ E$o߬G#$覻݁F'3+ʬTe蹢U jT'Cܣ=Z))ij0W&i%<f\igyQ&O%0qĨ1 &@{ -D(e!V|"FwG_y hr" G:1GD|ĚXOek؍uQ; 9G8y2pfg4j(Ȱ'-h;[wϜy9rt"PB喂8o^)R18Er%4Ѹ;Y??C%Rɠŀsg 0 :Ne1GǷ_DjB(ě4# oI<&M !e2ư(3o*!bhP)D yFK5Z"f5RR!:Dur)#E#D6*d`dm ȗ_( sRꨎMS iBi;-,v5/(yC ;ЈA@B - P 5 W&019x"7!3B6F "JyDVd<tC ,7q~j}y?\:2tu> RcE2.rRɻ@Ħ3Osٟȟavoma8y?p(ML j>. \f8 @Drkhn5'cHOd獠/o \w_|!gzIv/ـ 1&:d@s_1$"4H2[SMү`dtJ`'I 5@K)('Yqpo4cIɤ:9RaQL(]b#%#mIW&t 3Z'Oz 1峄mc( z髍QyL{eR6ӠΎg9Rq`](_2.JDFiζpˆJ]S"JhD.ĩX")E[T;zIrgyƐ)h.-uN84 bfiD1J$>"&%|ni57v4M%"hV On2`!XB(35 ˹fsdq|:K"nLHSl'sͱ:KX'f5VQt Gp0YcQ텯A޸|W >A8E=;ۉUwJf'V 2HeDwPĪ_CJnLf}4kD錉3E^"u(N @ ֜X` FR*N ǘ 7/X;1X` bESXupQN5c@cfǚOӝjƪ dDZ!5SAfӡ FꠧL+X5(!.X{ф+ kGX]wu,2WW! ր1;W[Pw:z'7Boupw;U[;C7Tk*7}P槳9D?_M. )>B'``}x/O\(Z]2]y a3ďׅS'ɣyŐhb< B;&^H"|VHm-*PLMx4)޻Ȃ /*0nm  ׍GAym9 C5i Iv[bXխhm|UC`- J>* W|N06BxjIa6"N85dt0-. y7GqKnoe.  TD!.n!kZ Wb/sEmpy㷢\|TBxiuӔ`7'>N= )"۹?{N޿py!u ϗ|@N;x-[2!RHB z!YРr1xʣ.-͟`[ 5/he9g:YGdV`8iix& 1%Xhm/diܛQp㫛K{^":~笠PuB(^`J+@|5d]^ɟ>|K/JM'?>5x}={.;~z{ ӗwh~̭ţ? ^ ,!0449Yyɓx?Ͼ0@]ѡ F ER:&qP>JM!'쭣\򋫼 d({9 -N[fMiܝ`/Ȕ'-`*ȧsּ3|j"z%eƙ9iGy4J%rͬT+|KQ-b=%!,tޛA=FQ^)jrANJÅ7 ݀#Rk}.NL@^}r{k!C_<}o%bz:u_9grrƛP8(C&+y2Dn9Fq%AkYĜPǒpa{܄`e!^b`]5}ۗͪ jǍF$,H{,Q1FLjg@#,"VR҂cMfv\bڈ໵FbIU{4@1|ןz? 
WCLv~:;8Pl8|4uiĢ\u`1P˸SHT.-Qk7rB :٦"M_PX|]l 4]E&~`%LV AE7 K8̍Wz\f,Ռ!"ls㩸H\A!9 RIiz7X%bTJZ3x,*;d"٢T2۶׻LR峃6:*B[o.oښ-YOiR*}&W 7T'ZR*۞Tp@pKno&SckJ[ITTs+0RwAB%*)xTQY'^G;AXϲJ{2ӔmDC_% n`ÈN`6D h˅NBqg1L :Pl#Qr ~ !FneL*a<Yu&<*PQVa~>8>/|Vs?HPXO|<,Hi'D)&o~HnNjs%C|O r=Dr1ۇ=̓!秧Ѫ~@;{wMCfNtv-~< |>  k`(jzkZ2` lxRJjUqC lWs1ojP\eO*KN)!0 jt 4>Ha^^޵}86 3kAP}@Rh ]Duik["~~ԎT#06k-)%kܟd>:Yy aگ1؃ y׬AZy/u!7r8?0%WLӇҨ\/RnFRO0aV~mi\-MȬMwd~Nm$3nAS:Myfݣph' d(s>r_ϣ_0n\SZеڇ~]~u8n| o{>ل[||GjCrn_Go.G9l[2ky^~ErJ5yteז)E膝7;Cp-ѐ 7&ȋAADبq0ʭ8lY>)Ue[#kC7<9v "z:$t"M.e ?Oi7YS^F̣wP>{֚(Kט.#Ѵ^[/OW2㲛wYK~5ҵ0h_b;#z |X&v(KYzǮ7 `v¯ɹ?'a}C8OD^G|7{7 8F ŃX<z8-;:m_< ng6yI-n8n#cύ!/v9>@st9x<+ % ELĶ:J!ZovkJ*u_/ݚRQ!!_6)!3۠ݚҠ4}G0.EtݚXQ!!_>_*w][5(k ܼXneۿ(+\#,6Cl2/ԩ2֭՗өl/zLOVC^9Ep 2iܷn]YW+;JYwBv^7κr8<9k$(\Ǣ|p z(JAڋ ӴRcA%3Yg,಴*X/XzLqNjKa];o,=.8/5.,=kbOb4":$L4C2KA"NQ1$"1\sltZE$"it~E\;;H%x/1y׻s[~SUK%+]\7K)c)e}b19 &#KA[MczcRRۍe5},e#uֵ[uݱ\\Xz,v%LQSMqY!a$I!2Di$ &:fIJ DRSO4?[XJ^Ԅ_|gR=[ɉ@kfߞo-5ff)\0wAb`)C~,M_Xz,#uڥR[z\qNj;Kyԯ0_BV^=Λ,A/XJKiAY 3>s'"s_Im5KxzbFD/Xʄ"@eא$;~4"h:Fkf H%F1R0aj$ X)ERDŽ$V ;P#0J,z}MϦ#m0-xa -U[ f<> a4~b6l n/fp\<,7Wߙu7IrOt-һ3-~X)&Y>=g_o* "‹pmnuЁm'c-|Z؟rX6T B/2PN.oʸJ/#|iR)O)&X^Ad%/N~gkđQiOV`?GWOrO~]lz81@%v[o4ѐM(L_;9B9](O]h="KRCaHvϹun"j_ƫM. RN^&F&EDŖ1Risô֡K,BD5q T T *@>* ۷'rNNg$%%l/GD 0zw0#;Ove|RyBi{{gkUrx݇v'gwi:PkW*1_v]d&TRJ6ұ=TDZ2tV&5}%H֥Rwa_%jUIT ƫ.ި͹߁R1^@)uN:3YPZ*%t]؛u$shSU Xן` m@dvoʥR ݘg+,On8?f*xRs]6 "f !fgPGL(2HlXe*<8&{B|%s_K9q"hur9)/,y! .p9.g VV~^JUfh~;܉(̤s\I3, Gx}Cc֫r} Y[B2TiaP\=bJk5pFϡLkFbNeJHuƞjxT۹F @! 0zhBc!D>&ΒϒS8XQсwu)vE\3G{Whլm.&n$+U)*-1 ![ĩe(Jd$FRgfQl#CYkCpFDL0>IL!rdSpiY~Q* ܻĊ F)M"aBQ-)IB2dD fbSZ CΐgzEq(-Z1'%,(42ƀ@2B D  Bkpš!gZittsC2hYb7eB%"'Q f&qui)X%A?z?toC 9OOYd{<  @'}=<d0K Y(젊%H"Z *1 1lncN, X% ™2! P-H`ov0)x*<;33kg E$.XඕKSS#cOJ<PLRRB)m-|ISk9^%CJu͐t5f(kUrkq&yfU! Fl NȔ~#ićV|ŗgD?0/h͟N8pDfp5b<?x+)Ru ׶r>oߏ:O?9n?2;0h Vb$$9=]@fW,w`¡Me)$`u{JbHK8X$[ixn${̟V靓- W> aOl!{&3Uavw~b˪@2!uCb$k@ŒeHSTjg5"raw0J=ۑFsկ_Wwl_}zw=]ږUeEVF[b1i z5ʉg/g2߮u-hDJ ˛Fc=fu󱃙U;R7#9'ѯAx" >'7 HKxcIлO8w,!/QHw4_@X (v񱪃 z35zf|>?;Ҭ/;}R&k)|3,-3<ٲ ֭kX|%4J(:9yJH.6qwT,tUrk!r Yn:TO\4Fhen|ŨP4XI8?+Fu_) :%5'( '׭*R !Sù`D+;]jBNݨeUgzIk%]T_س,2\ĐWNJ\"eϺ 3кU:UQƺut:5V=ku!8%U-{RСuuup'Hk r=[ yr@pʵ ةM& jb]v @],p츬㜬ay{g<NS}Y1}⒇-NEO^ Wf܀6wn;VVdJb].-C*9%yCmW`Jij!wjxawR[TIƋeru=cx&!d|- (sSъ? ePbngXKjMK -kkEW'wUi!umθ%3l)FCeEF&VGeRtyZk>^#˜3(쥚w`[A\tWsY8eqDՀT~g=Z (lQN3D~ダٗqQB#x7ij $ oȏ]q mOr>\X:jƬ5g玘GhR;w+p9 /^:^:^:^fy~@45 Y"91Ē&*11WrĨ+ T qD\oާ?GH:vT}hn_ֱ$O&1+<If`I'Gbz*FatE(iPULG\L\ U!2L)Bb&], ZGMϻw{_e0!a^e#qE" \h(HN")H q H"2,a8\jX,Bbe0,k_X4H.b) 3 $ QDv͚D2i k嘔AJgvTI,N$ +"T,.^X̼+k3ܪF*J" ve8)(V{!*L%iۻb{BR7'(DܢJF1`W'3H6|bXs}=dPmNT%+mf_>i޵>GneJ_rI<@U(U+R$^7#N?38$UNnN:R'_R \y=Z:R1U$j"嶓CW_L2e0`e,W_ 9$ Y2έtSѿF6:u( ojZPkۏWCGZ`OөT|0&'Y!TeEsNڀ e$snr Q bB.ĎO-H=޺QWi-[Ј᫚O۵BF ]B<[D[2E&RfP7Ri$8@d ̥i 30L%אr`lb%U,Nf#9.ޚx=(o`܌"A濑]\DEsA4VQQp1ȝ5ր Ҵ]@n7Bِ l$^]`03f$clx'wN 42lED}0 RZ6ֱKW/m0>תy]kG;d224 1Ɇ_@>ț f  :aos`$݁?3q7_:Sq[_> 32 9)sZ [:"o5D4CM2R-%~زL=bC==o8 k:|~Ѹf-D@mśB9Q[AZ>[EPQ4޺fF>6)a[a^/zχ%7/\dTUP۬0^?V3rnuӨWyW=4&<\yG DHbue[5+uک1\jkskIUNʇwz2S3+߿nj'k/W! 
pySL݉#먭c*Y;e]p V;)͗`!Z-sXL?_?0[?=VOaY>6?Sx@cϒ}ِEP^c;g0x}m]ߒ+\%Wy=0] h{-#%}NRjOb3ƹ%D[LMY1ZiܤLq8tIqR8+ V*GaP JJRj9_ ޗ* '+=F+%g +]T_TK'+=>+SxnCM}blz}Kzû4 Kkk1_yZqAyrڼ!^yp|߬e[B_Czn 2b((wRKP7{Mq`LV(]K@-ڶUKyg3 ZRJz~R *)YR:Q\'D󃯒ji+`\%Yx~ҥJ 0*YͿ"7hE)#Z#T7j͡u4-2TR4z0a|8e]!Q윽;{֊cxFw;ND48ZRaΒ&rgW,d.d9 Zr1M2MhRPV إsEŦ,e2e'K˴IyTyKᚧRT&\j'D:2u9+;oj+1ZĿ,vz|UJ)[iFJ8uuBib۩Ĩ0=kЧɸCG3Jp"?cz1m# kO=|F<tL`ZSsRDC /\RFufgOm7oUSoԶ|St#oM/E2*a$s8QBwDl^wj0Vj!͔A#8YS;L3_t:ՐS%7˥N=#"ްVyde"L?$kG%J$4cF"' .A&sBJZ6" ]BU?%1-"V1N+R& 3OVzV%mJ⬴'+=j+%gdW aVJ&J 4'_zV*yy(<c𥻩P eNVzV7^3QX)8+-& dGm 7zV{'lVJ;jOS`JvrV@Pkjm d,i 㬴:ЏJ&jD-TjV7QK&j XRqKF1ST L㝦 1T2#,g(R4\ %4sB$8V.`"Fs3)4Jت Tw77W_~,~]kvCEX#K]L-P8k!}XU+do?5CPGt+j 9z{}iѾUwsͮ) T'4I:&=}-+h|a#|e&C8n,͜f(EmPW;uJ} j1 hZtd/HCVByRC,N[x] Sm=0=CF=S1E$bAUT8!]DEZ 4/i^Ӽ yY62~ΌG2k$^9JP\ d'%KFoVhm?^f NcN*ˌ5LJe.<1s R99(C>q#~m6P Q _UuڮVb䫽C|5&wy r;K:){jof.).'YL$fH mxnu qc!$ PB):i42Qr!yΙ:),F/ɗ>_-旡,+R.uQBb4/Ξm~߰*߯|?? {m=+J?zc;6Bbͼf ߲\)?}sn5[pPA="%@DйsS2T3b<ZKRs^nr/.ȵ0 )1 K$Ag3l#F_*οݦ_.6 !o^EdGS*~b$|yIk%բxÇ+~􏫛p#0SƧtŸg?>yJ!=^_nЉoOnـhsdlrm]31o"aF[]M 0>=X ϖgo_Ɨ +*[Fk0k[}?M? (~)-ũY'yzPM_-o?9{K7kh3{֖sq5QO}bu=Bq়.F(ӨqlV\UwKZ2$AqCA(/)u Xj 'j&_xO\58?t&3^t7wz;wtyæn)qq_zhH}()M(ɫj"wTR~nFmk(><-$E Q3,V T3SC* BjVZ)tg@tɂ.Q Aٲ4g@rv˩m3HB,HŐ\4N)ON1͒iŒ|LiěVJSƼ~owTז#Ԡea-JE[qG  TFs66`чyM)jK?9X(JR/ʼbD0_ԕ ׸ ;~j#5m c^0Yw^EȲB.%VпK*T 2Yy>,7@ GbO)ȟR? [ 8(J8ʽ4>0S\%7$NXiC !@bP*ڇ ou&0T&UQSz`z҇J2>[ +[g_n 1/qqwe~e_5x&qQt2UB@ <_LL#ꩳVb>OLCA) HNP\HZD  1Ee^ 4Z!,A! qlt~b*vA!OGg)&n'S!q| wWoJqJ5RTm D'iЅ}:RP*ar_svC 4|asѕ44NC fķ:t;R/߁q3rLѵLjp4B) EyC [ՖE+I|v=ٌ^I"*$\L|()+vkƺ++{Rq\?E45 ^q`RHJunT|nTMOܨ32.S3NsbvITѫb5˴M r?k"F-03Ds{*aqփ] 9ݻZZw㤧y`! 2~x|:|0w!o5❅X뽛HbwAtjQһoɓb`ޭn#w!o+♘Sɩ\V$kEƗsd|Q*j%BDOϙɩ$繿6Ѳcr?L\`_!h]`n;,]py>e.S5-{|>q>|PNI.SEw¦Ks{w|c?>|6Ḽ '3褌e'~?:`F 34QBDLr855Z;~,ǂeƂ3_8q3x|N/uyTsB*ݹyFNXy*$J4.#% ~>/~n 8/vyuH"%:H@X5zQ(ÍjN5( zmQM=n:uM:Oj&fl#%6VlTcbC  =B?$R0k[&$PG%l{=)7J)Ch(N^B5pk8'%!@Mcs* V3VhbMHNx4 Mi$w< '-um4~~";|E|bWJqq\TU8?Tr܄3BK4$C;`Ho†tŽ Ap!(4Bv˫U uQrHm`m*zV$B6 -ę7'Lz>ipEY ܭML74x3+E0 F+h[``1;y*zh 06m.$»]ULҩ~^&`%c\Z Rʜ h%sVG Lj͍,Ӎl8eRށo\+x{!*!mTZ8mIt K ^Yj,zeFwxBLCU4`Rj]c"{c7d)SMRFH}F 1]Gb9dx7L)G?Rna>ȁN3Jz#ξbLp$h[y”Уѯn#uLJdW>H`xuɆN{!o{cJR)ו*)wQa#;$Kf3Ao&Ao9tLfT9/t2zf>Yf,·ـ/rr\?3D;{i4Rד{a8!-SWc{ J׵,m'XQLb;qw'}|׾n7'wkcR^Owɀ=yf 'DMˏ͹DrsȊ]!{-×z JV+I!v68w'aKUJIPUQ1qFI4.۳P 2;RZ RR=EAd'6ȴ(E*R__c5g?Y23l'?IuR#F~x=M@ZNo\zQi Dl`0E4'X* un'b]]^XO1@ŨBO"Ǔ{ʹ|my+~KPvȔE  [iҷBG +[97yz,"o @KPq ԭr@^v?\)pM)/:9VxflHC^R%44]t!G0re= JVyQuT{w>tvy{76*z亹yr3m=VL3S*8#S6zmBӠ1`6"aׄ8iM﵏8!4k^S h8V[o Px{Gt!JxA5@YXq{i*8l@4 ,m5"+m0~ݤPSؚ5JCb(mmiutղVwBw'PkS1TLER~D\POhݻ_j¹XcӢ f.@5>i sb>FPj!,Z&,D%;UGJMGcw]uX1T4 9w*ߥY4U]+Lp֤lIV[޽גz_BWWsbӢħ! s3Yi"(\`BrRL!,NBXI->E&+7~,){sjl=F݅8bşȭGNwָo/3YWIJɾ9a1lŊ}v䇊y,/L.qOo;=]?9p=F<^yW]l.Af`J%FI*LmjEM!@%  Q ^EC4,V3}8g3!qg m2zY46#җr!Se{S\s^ VV]{CJRC  ԋwk3{ im D:AF:(ZlMd+x ]K4Fw.H$_:S:mt?b2zCɭbt8vT),Ѵ ݁jvФm3_uB`X{MX`F(.$JdT>t$2 ;?R/ ׬Ljfo! O>XnV4ui.4uvjN;`oSmWߧ Q}u>Wn1(0*r*0M18A{EBB9_0Q. 
w,`t(]l.'gMpch9DH2(UAxnf4JL7\_[)w(ACKz%HnKȲ«+h 3 \ef$pBk&$W`kj Zy[s^Ź3*_i%jp?͖,ѱ4eGn%HM^ޭna,jݶ RxJ*L4Mzgoý柷#JҌV@^m"ev\>N cH>mzȬ-y5ኢkKL5~JSN-ŴVAq9u[)Lݳy"㩫G$u |j YڷRrv<}*2T3:VWM;`Xݼd}/> t2w!{AՕ/@Q:Ո5O AUg- PIHO˼\PRRbV[;TZR)Ж(P240i8%V@+H2V@EEuʇ%%Y0n,)ISfnMv ;ځ568BlE5qf Th遑 fNLtzm4el]1NWf*Sõ*"W-nZV$$MRK*9q h1-}x܊b }<[5aQ$9f;`[q^g+XuEAyCe>HԆXGЖhOcCrP/6  :fͤ 9\aWb{gC+ƍ`sjRmܣ}T[  T+pKQ(gI[3bd_6A5 X[I4mtEѧ,YV]+LTRŭ͏Sw=wНL1:}cngSYyκSD Ђ(Uy&R2jk)QΡgPkNE<)}$^_M.籈-4OC晓zh~V?+g${-BHfq-jZ/|YΊ盡M/O_*\X}3l uHDоRt^&c$qdžRʮ%*rx^ wZje~}ŊY>mZ1']Y~oG|$?ѵ- 4*z&ք5MpXǭK:跔ccuGtc>_,\^/^/^K:07b>Q ~yɩ$D%ń^e<_4tr+p g߸m2)1J\!ຼثbZFN/Pi{;r PAG x񐍡8U_j z'HϐWNpb.LRRk.H c}ݑZc"ݬjUP/LV'ƛet?ꗅ 1h.7]pgy}^-_"uhs1?n-gvƛWDWS=%Y3.I׍6+R69py:#>¢A@MĦXg;M[))Sv u[EnGm y&ZdSZ}y׻V![!IS,vU~h7V/3}nh>.>|ySط?eAa◊t2-Y،3`\z.PM8l3nlcvUSH(1o7P68I<ZW`4ma3t$4㙗L{ v[uʹfa02JD ,$!QZfI1%fvQYuɏ'ǕUs.gU%4I&Wm)Ŕ3IiCY5xJ87UE%@[/)ؘ K@TF/|2Ojk,cOڋܶvH06V'2T "ڙJzgcxQN18lP qlvKňϸ~# 5'*"Փzӟ~k>#TJRwۛOd#?Q.CɟϾ,/__0tSB$v?7'SWB_7]i+A[{)J(bZi-O&jQsCA E/cEu WY$3~j$N&*ķhIlfS0jLсZF g v븯`cR)5Zˑh.vL:(|5AGoڲƇ`NywDS&.g/bQ莛s!PJُ볒9ZySM,,$vWNTp Ok0 j>ő N0uT)Jyddh&VWKN#[S&ղߜ:x}F&7{}oK +O^?mnFWewoc4\7W7\mTIL0VK_melMfb& t| *|=3^pEv6@UȪWlN~eWe&m[%14=ntiu~!0I&M (Zi66rtQ$FpJ cFR~N')g::=W$Ŵ&O$H}J8OOdM"JYigǐk`< W߳2P3^4~ ^f-g9\qLZdYKpJYF -PJG#xr:Gj`APؑ C߸ =XW3ʟ~/Df<6"u`[\>w hI7ŠirI.M\;\;\;\qA9eHRet iB,8#R{]sdʐ,f?|WX jtQfX+`ugS=t?5]۰w!SmjLbP&:ӱp-jA&#$V,ϔ9SօŤPS 'TaվϛEX^{+X_BT()N[['C--JO-J;7D"qnQilTf:1%%)c}0Z'VцhʥrC%Ce[ZTZTp7!:@+hS%TGdT ߮>W;$qrEYD?c )c [hf YhzGk<77P~( -Z؏]里&_%뱿yfzp@_Yҥ7\Z،컌w"^bgmo086j{T2 #pr#XM \bpgzjv}:)N,>M{s{a(0MqA÷J8 iH{iDyrOF{$,UJƱKCH27FC0)GjLb<)h vyղFoI-8:vʳ*S@~U]Qz(q9bۺQOyUj"ї6JvUQgg|&R\Zj*J\(=D"!~,&R?:)(qRPʹOZyD!T{ԫv'RϫRRJPJ%a@>)G4JC)LN×2R'5,I}D!=g|,f|B$P3~!5JR3묽L+3Jz^Z{Qz(%REI$POyUj1t(a.$$PJJ %3R]:zB.&R]8!G4JvV(%@׮ZjcA׌﮳s'R=~Uű&QJJ B B{ׄ q__,{-dB7耞v^֝{6/A/ܥki,R'3?]Ǐ#ae%)QŻSK\=<[fo?|,gWHC6 )G$U QL~;LR:-yZ2 1ߓn}I+ⷲey2L1'Zo|z o'_U^?GS|'fq械ͥ,W\4 23JڿX~ .Oif@*)fu+q<^}@JAw`X2ήFn3K?s>{/\\QV*m.voy^}}|md ZMQAl٥ݽ?~ʛ蟼t/cw7?0hNKZ31r{G+ZӧEz߉̈ũܪqhw9F]^X~NVLF`Fbe'<ɍ`G**;l7V7$M$g14V/K e㥻l%@+}:\/1] R#2*˴^dd|?;5X 9|V[_u6taCE|廛x[u?bb$˰6$JSt&cmUNs:Mb1v(-ksO5HKtf*/v#C D174)"d)t\0;˕2 HU e4CMh"Gފe4o.˃;voH[XWϲBboz? ,LH=|?wߟ_*_]/\aխ}おS ۟}u*6L_[}AwY~{v85dv-Y{=7 .Ho("E_C'7sC}C V[ھ82FJ]4 ev%\buA!v.6;Y7^0m_7~$ɽAra~X&gPxE'I+ u}Lv-%jz(=7[,YvFDᴢu 呍EPMeHP]fOw&{?^_M(iI P-'=U;Rvjfϕ+y \ 64pnq; ۑKx435HOhYϙjWz}]}v׫kF|lu~/d. 
Jan 22 05:45:57 crc kubenswrapper[4982]: Trace[1330529987]: ---"Objects listed" error: 15003ms (05:45:57.899)
Jan 22 05:45:57 crc kubenswrapper[4982]: Trace[1330529987]: [15.003252257s] [15.003252257s] END
Jan 22 05:45:57 crc kubenswrapper[4982]: I0122 05:45:57.899301 4982 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Jan 22 05:45:57 crc kubenswrapper[4982]: I0122 05:45:57.899354 4982 trace.go:236] Trace[1951298187]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (22-Jan-2026 05:45:43.472) (total time: 14426ms):
Jan 22 05:45:57 crc kubenswrapper[4982]: Trace[1951298187]: ---"Objects listed" error: 14426ms (05:45:57.899)
Jan 22 05:45:57 crc kubenswrapper[4982]: Trace[1951298187]: [14.42680711s] [14.42680711s] END
Jan 22 05:45:57 crc kubenswrapper[4982]: I0122 05:45:57.899385 4982 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Jan 22 05:45:57 crc kubenswrapper[4982]: I0122 05:45:57.899438 4982 trace.go:236] Trace[844422520]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (22-Jan-2026 05:45:42.994) (total time: 14905ms):
Jan 22 05:45:57 crc kubenswrapper[4982]: Trace[844422520]: ---"Objects listed" error: 14905ms (05:45:57.899)
Jan 22 05:45:57 crc kubenswrapper[4982]: Trace[844422520]: [14.905372545s] [14.905372545s] END
Jan 22 05:45:57 crc kubenswrapper[4982]: I0122 05:45:57.899453 4982 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Jan 22 05:45:57 crc kubenswrapper[4982]: I0122 05:45:57.899714 4982 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Jan 22 05:45:57 crc kubenswrapper[4982]: I0122 05:45:57.905552 4982 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146
Jan 22 05:45:57 crc kubenswrapper[4982]: E0122 05:45:57.907251 4982 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Jan 22 05:45:57 crc kubenswrapper[4982]: I0122 05:45:57.907480 4982 trace.go:236] Trace[862333242]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (22-Jan-2026 05:45:42.913) (total time: 14994ms):
Jan 22 05:45:57 crc kubenswrapper[4982]: Trace[862333242]: ---"Objects listed" error: 14994ms (05:45:57.907)
Jan 22 05:45:57 crc kubenswrapper[4982]: Trace[862333242]: [14.994253647s] [14.994253647s] END
Jan 22 05:45:57 crc kubenswrapper[4982]: I0122 05:45:57.907507 4982 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Jan 22 05:45:57 crc kubenswrapper[4982]: I0122 05:45:57.911909 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 22 05:45:57 crc kubenswrapper[4982]: I0122 05:45:57.919790 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 22 05:45:57 crc kubenswrapper[4982]: I0122 05:45:57.950502 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.607970 4982 apiserver.go:52] "Watching apiserver"
Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.612001 4982 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.612494 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-kube-apiserver/kube-apiserver-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf"]
Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.613035 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.613125 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.613257 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 05:45:58 crc kubenswrapper[4982]: E0122 05:45:58.613154 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.613320 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.613583 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.613748 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 05:45:58 crc kubenswrapper[4982]: E0122 05:45:58.614513 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:45:58 crc kubenswrapper[4982]: E0122 05:45:58.613881 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.617232 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.617637 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.617804 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.617954 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.618021 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.617951 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.617803 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.617961 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.618264 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.619499 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 05:04:04.771395573 +0000 UTC Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.658755 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.670626 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.693493 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":
{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\
\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.710462 4982 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.711467 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.737979 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.748742 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.760562 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.771525 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.805993 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806039 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806057 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806077 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806103 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806124 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806143 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod 
\"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806164 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806182 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806201 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806322 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806349 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806379 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806390 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806400 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806463 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806487 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806509 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806534 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806560 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806580 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806600 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806621 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806643 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod 
\"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806662 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806691 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806752 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806775 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806795 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806815 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806835 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806873 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806897 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806916 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: 
\"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806938 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806960 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.806982 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807003 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807024 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807048 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807070 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807093 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807116 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807200 4982 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807212 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807239 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807259 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807282 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807305 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807325 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807351 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807374 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807398 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807422 4982 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807445 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807467 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807492 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807512 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807532 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807554 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807565 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807575 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807598 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807621 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807666 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807696 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807716 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807718 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807753 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807774 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807793 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807809 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807827 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807845 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807897 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807906 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807925 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807950 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.807976 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808013 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808039 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808072 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808089 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808097 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808118 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808138 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808159 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808181 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808205 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808228 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808243 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808258 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808309 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808331 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808354 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808379 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808409 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808431 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808452 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808473 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808499 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808525 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808547 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808571 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808593 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808618 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808641 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808663 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808670 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808690 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808713 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808735 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808757 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808778 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808768 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808802 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808827 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808869 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808894 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808916 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808940 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808962 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808987 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.808987 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809013 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809036 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809059 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809083 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809106 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809132 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809161 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809163 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809185 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809210 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809234 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809231 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809257 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809283 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809306 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809332 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809332 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809316 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809355 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809389 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809415 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809440 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809442 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809464 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809493 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809498 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809517 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809545 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809570 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809594 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809619 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809641 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809664 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809672 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809689 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809712 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809735 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809738 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809760 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809783 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809807 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809830 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809870 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809901 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: 
\"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809924 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809946 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809946 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.809975 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810002 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810024 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810050 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810077 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810098 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810124 4982 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810146 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810171 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810197 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810219 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810242 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810265 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810288 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810311 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810335 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810361 4982 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810383 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810409 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810432 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810456 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810480 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810506 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810531 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810559 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810583 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810607 4982 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810634 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810657 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810714 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810739 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810763 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810788 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810812 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810837 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810882 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 
05:45:58.810909 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810941 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810965 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810992 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.811023 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.811049 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.811077 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.811128 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.811156 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.811181 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: 
\"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.811213 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.811243 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810216 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810256 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810437 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810458 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810496 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810585 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810667 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810671 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810737 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.810889 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.811014 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.811104 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.811111 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.811164 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.811261 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: E0122 05:45:58.811338 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:45:59.311317884 +0000 UTC m=+20.149955887 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.814801 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.814839 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.814885 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.814915 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.814942 4982 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.814968 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.814986 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.815006 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.815026 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.815098 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.815116 4982 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.815133 4982 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.815146 4982 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.815162 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.815176 4982 reconciler_common.go:293] "Volume detached for volume 
\"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.815185 4982 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.815195 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.815205 4982 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.815217 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.815226 4982 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.815235 4982 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.815246 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.815255 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.815265 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.815275 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.815288 4982 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: E0122 05:45:58.815372 4982 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 05:45:58 crc kubenswrapper[4982]: E0122 05:45:58.815427 4982 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 05:45:59.315409311 +0000 UTC m=+20.154047314 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.815958 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.816049 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.816096 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.816207 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.816402 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.816418 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.817006 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.817064 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.811479 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.811702 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.811713 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.811924 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.811950 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.812143 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.812241 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.812415 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.812688 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.812844 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.812967 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.813005 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.813056 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). 
InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.813178 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.813429 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.813420 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.813461 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.813471 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.813480 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.813569 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.813677 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.813688 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.813823 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.814423 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.814455 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.814599 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.814670 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.822935 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.822985 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.823127 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.823166 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.823286 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.823306 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.823355 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.823509 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.811457 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.823828 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.823879 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.823891 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.823887 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.824226 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.824435 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). 
InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.824704 4982 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.825274 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.825630 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.825899 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.825964 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.826135 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.826414 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.826790 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.827165 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.827489 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.827712 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.831069 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.831236 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.831484 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.831614 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.831588 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). 
InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.831755 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.831800 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.831936 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.832119 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.832138 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.832533 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.832550 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.832585 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.822932 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.832797 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.832798 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.832914 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.832874 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: E0122 05:45:58.832966 4982 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 05:45:58 crc kubenswrapper[4982]: E0122 05:45:58.833052 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 05:45:59.333034374 +0000 UTC m=+20.171672487 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.833218 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.833934 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.834384 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.834794 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.835231 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.835579 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.835694 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.836171 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.839784 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.839919 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.840037 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.840160 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.840457 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.840547 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.840557 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.840893 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.840935 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.841153 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.841198 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.841361 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.841557 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.841574 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.841737 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.842168 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.842330 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.842185 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.842448 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.842652 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.842542 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.842679 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.842808 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.842841 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.842962 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.843261 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.843546 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.843578 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.843627 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.843691 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). 
InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.843884 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.843902 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.843913 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.844033 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.844179 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.844185 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.844194 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.844223 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.844318 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.844432 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.844564 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.844590 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.844643 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.844825 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.844995 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). 
InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.845018 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.845030 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.845054 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.845523 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.845580 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.845782 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.845872 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.846031 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.846291 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.846354 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.846421 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.846440 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.846494 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.846575 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.846666 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.846704 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). 
InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.846984 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.847053 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.847083 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.847191 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.847227 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.847229 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.847719 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.847942 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.848040 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.848155 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.848209 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.848170 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.852031 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.852595 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916151 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916215 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916328 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916342 4982 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916354 4982 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916364 4982 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916376 4982 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916385 4982 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916395 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916405 4982 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916413 4982 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916422 4982 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" 
DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916608 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916421 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916617 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916689 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916704 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916718 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916736 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916750 4982 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916764 4982 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916775 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916787 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916800 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916812 4982 reconciler_common.go:293] "Volume detached for 
volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916825 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916838 4982 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916865 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916879 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916892 4982 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916903 4982 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916915 4982 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916926 4982 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916939 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916950 4982 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916961 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916975 4982 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916986 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916997 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917009 4982 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917041 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917053 4982 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917066 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917079 4982 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917091 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917103 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917114 4982 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917128 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917140 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917153 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917167 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node 
\"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917181 4982 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917193 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917205 4982 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917217 4982 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917230 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917243 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917255 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917268 4982 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917281 4982 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917293 4982 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917307 4982 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917318 4982 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917330 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" 
DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917342 4982 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917359 4982 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917371 4982 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917383 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917396 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917407 4982 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917419 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917431 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917442 4982 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917455 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917466 4982 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917478 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917489 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917502 4982 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917514 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917527 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917539 4982 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917549 4982 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917563 4982 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917575 4982 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917592 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917604 4982 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917616 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917629 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917640 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917652 4982 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" 
(UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917665 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917677 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917690 4982 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917703 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917716 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917727 4982 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917740 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917751 4982 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917763 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917776 4982 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917788 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917802 4982 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917815 4982 reconciler_common.go:293] "Volume detached for volume 
\"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917829 4982 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917841 4982 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917868 4982 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917881 4982 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917893 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917904 4982 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917916 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917928 4982 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917940 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917951 4982 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917962 4982 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917975 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.917987 4982 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918000 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918013 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918027 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918039 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918051 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918064 4982 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918077 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918089 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918101 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918113 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918125 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918138 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 
05:45:58.918150 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918161 4982 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918172 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918183 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918194 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918209 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918221 4982 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918231 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918242 4982 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918253 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918264 4982 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918276 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918287 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918299 4982 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918310 4982 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918322 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918334 4982 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918348 4982 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918362 4982 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918376 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918389 4982 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918400 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918413 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918425 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918436 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918448 4982 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc 
kubenswrapper[4982]: I0122 05:45:58.918458 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918470 4982 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918482 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918494 4982 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918507 4982 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918518 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918530 4982 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918541 4982 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918554 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918567 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918579 4982 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918593 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918604 4982 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 
05:45:58.918636 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918648 4982 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918661 4982 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918672 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918683 4982 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918694 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918705 4982 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918717 4982 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918727 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918738 4982 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.918747 4982 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:58 crc kubenswrapper[4982]: I0122 05:45:58.916660 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.062753 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod 
\"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.064807 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 05:45:59 crc kubenswrapper[4982]: E0122 05:45:59.086925 4982 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-apiserver-crc\" already exists" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.095620 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.095620 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.095939 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.096224 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 05:45:59 crc kubenswrapper[4982]: E0122 05:45:59.097453 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 05:45:59 crc kubenswrapper[4982]: E0122 05:45:59.097491 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 05:45:59 crc kubenswrapper[4982]: E0122 05:45:59.097510 4982 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 05:45:59 crc kubenswrapper[4982]: E0122 05:45:59.097594 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 05:45:59.597564905 +0000 UTC m=+20.436202918 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 05:45:59 crc kubenswrapper[4982]: E0122 05:45:59.099774 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 05:45:59 crc kubenswrapper[4982]: E0122 05:45:59.099794 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 05:45:59 crc kubenswrapper[4982]: E0122 05:45:59.099807 4982 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 05:45:59 crc kubenswrapper[4982]: E0122 05:45:59.099846 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 05:45:59.599834635 +0000 UTC m=+20.438472648 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.103593 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.120362 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.120395 4982 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.141578 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.148087 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.153321 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.155137 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.167430 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started
\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phas
e\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.180991 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.199426 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.212340 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.222991 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.230141 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.232904 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.241964 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.244444 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\
"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01
-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.247752 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.261116 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 05:45:59 crc kubenswrapper[4982]: W0122 05:45:59.263185 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-21a0c0db611b808de3f3e25b485ba0858c5c0190babcecfab164f3e65e78b974 WatchSource:0}: Error finding container 21a0c0db611b808de3f3e25b485ba0858c5c0190babcecfab164f3e65e78b974: Status 404 returned error can't find the container with id 21a0c0db611b808de3f3e25b485ba0858c5c0190babcecfab164f3e65e78b974 Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.274702 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.285392 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.296190 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.309165 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.322355 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.322453 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:45:59 crc kubenswrapper[4982]: E0122 05:45:59.322592 4982 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 05:45:59 crc kubenswrapper[4982]: E0122 05:45:59.322589 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:46:00.322553958 +0000 UTC m=+21.161191971 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:45:59 crc kubenswrapper[4982]: E0122 05:45:59.322653 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 05:46:00.32263873 +0000 UTC m=+21.161276733 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.324264 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\"
:\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.338181 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.424105 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 05:45:59 crc kubenswrapper[4982]: E0122 05:45:59.424340 4982 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Jan 22 05:45:59 crc kubenswrapper[4982]: E0122 05:45:59.424462 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 05:46:00.424433291 +0000 UTC m=+21.263071344 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.619896 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-03 10:01:43.779523314 +0000 UTC
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.625496 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.625599 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 05:45:59 crc kubenswrapper[4982]: E0122 05:45:59.625777 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 22 05:45:59 crc kubenswrapper[4982]: E0122 05:45:59.625889 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 22 05:45:59 crc kubenswrapper[4982]: E0122 05:45:59.625936 4982 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 22 05:45:59 crc kubenswrapper[4982]: E0122 05:45:59.625886 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 22 05:45:59 crc kubenswrapper[4982]: E0122 05:45:59.626028 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 22 05:45:59 crc kubenswrapper[4982]: E0122 05:45:59.626042 4982 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 22 05:45:59 crc kubenswrapper[4982]: E0122 05:45:59.626052 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 05:46:00.6260131 +0000 UTC m=+21.464651203 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 22 05:45:59 crc kubenswrapper[4982]: E0122 05:45:59.626116 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 05:46:00.626094872 +0000 UTC m=+21.464732875 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.727555 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.728224 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.729904 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.730418 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.730740 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.731780 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.732353 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.732963 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.734119 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.734772 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.735690 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.736290 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.737557 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.738260 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.738926 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.740016 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.740673 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.741793 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.742248 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.743254 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.743957 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.744877 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.745540 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.746061 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.746524 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.747315 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.747885 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.749192 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.749936 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.750973 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.751637 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.752668 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.753384 4982 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.753512 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.755808 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.756541 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.757073 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.759251 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.759596 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.760499 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.761199 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.762357 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.763101 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.764107 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.764743 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.765927 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.766616 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.767558 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.768274 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.769331 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.770233 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.771222 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.771356 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.771815 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.772988 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.773761 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.774481 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.775708 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.786321 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.795924 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.804706 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 22 05:45:59 crc kubenswrapper[4982]: I0122 05:45:59.824638 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 22 05:46:00 crc kubenswrapper[4982]: I0122 05:46:00.073418 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199"}
Jan 22 05:46:00 crc kubenswrapper[4982]: I0122 05:46:00.073501 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c"}
Jan 22 05:46:00 crc kubenswrapper[4982]: I0122 05:46:00.073513 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"21a0c0db611b808de3f3e25b485ba0858c5c0190babcecfab164f3e65e78b974"}
Jan 22 05:46:00 crc kubenswrapper[4982]: I0122 05:46:00.075292 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"0ea40b577ef932d2365347b65bed6524abf5803e727dc5b65d1f081a97c65162"}
Jan 22 05:46:00 crc kubenswrapper[4982]: I0122 05:46:00.077662 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4"}
Jan 22 05:46:00 crc kubenswrapper[4982]: I0122 05:46:00.077757 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"4f46c3b844a71c4ba4c06b67b62fe62fee0bd10aed1e1c95ce1873691f477eae"}
Jan 22 05:46:00 crc kubenswrapper[4982]: E0122 05:46:00.088125 4982 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 22 05:46:00 crc kubenswrapper[4982]: I0122 05:46:00.102544 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:00Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:00 crc kubenswrapper[4982]: I0122 05:46:00.121565 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:00Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:00 crc kubenswrapper[4982]: I0122 05:46:00.147301 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:00Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:00 crc kubenswrapper[4982]: I0122 05:46:00.163567 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:00Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:00 crc kubenswrapper[4982]: I0122 05:46:00.181450 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:00Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:00 crc kubenswrapper[4982]: I0122 05:46:00.201997 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:00Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:00 crc kubenswrapper[4982]: I0122 05:46:00.219491 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:00Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:00 crc kubenswrapper[4982]: I0122 05:46:00.236902 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:00Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:00 crc kubenswrapper[4982]: I0122 05:46:00.254545 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:00Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:00 crc kubenswrapper[4982]: I0122 05:46:00.270276 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:00Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:00 crc kubenswrapper[4982]: I0122 05:46:00.288132 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:00Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:00 crc kubenswrapper[4982]: I0122 05:46:00.304107 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:00Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:00 crc kubenswrapper[4982]: I0122 05:46:00.324516 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regenera
tion-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\
\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:00Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:00 crc kubenswrapper[4982]: I0122 05:46:00.333373 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:46:00 crc kubenswrapper[4982]: I0122 05:46:00.333509 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:46:00 crc kubenswrapper[4982]: E0122 05:46:00.333687 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:46:02.333625468 +0000 UTC m=+23.172263491 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:46:00 crc kubenswrapper[4982]: E0122 05:46:00.333723 4982 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 05:46:00 crc kubenswrapper[4982]: E0122 05:46:00.333811 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 05:46:02.333788402 +0000 UTC m=+23.172426445 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 05:46:00 crc kubenswrapper[4982]: I0122 05:46:00.339901 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:00Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:00 crc kubenswrapper[4982]: I0122 05:46:00.357909 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:00Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:00 crc kubenswrapper[4982]: I0122 05:46:00.374481 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:00Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:00 crc kubenswrapper[4982]: I0122 05:46:00.434236 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:46:00 crc kubenswrapper[4982]: E0122 05:46:00.434373 4982 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 05:46:00 crc kubenswrapper[4982]: E0122 05:46:00.434791 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 05:46:02.434761001 +0000 UTC m=+23.273399034 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 05:46:00 crc kubenswrapper[4982]: I0122 05:46:00.620079 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 08:05:54.195562915 +0000 UTC Jan 22 05:46:00 crc kubenswrapper[4982]: I0122 05:46:00.636840 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:46:00 crc kubenswrapper[4982]: I0122 05:46:00.636918 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:46:00 crc kubenswrapper[4982]: E0122 05:46:00.637083 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 05:46:00 crc kubenswrapper[4982]: E0122 05:46:00.637112 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 05:46:00 crc kubenswrapper[4982]: E0122 05:46:00.637131 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 05:46:00 crc kubenswrapper[4982]: E0122 05:46:00.637141 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 05:46:00 crc kubenswrapper[4982]: E0122 05:46:00.637147 4982 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 05:46:00 crc kubenswrapper[4982]: E0122 05:46:00.637159 4982 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 05:46:00 crc kubenswrapper[4982]: E0122 05:46:00.637220 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 05:46:02.637201033 +0000 UTC m=+23.475839036 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 05:46:00 crc kubenswrapper[4982]: E0122 05:46:00.637239 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 05:46:02.637232914 +0000 UTC m=+23.475870917 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 05:46:00 crc kubenswrapper[4982]: I0122 05:46:00.722621 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:46:00 crc kubenswrapper[4982]: I0122 05:46:00.722665 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:46:00 crc kubenswrapper[4982]: E0122 05:46:00.722832 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:46:00 crc kubenswrapper[4982]: E0122 05:46:00.722989 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:46:00 crc kubenswrapper[4982]: I0122 05:46:00.723180 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:46:00 crc kubenswrapper[4982]: E0122 05:46:00.723446 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.107936 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.110265 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.110372 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.110392 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.110587 4982 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.132774 4982 kubelet_node_status.go:115] "Node was previously registered" node="crc" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.133406 4982 kubelet_node_status.go:79] "Successfully registered node" node="crc" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.135611 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.135676 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.135697 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.135737 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.135756 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:01Z","lastTransitionTime":"2026-01-22T05:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:01 crc kubenswrapper[4982]: E0122 05:46:01.199457 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8c22d1f6-312d-4feb-94eb-4bba85065af7\\\",\\\"systemUUID\\\":\\\"a4516023-e293-4f20-9f1e-642f5f94b84c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:01Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.208040 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.208173 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.208192 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.208217 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.208270 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:01Z","lastTransitionTime":"2026-01-22T05:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:01 crc kubenswrapper[4982]: E0122 05:46:01.275672 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8c22d1f6-312d-4feb-94eb-4bba85065af7\\\",\\\"systemUUID\\\":\\\"a4516023-e293-4f20-9f1e-642f5f94b84c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:01Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.285349 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.285403 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.285414 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.285431 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.285440 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:01Z","lastTransitionTime":"2026-01-22T05:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:01 crc kubenswrapper[4982]: E0122 05:46:01.302957 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8c22d1f6-312d-4feb-94eb-4bba85065af7\\\",\\\"systemUUID\\\":\\\"a4516023-e293-4f20-9f1e-642f5f94b84c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:01Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.310649 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.310702 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.310732 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.310753 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.310766 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:01Z","lastTransitionTime":"2026-01-22T05:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:01 crc kubenswrapper[4982]: E0122 05:46:01.325771 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8c22d1f6-312d-4feb-94eb-4bba85065af7\\\",\\\"systemUUID\\\":\\\"a4516023-e293-4f20-9f1e-642f5f94b84c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:01Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.329580 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.329611 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.329620 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.329639 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.329648 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:01Z","lastTransitionTime":"2026-01-22T05:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:01 crc kubenswrapper[4982]: E0122 05:46:01.346229 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8c22d1f6-312d-4feb-94eb-4bba85065af7\\\",\\\"systemUUID\\\":\\\"a4516023-e293-4f20-9f1e-642f5f94b84c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:01Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:01 crc kubenswrapper[4982]: E0122 05:46:01.346403 4982 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.348283 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.348315 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.348329 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.348351 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.348365 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:01Z","lastTransitionTime":"2026-01-22T05:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.450925 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.451008 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.451020 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.451041 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.451054 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:01Z","lastTransitionTime":"2026-01-22T05:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.554065 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.554128 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.554142 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.554166 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.554189 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:01Z","lastTransitionTime":"2026-01-22T05:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.620491 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 21:36:43.67922391 +0000 UTC Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.661885 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.661926 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.661939 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.661957 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.661973 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:01Z","lastTransitionTime":"2026-01-22T05:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.687749 4982 csr.go:261] certificate signing request csr-cpzns is approved, waiting to be issued Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.706724 4982 csr.go:257] certificate signing request csr-cpzns is issued Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.764865 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.764909 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.764918 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.764937 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.764948 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:01Z","lastTransitionTime":"2026-01-22T05:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.792285 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-rl6kj"] Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.792637 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-rl6kj" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.802766 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.802843 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.802884 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.807479 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.811638 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-rnqxd"] Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.812089 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-rnqxd" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.815404 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.816389 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.816650 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.833955 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:01Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.851083 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/73c68787-8e13-4ad7-bef9-00a12d537c34-hosts-file\") pod \"node-resolver-rnqxd\" (UID: \"73c68787-8e13-4ad7-bef9-00a12d537c34\") " pod="openshift-dns/node-resolver-rnqxd" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.851137 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jf76n\" (UniqueName: \"kubernetes.io/projected/fef7fafd-1792-4aba-b5f6-67faaa11bdb7-kube-api-access-jf76n\") pod \"node-ca-rl6kj\" (UID: \"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\") " pod="openshift-image-registry/node-ca-rl6kj" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.851163 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fef7fafd-1792-4aba-b5f6-67faaa11bdb7-host\") pod \"node-ca-rl6kj\" (UID: \"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\") " pod="openshift-image-registry/node-ca-rl6kj" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.851185 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/fef7fafd-1792-4aba-b5f6-67faaa11bdb7-serviceca\") pod \"node-ca-rl6kj\" (UID: \"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\") " pod="openshift-image-registry/node-ca-rl6kj" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.851208 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnclm\" (UniqueName: \"kubernetes.io/projected/73c68787-8e13-4ad7-bef9-00a12d537c34-kube-api-access-vnclm\") pod \"node-resolver-rnqxd\" (UID: \"73c68787-8e13-4ad7-bef9-00a12d537c34\") " pod="openshift-dns/node-resolver-rnqxd" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.867684 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.867742 4982 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.867779 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.867799 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.867811 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:01Z","lastTransitionTime":"2026-01-22T05:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.867842 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:01Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.920581 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:01Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.952005 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/73c68787-8e13-4ad7-bef9-00a12d537c34-hosts-file\") pod \"node-resolver-rnqxd\" (UID: \"73c68787-8e13-4ad7-bef9-00a12d537c34\") " pod="openshift-dns/node-resolver-rnqxd" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.952085 4982 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-jf76n\" (UniqueName: \"kubernetes.io/projected/fef7fafd-1792-4aba-b5f6-67faaa11bdb7-kube-api-access-jf76n\") pod \"node-ca-rl6kj\" (UID: \"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\") " pod="openshift-image-registry/node-ca-rl6kj" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.952125 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fef7fafd-1792-4aba-b5f6-67faaa11bdb7-host\") pod \"node-ca-rl6kj\" (UID: \"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\") " pod="openshift-image-registry/node-ca-rl6kj" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.952142 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/fef7fafd-1792-4aba-b5f6-67faaa11bdb7-serviceca\") pod \"node-ca-rl6kj\" (UID: \"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\") " pod="openshift-image-registry/node-ca-rl6kj" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.952161 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnclm\" (UniqueName: \"kubernetes.io/projected/73c68787-8e13-4ad7-bef9-00a12d537c34-kube-api-access-vnclm\") pod \"node-resolver-rnqxd\" (UID: \"73c68787-8e13-4ad7-bef9-00a12d537c34\") " pod="openshift-dns/node-resolver-rnqxd" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.952686 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/73c68787-8e13-4ad7-bef9-00a12d537c34-hosts-file\") pod \"node-resolver-rnqxd\" (UID: \"73c68787-8e13-4ad7-bef9-00a12d537c34\") " pod="openshift-dns/node-resolver-rnqxd" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.952900 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fef7fafd-1792-4aba-b5f6-67faaa11bdb7-host\") pod \"node-ca-rl6kj\" (UID: \"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\") " pod="openshift-image-registry/node-ca-rl6kj" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.953957 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/fef7fafd-1792-4aba-b5f6-67faaa11bdb7-serviceca\") pod \"node-ca-rl6kj\" (UID: \"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\") " pod="openshift-image-registry/node-ca-rl6kj" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.970827 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.970895 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.970904 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.970942 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.970955 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:01Z","lastTransitionTime":"2026-01-22T05:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no 
CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.971589 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:01Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:01 crc kubenswrapper[4982]: I0122 05:46:01.987984 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnclm\" (UniqueName: \"kubernetes.io/projected/73c68787-8e13-4ad7-bef9-00a12d537c34-kube-api-access-vnclm\") pod \"node-resolver-rnqxd\" (UID: \"73c68787-8e13-4ad7-bef9-00a12d537c34\") " pod="openshift-dns/node-resolver-rnqxd" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.000748 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jf76n\" (UniqueName: \"kubernetes.io/projected/fef7fafd-1792-4aba-b5f6-67faaa11bdb7-kube-api-access-jf76n\") pod \"node-ca-rl6kj\" (UID: \"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\") " pod="openshift-image-registry/node-ca-rl6kj" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.023790 4982 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\
\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.049548 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.064525 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.073175 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.073221 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.073234 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.073254 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.073268 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:02Z","lastTransitionTime":"2026-01-22T05:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.083719 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2"} Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.084575 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.100663 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.110933 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-rl6kj" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.113696 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.122812 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-rnqxd" Jan 22 05:46:02 crc kubenswrapper[4982]: W0122 05:46:02.123060 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfef7fafd_1792_4aba_b5f6_67faaa11bdb7.slice/crio-103191e9e647ae764031472a7a53fbf748734bc3f724ca80743c28777b910572 WatchSource:0}: Error finding container 103191e9e647ae764031472a7a53fbf748734bc3f724ca80743c28777b910572: Status 404 returned error can't find the container with id 103191e9e647ae764031472a7a53fbf748734bc3f724ca80743c28777b910572 Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.135317 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b
82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:02 crc kubenswrapper[4982]: W0122 05:46:02.140294 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod73c68787_8e13_4ad7_bef9_00a12d537c34.slice/crio-ce800f2723d03e7cc28a9a7060eac3ac5d23592f6e849a071c38e6e8d6986d6f WatchSource:0}: Error finding container ce800f2723d03e7cc28a9a7060eac3ac5d23592f6e849a071c38e6e8d6986d6f: Status 404 returned error can't find the container with id ce800f2723d03e7cc28a9a7060eac3ac5d23592f6e849a071c38e6e8d6986d6f Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.155876 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.171449 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.177305 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.177381 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.177393 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.177413 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.177428 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:02Z","lastTransitionTime":"2026-01-22T05:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.188378 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.207934 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.229100 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.240874 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.254392 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.272613 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.286657 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.286708 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.286721 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.286738 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.286749 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:02Z","lastTransitionTime":"2026-01-22T05:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.355472 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:46:02 crc kubenswrapper[4982]: E0122 05:46:02.355720 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:46:06.355691235 +0000 UTC m=+27.194329238 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.356015 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:46:02 crc kubenswrapper[4982]: E0122 05:46:02.356109 4982 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 05:46:02 crc kubenswrapper[4982]: E0122 05:46:02.356163 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 05:46:06.356155547 +0000 UTC m=+27.194793550 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.389501 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.389566 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.389578 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.389599 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.389632 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:02Z","lastTransitionTime":"2026-01-22T05:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.457283 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:46:02 crc kubenswrapper[4982]: E0122 05:46:02.457397 4982 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 05:46:02 crc kubenswrapper[4982]: E0122 05:46:02.457461 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 05:46:06.457443995 +0000 UTC m=+27.296082008 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.492978 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.493033 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.493046 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.493067 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.493082 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:02Z","lastTransitionTime":"2026-01-22T05:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.596170 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.596219 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.596229 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.596247 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.596258 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:02Z","lastTransitionTime":"2026-01-22T05:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.621547 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 16:37:41.060571561 +0000 UTC Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.658533 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.658596 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:46:02 crc kubenswrapper[4982]: E0122 05:46:02.658727 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 05:46:02 crc kubenswrapper[4982]: E0122 05:46:02.658758 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 05:46:02 crc kubenswrapper[4982]: E0122 05:46:02.658761 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 05:46:02 crc kubenswrapper[4982]: E0122 05:46:02.658770 4982 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 05:46:02 crc kubenswrapper[4982]: E0122 05:46:02.658785 4982 projected.go:288] Couldn't get configMap 
openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 05:46:02 crc kubenswrapper[4982]: E0122 05:46:02.658800 4982 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 05:46:02 crc kubenswrapper[4982]: E0122 05:46:02.658838 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 05:46:06.658820949 +0000 UTC m=+27.497458952 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 05:46:02 crc kubenswrapper[4982]: E0122 05:46:02.658885 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 05:46:06.6588654 +0000 UTC m=+27.497503413 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.681736 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-4xrw6"] Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.682137 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.684931 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.685575 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-mb2qs"] Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.685917 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.686156 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.686479 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.686548 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-f98dk"] Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.686716 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.686774 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.687981 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-gdpxx"] Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.688281 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-f98dk" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.688935 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.692471 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.693143 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.693209 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.693447 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.693503 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.693574 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.693628 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.694534 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.694656 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.694750 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.694840 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.694909 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.695102 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.695346 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.699400 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.699431 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.699441 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.699457 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.699467 4982 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:02Z","lastTransitionTime":"2026-01-22T05:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.707979 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.709231 4982 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-01-22 05:41:01 +0000 UTC, rotation deadline is 2026-12-04 16:13:50.559516355 +0000 UTC Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.709278 4982 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7594h27m47.850241382s for next certificate rotation Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.718627 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.718647 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:46:02 crc kubenswrapper[4982]: E0122 05:46:02.718747 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:46:02 crc kubenswrapper[4982]: E0122 05:46:02.718881 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.718994 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:46:02 crc kubenswrapper[4982]: E0122 05:46:02.719206 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.721655 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.736580 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.750281 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.759613 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/64ec5ffc-3edf-4820-bc21-58b1ace5615c-system-cni-dir\") pod \"multus-additional-cni-plugins-f98dk\" (UID: \"64ec5ffc-3edf-4820-bc21-58b1ace5615c\") " pod="openshift-multus/multus-additional-cni-plugins-f98dk" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.759662 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/64ec5ffc-3edf-4820-bc21-58b1ace5615c-os-release\") pod \"multus-additional-cni-plugins-f98dk\" (UID: \"64ec5ffc-3edf-4820-bc21-58b1ace5615c\") " pod="openshift-multus/multus-additional-cni-plugins-f98dk" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.759693 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-multus-socket-dir-parent\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.759709 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-host-var-lib-kubelet\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.759725 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4zgf\" (UniqueName: \"kubernetes.io/projected/64ec5ffc-3edf-4820-bc21-58b1ace5615c-kube-api-access-s4zgf\") pod \"multus-additional-cni-plugins-f98dk\" (UID: \"64ec5ffc-3edf-4820-bc21-58b1ace5615c\") " pod="openshift-multus/multus-additional-cni-plugins-f98dk" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.759746 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-run-ovn\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.759765 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-node-log\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.759838 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-cni-binary-copy\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.759905 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-run-openvswitch\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.759925 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-cni-bin\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.760024 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-host-run-k8s-cni-cncf-io\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.760099 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2829369e-72ba-4637-853b-88f5cf242a0e-mcd-auth-proxy-config\") pod \"machine-config-daemon-gdpxx\" (UID: \"2829369e-72ba-4637-853b-88f5cf242a0e\") " pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.760128 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-cni-netd\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.760152 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-ovnkube-config\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc 
kubenswrapper[4982]: I0122 05:46:02.760187 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-host-var-lib-cni-multus\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.760213 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-multus-daemon-config\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.760239 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vj8lh\" (UniqueName: \"kubernetes.io/projected/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-kube-api-access-vj8lh\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.760276 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/2829369e-72ba-4637-853b-88f5cf242a0e-rootfs\") pod \"machine-config-daemon-gdpxx\" (UID: \"2829369e-72ba-4637-853b-88f5cf242a0e\") " pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.760304 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-ovnkube-script-lib\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.760331 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-host-var-lib-cni-bin\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.760355 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-host-run-multus-certs\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.760381 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-run-ovn-kubernetes\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.760418 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-multus-cni-dir\") pod \"multus-4xrw6\" (UID: 
\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.760447 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-run-netns\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.760480 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-run-systemd\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.760505 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-env-overrides\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.760531 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-slash\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.760581 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-cnibin\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.760606 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-etc-kubernetes\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.760642 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nm8ld\" (UniqueName: \"kubernetes.io/projected/2829369e-72ba-4637-853b-88f5cf242a0e-kube-api-access-nm8ld\") pod \"machine-config-daemon-gdpxx\" (UID: \"2829369e-72ba-4637-853b-88f5cf242a0e\") " pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.760667 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/64ec5ffc-3edf-4820-bc21-58b1ace5615c-cni-binary-copy\") pod \"multus-additional-cni-plugins-f98dk\" (UID: \"64ec5ffc-3edf-4820-bc21-58b1ace5615c\") " pod="openshift-multus/multus-additional-cni-plugins-f98dk" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.760692 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.760717 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/64ec5ffc-3edf-4820-bc21-58b1ace5615c-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-f98dk\" (UID: \"64ec5ffc-3edf-4820-bc21-58b1ace5615c\") " pod="openshift-multus/multus-additional-cni-plugins-f98dk" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.760745 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-os-release\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.760774 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2829369e-72ba-4637-853b-88f5cf242a0e-proxy-tls\") pod \"machine-config-daemon-gdpxx\" (UID: \"2829369e-72ba-4637-853b-88f5cf242a0e\") " pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.760806 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-ovn-node-metrics-cert\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.760832 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-systemd-units\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.760933 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-kubelet\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.761012 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-multus-conf-dir\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.761041 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-system-cni-dir\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.761063 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-host-run-netns\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.761106 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-var-lib-openvswitch\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.761134 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-etc-openvswitch\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.761160 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fr8wb\" (UniqueName: \"kubernetes.io/projected/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-kube-api-access-fr8wb\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.761187 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-hostroot\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.761226 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/64ec5ffc-3edf-4820-bc21-58b1ace5615c-cnibin\") pod \"multus-additional-cni-plugins-f98dk\" (UID: \"64ec5ffc-3edf-4820-bc21-58b1ace5615c\") " pod="openshift-multus/multus-additional-cni-plugins-f98dk" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.761252 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/64ec5ffc-3edf-4820-bc21-58b1ace5615c-tuning-conf-dir\") pod \"multus-additional-cni-plugins-f98dk\" (UID: \"64ec5ffc-3edf-4820-bc21-58b1ace5615c\") " pod="openshift-multus/multus-additional-cni-plugins-f98dk" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.761277 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-log-socket\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.764711 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.779146 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z"
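The payloads in these "Failed to update status for pod" records are strategic merge patches against the pod's status subresource. Entries in the conditions list are merged by their type key, and the $setElementOrder/conditions directive pins the ordering of the merged list. A sketch of how such a patch body is assembled, with field values copied from the multus-4xrw6 record above (plain encoding/json; a real client would PATCH this with content type application/strategic-merge-patch+json):

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Strategic-merge-patch fragment shaped like the ones the kubelet sends
	// above: conditions merge by "type", $setElementOrder fixes their order.
	patch := map[string]any{
		"metadata": map[string]any{"uid": "f70d53ec-9c73-45bf-b6b4-ec45565ef1e6"},
		"status": map[string]any{
			"$setElementOrder/conditions": []map[string]string{
				{"type": "PodReadyToStartContainers"},
				{"type": "Initialized"},
				{"type": "Ready"},
				{"type": "ContainersReady"},
				{"type": "PodScheduled"},
			},
			"conditions": []map[string]any{
				{"type": "Ready", "status": "False", "reason": "ContainersNotReady"},
			},
		},
	}
	body, err := json.Marshal(patch)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body))
}

Because only changed conditions appear in the patch while $setElementOrder lists all five, the apiserver can merge the update without the kubelet resending unchanged entries.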
Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.794559 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.801486 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.801554 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.801568 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.801594 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.801607 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:02Z","lastTransitionTime":"2026-01-22T05:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.808599 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z"
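The NodeNotReady transition recorded above is mechanical: the container runtime reports NetworkReady=false because no CNI network configuration was found in /etc/kubernetes/cni/net.d/, and the kubelet mirrors that into the node's Ready condition. The readiness probe is, at heart, little more than a scan of the CNI conf directory for a usable network config. A rough, simplified equivalent of that scan (the real check is performed by the container runtime, CRI-O here, whose exact file-selection and validation rules differ):

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// hasCNIConfig reports whether dir contains at least one CNI network
// configuration file. The extension set mirrors common CNI config loaders.
func hasCNIConfig(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
	if err != nil || !ok {
		fmt.Println("container runtime network not ready: NetworkReady=false (no CNI configuration file)")
		return
	}
	fmt.Println("NetworkReady=true")
}

On this node the directory stays empty until the multus and ovnkube-node pods being mounted below write their configs, which is why the node flaps NotReady during startup.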
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.835131 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.849616 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.862371 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.862416 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-os-release\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.862438 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2829369e-72ba-4637-853b-88f5cf242a0e-proxy-tls\") pod \"machine-config-daemon-gdpxx\" (UID: \"2829369e-72ba-4637-853b-88f5cf242a0e\") " pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.862463 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: 
\"kubernetes.io/configmap/64ec5ffc-3edf-4820-bc21-58b1ace5615c-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-f98dk\" (UID: \"64ec5ffc-3edf-4820-bc21-58b1ace5615c\") " pod="openshift-multus/multus-additional-cni-plugins-f98dk" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.862485 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-ovn-node-metrics-cert\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.862489 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.862507 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-kubelet\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.862533 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-systemd-units\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.862552 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-multus-conf-dir\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.862571 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-system-cni-dir\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.862590 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-host-run-netns\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.862617 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-var-lib-openvswitch\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.862640 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-etc-openvswitch\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.862666 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fr8wb\" (UniqueName: \"kubernetes.io/projected/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-kube-api-access-fr8wb\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.862649 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-os-release\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.862719 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-hostroot\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.862689 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-hostroot\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.862731 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-systemd-units\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.862790 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-system-cni-dir\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.862794 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/64ec5ffc-3edf-4820-bc21-58b1ace5615c-cnibin\") pod \"multus-additional-cni-plugins-f98dk\" (UID: \"64ec5ffc-3edf-4820-bc21-58b1ace5615c\") " pod="openshift-multus/multus-additional-cni-plugins-f98dk" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.862772 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/64ec5ffc-3edf-4820-bc21-58b1ace5615c-cnibin\") pod \"multus-additional-cni-plugins-f98dk\" (UID: \"64ec5ffc-3edf-4820-bc21-58b1ace5615c\") " pod="openshift-multus/multus-additional-cni-plugins-f98dk" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.862826 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-host-run-netns\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc 
kubenswrapper[4982]: I0122 05:46:02.862755 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-multus-conf-dir\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.862840 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-etc-openvswitch\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.862892 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-kubelet\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.862888 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-var-lib-openvswitch\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.862947 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/64ec5ffc-3edf-4820-bc21-58b1ace5615c-tuning-conf-dir\") pod \"multus-additional-cni-plugins-f98dk\" (UID: \"64ec5ffc-3edf-4820-bc21-58b1ace5615c\") " pod="openshift-multus/multus-additional-cni-plugins-f98dk" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.862990 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-log-socket\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.863050 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-log-socket\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.863188 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-multus-socket-dir-parent\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.863266 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-multus-socket-dir-parent\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.863283 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-host-var-lib-kubelet\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.863327 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/64ec5ffc-3edf-4820-bc21-58b1ace5615c-system-cni-dir\") pod \"multus-additional-cni-plugins-f98dk\" (UID: \"64ec5ffc-3edf-4820-bc21-58b1ace5615c\") " pod="openshift-multus/multus-additional-cni-plugins-f98dk" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.863393 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-host-var-lib-kubelet\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.863408 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/64ec5ffc-3edf-4820-bc21-58b1ace5615c-os-release\") pod \"multus-additional-cni-plugins-f98dk\" (UID: \"64ec5ffc-3edf-4820-bc21-58b1ace5615c\") " pod="openshift-multus/multus-additional-cni-plugins-f98dk" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.863421 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/64ec5ffc-3edf-4820-bc21-58b1ace5615c-system-cni-dir\") pod \"multus-additional-cni-plugins-f98dk\" (UID: \"64ec5ffc-3edf-4820-bc21-58b1ace5615c\") " pod="openshift-multus/multus-additional-cni-plugins-f98dk" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.863449 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/64ec5ffc-3edf-4820-bc21-58b1ace5615c-os-release\") pod \"multus-additional-cni-plugins-f98dk\" (UID: \"64ec5ffc-3edf-4820-bc21-58b1ace5615c\") " pod="openshift-multus/multus-additional-cni-plugins-f98dk" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.863411 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/64ec5ffc-3edf-4820-bc21-58b1ace5615c-tuning-conf-dir\") pod \"multus-additional-cni-plugins-f98dk\" (UID: \"64ec5ffc-3edf-4820-bc21-58b1ace5615c\") " pod="openshift-multus/multus-additional-cni-plugins-f98dk" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.863439 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-run-ovn\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.863572 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/64ec5ffc-3edf-4820-bc21-58b1ace5615c-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-f98dk\" (UID: \"64ec5ffc-3edf-4820-bc21-58b1ace5615c\") " pod="openshift-multus/multus-additional-cni-plugins-f98dk" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.863465 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" 
(UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-run-ovn\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.863577 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-node-log\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.863609 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-node-log\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.863678 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4zgf\" (UniqueName: \"kubernetes.io/projected/64ec5ffc-3edf-4820-bc21-58b1ace5615c-kube-api-access-s4zgf\") pod \"multus-additional-cni-plugins-f98dk\" (UID: \"64ec5ffc-3edf-4820-bc21-58b1ace5615c\") " pod="openshift-multus/multus-additional-cni-plugins-f98dk" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.863719 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-cni-binary-copy\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.863749 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-run-openvswitch\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.863772 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-cni-bin\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.863793 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-host-run-k8s-cni-cncf-io\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.863819 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2829369e-72ba-4637-853b-88f5cf242a0e-mcd-auth-proxy-config\") pod \"machine-config-daemon-gdpxx\" (UID: \"2829369e-72ba-4637-853b-88f5cf242a0e\") " pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.863869 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-run-openvswitch\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.863884 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-cni-bin\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.863882 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-host-run-k8s-cni-cncf-io\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.863935 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-host-var-lib-cni-multus\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.863982 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-multus-daemon-config\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.864024 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-host-var-lib-cni-multus\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.864057 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vj8lh\" (UniqueName: \"kubernetes.io/projected/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-kube-api-access-vj8lh\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.864081 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/2829369e-72ba-4637-853b-88f5cf242a0e-rootfs\") pod \"machine-config-daemon-gdpxx\" (UID: \"2829369e-72ba-4637-853b-88f5cf242a0e\") " pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.864405 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/2829369e-72ba-4637-853b-88f5cf242a0e-rootfs\") pod \"machine-config-daemon-gdpxx\" (UID: \"2829369e-72ba-4637-853b-88f5cf242a0e\") " pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.864454 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-cni-binary-copy\") pod \"multus-4xrw6\" (UID: 
\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.864718 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2829369e-72ba-4637-853b-88f5cf242a0e-mcd-auth-proxy-config\") pod \"machine-config-daemon-gdpxx\" (UID: \"2829369e-72ba-4637-853b-88f5cf242a0e\") " pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.864831 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-multus-daemon-config\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.864904 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-cni-netd\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.864931 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-ovnkube-config\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.864960 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-ovnkube-script-lib\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.864982 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-host-var-lib-cni-bin\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.865021 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-host-run-multus-certs\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.865042 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-run-ovn-kubernetes\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.865060 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-multus-cni-dir\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 
05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.865079 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-run-netns\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.865095 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-run-systemd\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.865111 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-env-overrides\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.865068 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.865168 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-cnibin\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.865199 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-cni-netd\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.865138 4982 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-cnibin\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.865296 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-etc-kubernetes\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.865328 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nm8ld\" (UniqueName: \"kubernetes.io/projected/2829369e-72ba-4637-853b-88f5cf242a0e-kube-api-access-nm8ld\") pod \"machine-config-daemon-gdpxx\" (UID: \"2829369e-72ba-4637-853b-88f5cf242a0e\") " pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.865353 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/64ec5ffc-3edf-4820-bc21-58b1ace5615c-cni-binary-copy\") pod \"multus-additional-cni-plugins-f98dk\" (UID: \"64ec5ffc-3edf-4820-bc21-58b1ace5615c\") " pod="openshift-multus/multus-additional-cni-plugins-f98dk" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.865379 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-slash\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.865480 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-slash\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.865527 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-etc-kubernetes\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.865719 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-ovnkube-config\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.865781 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-run-ovn-kubernetes\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.865933 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-multus-cni-dir\") pod 
\"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.865969 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-run-netns\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.865993 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-run-systemd\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.866357 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-ovnkube-script-lib\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.866394 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-env-overrides\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.866410 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-host-var-lib-cni-bin\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.866432 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/64ec5ffc-3edf-4820-bc21-58b1ace5615c-cni-binary-copy\") pod \"multus-additional-cni-plugins-f98dk\" (UID: \"64ec5ffc-3edf-4820-bc21-58b1ace5615c\") " pod="openshift-multus/multus-additional-cni-plugins-f98dk" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.866443 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-host-run-multus-certs\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.868697 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2829369e-72ba-4637-853b-88f5cf242a0e-proxy-tls\") pod \"machine-config-daemon-gdpxx\" (UID: \"2829369e-72ba-4637-853b-88f5cf242a0e\") " pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.873288 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-ovn-node-metrics-cert\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc 
kubenswrapper[4982]: I0122 05:46:02.880841 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nm8ld\" (UniqueName: \"kubernetes.io/projected/2829369e-72ba-4637-853b-88f5cf242a0e-kube-api-access-nm8ld\") pod \"machine-config-daemon-gdpxx\" (UID: \"2829369e-72ba-4637-853b-88f5cf242a0e\") " pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.881007 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fr8wb\" (UniqueName: \"kubernetes.io/projected/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-kube-api-access-fr8wb\") pod \"ovnkube-node-mb2qs\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.881039 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.881444 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4zgf\" (UniqueName: \"kubernetes.io/projected/64ec5ffc-3edf-4820-bc21-58b1ace5615c-kube-api-access-s4zgf\") pod \"multus-additional-cni-plugins-f98dk\" (UID: \"64ec5ffc-3edf-4820-bc21-58b1ace5615c\") " pod="openshift-multus/multus-additional-cni-plugins-f98dk" Jan 22 05:46:02 crc 
kubenswrapper[4982]: I0122 05:46:02.883324 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vj8lh\" (UniqueName: \"kubernetes.io/projected/f70d53ec-9c73-45bf-b6b4-ec45565ef1e6-kube-api-access-vj8lh\") pod \"multus-4xrw6\" (UID: \"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\") " pod="openshift-multus/multus-4xrw6" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.896611 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":
\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.904053 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.904115 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.904132 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.904158 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.904182 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:02Z","lastTransitionTime":"2026-01-22T05:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.927186 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"
mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.12
6.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.942026 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8
945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Compl
eted\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.956985 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.971822 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.985638 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:02 crc kubenswrapper[4982]: I0122 05:46:02.999635 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.004222 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-4xrw6" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.007952 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.007993 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.008005 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.008024 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.008036 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:03Z","lastTransitionTime":"2026-01-22T05:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.011475 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.019939 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.027124 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.027236 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-f98dk" Jan 22 05:46:03 crc kubenswrapper[4982]: W0122 05:46:03.040406 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf70d53ec_9c73_45bf_b6b4_ec45565ef1e6.slice/crio-b3c3802d8d13e8095ed5ce93a7b35897f24a19f7db26b746bae30f9d686e376e WatchSource:0}: Error finding container b3c3802d8d13e8095ed5ce93a7b35897f24a19f7db26b746bae30f9d686e376e: Status 404 returned error can't find the container with id b3c3802d8d13e8095ed5ce93a7b35897f24a19f7db26b746bae30f9d686e376e Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.043350 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:03 crc kubenswrapper[4982]: W0122 05:46:03.051557 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod45295ff5_bb7d_450f_9ff1_eeb4edb0d705.slice/crio-5e8c8e68d30551f0db062125d393170b625603591825be844bd20c1ca9c55e6a WatchSource:0}: Error finding container 5e8c8e68d30551f0db062125d393170b625603591825be844bd20c1ca9c55e6a: Status 404 returned error can't find the container with id 5e8c8e68d30551f0db062125d393170b625603591825be844bd20c1ca9c55e6a Jan 22 05:46:03 crc kubenswrapper[4982]: W0122 05:46:03.052163 4982 manager.go:1169] Failed to 
process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod64ec5ffc_3edf_4820_bc21_58b1ace5615c.slice/crio-afb9a134cc15df0e285e788bc53aa55c007b7402ceab2fa6c5890ff9a9c74770 WatchSource:0}: Error finding container afb9a134cc15df0e285e788bc53aa55c007b7402ceab2fa6c5890ff9a9c74770: Status 404 returned error can't find the container with id afb9a134cc15df0e285e788bc53aa55c007b7402ceab2fa6c5890ff9a9c74770 Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.060463 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.070844 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.087721 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-rnqxd" event={"ID":"73c68787-8e13-4ad7-bef9-00a12d537c34","Type":"ContainerStarted","Data":"56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7"} Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.087780 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-rnqxd" event={"ID":"73c68787-8e13-4ad7-bef9-00a12d537c34","Type":"ContainerStarted","Data":"ce800f2723d03e7cc28a9a7060eac3ac5d23592f6e849a071c38e6e8d6986d6f"} Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.089312 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" event={"ID":"45295ff5-bb7d-450f-9ff1-eeb4edb0d705","Type":"ContainerStarted","Data":"5e8c8e68d30551f0db062125d393170b625603591825be844bd20c1ca9c55e6a"} Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.090112 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" event={"ID":"64ec5ffc-3edf-4820-bc21-58b1ace5615c","Type":"ContainerStarted","Data":"afb9a134cc15df0e285e788bc53aa55c007b7402ceab2fa6c5890ff9a9c74770"} Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.091246 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-rl6kj" event={"ID":"fef7fafd-1792-4aba-b5f6-67faaa11bdb7","Type":"ContainerStarted","Data":"b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334"} Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.091293 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-rl6kj" event={"ID":"fef7fafd-1792-4aba-b5f6-67faaa11bdb7","Type":"ContainerStarted","Data":"103191e9e647ae764031472a7a53fbf748734bc3f724ca80743c28777b910572"} Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.091964 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"e1fdb07ee263ced6418003646182100759eac8973d0e2c932f8682024960e918"} Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.092667 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4xrw6" event={"ID":"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6","Type":"ContainerStarted","Data":"b3c3802d8d13e8095ed5ce93a7b35897f24a19f7db26b746bae30f9d686e376e"} Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.108936 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.110303 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.110339 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.110349 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.110374 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.110385 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:03Z","lastTransitionTime":"2026-01-22T05:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.145444 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.172709 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.188560 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.209102 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.220567 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.220605 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.220616 4982 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.220630 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.220642 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:03Z","lastTransitionTime":"2026-01-22T05:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.253312 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.284032 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.324169 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.324206 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.324215 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.324232 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.324245 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:03Z","lastTransitionTime":"2026-01-22T05:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.334814 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.347745 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.361457 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.375212 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.392067 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.405181 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.418592 4982 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.426451 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:03 crc 
kubenswrapper[4982]: I0122 05:46:03.426497 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.426507 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.426525 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.426543 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:03Z","lastTransitionTime":"2026-01-22T05:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.442025 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\
":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\
"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.529263 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.529319 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.529330 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.529351 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.529362 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:03Z","lastTransitionTime":"2026-01-22T05:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.622393 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 09:23:15.080368274 +0000 UTC Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.632829 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.632914 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.632927 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.632947 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.632960 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:03Z","lastTransitionTime":"2026-01-22T05:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.735985 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.736038 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.736051 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.736079 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.736095 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:03Z","lastTransitionTime":"2026-01-22T05:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.839729 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.839797 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.839810 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.839829 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.839842 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:03Z","lastTransitionTime":"2026-01-22T05:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.942598 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.942655 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.942669 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.942694 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:03 crc kubenswrapper[4982]: I0122 05:46:03.942709 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:03Z","lastTransitionTime":"2026-01-22T05:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.046258 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.046317 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.046330 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.046355 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.046370 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:04Z","lastTransitionTime":"2026-01-22T05:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.098574 4982 generic.go:334] "Generic (PLEG): container finished" podID="64ec5ffc-3edf-4820-bc21-58b1ace5615c" containerID="cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd" exitCode=0 Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.098704 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" event={"ID":"64ec5ffc-3edf-4820-bc21-58b1ace5615c","Type":"ContainerDied","Data":"cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd"} Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.100584 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4xrw6" event={"ID":"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6","Type":"ContainerStarted","Data":"081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf"} Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.112069 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74"} Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.112230 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d"} Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.114055 4982 generic.go:334] "Generic (PLEG): container finished" podID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerID="70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa" exitCode=0 Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.114134 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" event={"ID":"45295ff5-bb7d-450f-9ff1-eeb4edb0d705","Type":"ContainerDied","Data":"70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa"} Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.130179 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.145250 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.157798 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.157955 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.157982 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.158016 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.158043 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:04Z","lastTransitionTime":"2026-01-22T05:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.166830 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2026-01-22T05:46:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.184598 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.206086 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.225293 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.240547 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.255314 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:04 crc 
kubenswrapper[4982]: I0122 05:46:04.262924 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.262972 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.262987 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.263009 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.263027 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:04Z","lastTransitionTime":"2026-01-22T05:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.266545 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.279277 4982 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.294692 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.310767 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.326342 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.345226 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.359906 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.365488 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.365524 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.365536 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.365553 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.365567 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:04Z","lastTransitionTime":"2026-01-22T05:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.373258 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.387707 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.405380 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.415038 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.425355 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.440041 4982 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4
f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.457417 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.468501 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.468545 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.468562 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.468584 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.468598 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:04Z","lastTransitionTime":"2026-01-22T05:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.470320 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.508046 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.547746 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c85
7df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.564288 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:04 crc 
kubenswrapper[4982]: I0122 05:46:04.575548 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.575754 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.575819 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.575879 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.575903 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:04Z","lastTransitionTime":"2026-01-22T05:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.588709 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.599997 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.622681 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 09:32:03.168562018 +0000 UTC Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.678954 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.679014 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.679027 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.679044 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.679056 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:04Z","lastTransitionTime":"2026-01-22T05:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.718571 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:46:04 crc kubenswrapper[4982]: E0122 05:46:04.718748 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.718890 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.718969 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:46:04 crc kubenswrapper[4982]: E0122 05:46:04.718981 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:46:04 crc kubenswrapper[4982]: E0122 05:46:04.719032 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.780822 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.780891 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.780902 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.780926 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.780941 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:04Z","lastTransitionTime":"2026-01-22T05:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.885882 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.885936 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.885948 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.885971 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.885985 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:04Z","lastTransitionTime":"2026-01-22T05:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.988439 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.988483 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.988494 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.988512 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:04 crc kubenswrapper[4982]: I0122 05:46:04.988521 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:04Z","lastTransitionTime":"2026-01-22T05:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.090977 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.091023 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.091036 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.091053 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.091063 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:05Z","lastTransitionTime":"2026-01-22T05:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.122604 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" event={"ID":"64ec5ffc-3edf-4820-bc21-58b1ace5615c","Type":"ContainerStarted","Data":"0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055"}
Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.128957 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" event={"ID":"45295ff5-bb7d-450f-9ff1-eeb4edb0d705","Type":"ContainerStarted","Data":"8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295"}
Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.129015 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" event={"ID":"45295ff5-bb7d-450f-9ff1-eeb4edb0d705","Type":"ContainerStarted","Data":"ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b"}
Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.129031 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" event={"ID":"45295ff5-bb7d-450f-9ff1-eeb4edb0d705","Type":"ContainerStarted","Data":"6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087"}
Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.129044 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" event={"ID":"45295ff5-bb7d-450f-9ff1-eeb4edb0d705","Type":"ContainerStarted","Data":"77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41"}
Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.129055 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" event={"ID":"45295ff5-bb7d-450f-9ff1-eeb4edb0d705","Type":"ContainerStarted","Data":"046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9"}
Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.137625 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:05Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.151880 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:05Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.166004 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:05Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.182703 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/hos
t/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restart
Count\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:05Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.197814 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:05Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.203265 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.203314 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.203326 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.203352 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.203367 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:05Z","lastTransitionTime":"2026-01-22T05:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.210985 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:05Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.230047 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:05Z 
is after 2025-08-24T17:21:41Z" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.244658 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:05Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.259253 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:05Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.273347 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:05Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.288181 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:05Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.299064 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:05Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.306018 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.306084 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.306104 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.306126 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.306141 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:05Z","lastTransitionTime":"2026-01-22T05:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.312199 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea1
77225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:05Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.324923 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastSt
ate\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:
45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:05Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.409664 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.409716 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.409726 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.409744 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.409754 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:05Z","lastTransitionTime":"2026-01-22T05:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.513161 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.513222 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.513235 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.513255 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.513283 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:05Z","lastTransitionTime":"2026-01-22T05:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.617491 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.617576 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.617597 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.617623 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.617642 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:05Z","lastTransitionTime":"2026-01-22T05:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.623416 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 22:59:12.699065558 +0000 UTC Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.721208 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.721267 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.721282 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.721309 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.721322 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:05Z","lastTransitionTime":"2026-01-22T05:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.824483 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.824548 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.824565 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.824590 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.824631 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:05Z","lastTransitionTime":"2026-01-22T05:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.927927 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.927978 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.927995 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.928019 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:05 crc kubenswrapper[4982]: I0122 05:46:05.928039 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:05Z","lastTransitionTime":"2026-01-22T05:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.037066 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.037149 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.037170 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.037203 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.037225 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:06Z","lastTransitionTime":"2026-01-22T05:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.140722 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.140810 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.140837 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.140917 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.140946 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:06Z","lastTransitionTime":"2026-01-22T05:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.144793 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" event={"ID":"45295ff5-bb7d-450f-9ff1-eeb4edb0d705","Type":"ContainerStarted","Data":"17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1"} Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.149617 4982 generic.go:334] "Generic (PLEG): container finished" podID="64ec5ffc-3edf-4820-bc21-58b1ace5615c" containerID="0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055" exitCode=0 Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.149727 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" event={"ID":"64ec5ffc-3edf-4820-bc21-58b1ace5615c","Type":"ContainerDied","Data":"0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055"} Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.174561 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:06Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.194123 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:06Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.212665 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:06Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.237163 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-rel
ease\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256
:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:06Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.248324 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.248380 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.248394 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.248413 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.248429 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:06Z","lastTransitionTime":"2026-01-22T05:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.256742 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:06Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.274915 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:06Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.292422 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc
-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:06Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.318091 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:06Z 
is after 2025-08-24T17:21:41Z" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.333920 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernet
es/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:06Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.351540 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:06Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.353452 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.353493 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.353507 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.353541 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.353557 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:06Z","lastTransitionTime":"2026-01-22T05:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.369097 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:06Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.385999 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:06Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.399048 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:06Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.410828 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.411019 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:46:06 crc kubenswrapper[4982]: E0122 05:46:06.411252 4982 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: 
object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 05:46:06 crc kubenswrapper[4982]: E0122 05:46:06.411326 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 05:46:14.4113086 +0000 UTC m=+35.249946603 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 05:46:06 crc kubenswrapper[4982]: E0122 05:46:06.411417 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:46:14.411381132 +0000 UTC m=+35.250019135 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.411450 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:06Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.456996 4982 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.457042 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.457055 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.457074 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.457087 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:06Z","lastTransitionTime":"2026-01-22T05:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.511957 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:46:06 crc kubenswrapper[4982]: E0122 05:46:06.512287 4982 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 05:46:06 crc kubenswrapper[4982]: E0122 05:46:06.512453 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 05:46:14.512432294 +0000 UTC m=+35.351070307 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.561385 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.561751 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.561845 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.561958 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.562106 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:06Z","lastTransitionTime":"2026-01-22T05:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.624034 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 15:28:28.575770118 +0000 UTC Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.666187 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.666233 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.666248 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.666269 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.666285 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:06Z","lastTransitionTime":"2026-01-22T05:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:06 crc kubenswrapper[4982]: E0122 05:46:06.714380 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 05:46:06 crc kubenswrapper[4982]: E0122 05:46:06.714443 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 05:46:06 crc kubenswrapper[4982]: E0122 05:46:06.714471 4982 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 05:46:06 crc kubenswrapper[4982]: E0122 05:46:06.714572 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 05:46:14.714539407 +0000 UTC m=+35.553177450 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.714188 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.715026 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:46:06 crc kubenswrapper[4982]: E0122 05:46:06.715375 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 05:46:06 crc kubenswrapper[4982]: E0122 05:46:06.715409 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 05:46:06 crc kubenswrapper[4982]: E0122 05:46:06.715432 4982 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 05:46:06 crc kubenswrapper[4982]: E0122 05:46:06.716135 4982 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 05:46:14.716048226 +0000 UTC m=+35.554686269 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.718337 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.718384 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.718443 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:46:06 crc kubenswrapper[4982]: E0122 05:46:06.719113 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:46:06 crc kubenswrapper[4982]: E0122 05:46:06.719227 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:46:06 crc kubenswrapper[4982]: E0122 05:46:06.719315 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.769798 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.769947 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.770012 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.770051 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.770080 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:06Z","lastTransitionTime":"2026-01-22T05:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.873500 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.873566 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.873578 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.873601 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.873615 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:06Z","lastTransitionTime":"2026-01-22T05:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.977804 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.977876 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.977889 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.977906 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:06 crc kubenswrapper[4982]: I0122 05:46:06.977918 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:06Z","lastTransitionTime":"2026-01-22T05:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.081930 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.082349 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.082502 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.082678 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.082897 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:07Z","lastTransitionTime":"2026-01-22T05:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.158371 4982 generic.go:334] "Generic (PLEG): container finished" podID="64ec5ffc-3edf-4820-bc21-58b1ace5615c" containerID="cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57" exitCode=0 Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.158492 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" event={"ID":"64ec5ffc-3edf-4820-bc21-58b1ace5615c","Type":"ContainerDied","Data":"cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57"} Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.181307 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:07Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.188955 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.189127 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.189236 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.189321 4982 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeNotReady" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.189397 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:07Z","lastTransitionTime":"2026-01-22T05:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.209224 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:07Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.227820 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:07Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.253884 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"nam
e\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"
cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:07Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.273875 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:07Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.288105 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:07Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.292650 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.292698 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.292712 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.292730 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.292744 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:07Z","lastTransitionTime":"2026-01-22T05:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.303028 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-
socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:07Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.328175 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:07Z 
is after 2025-08-24T17:21:41Z" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.341946 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:07Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.358685 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\
\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:07Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.382782 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:07Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.397966 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.398033 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.398045 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.398059 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.398070 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:07Z","lastTransitionTime":"2026-01-22T05:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.399540 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:07Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.415074 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:07Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.426753 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:07Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.504781 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.504828 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.504841 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.504896 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.504918 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:07Z","lastTransitionTime":"2026-01-22T05:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.608889 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.608965 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.608981 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.609006 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.609023 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:07Z","lastTransitionTime":"2026-01-22T05:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.624351 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 05:02:20.869010532 +0000 UTC
Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.712275 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.712333 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.712351 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.712376 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.712391 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:07Z","lastTransitionTime":"2026-01-22T05:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.815903 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.815981 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.816005 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.816034 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.816053 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:07Z","lastTransitionTime":"2026-01-22T05:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.919085 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.919135 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.919146 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.919163 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:07 crc kubenswrapper[4982]: I0122 05:46:07.919176 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:07Z","lastTransitionTime":"2026-01-22T05:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.022023 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.022094 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.022109 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.022130 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.022141 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:08Z","lastTransitionTime":"2026-01-22T05:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.125241 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.125294 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.125308 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.125327 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.125345 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:08Z","lastTransitionTime":"2026-01-22T05:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.172868 4982 generic.go:334] "Generic (PLEG): container finished" podID="64ec5ffc-3edf-4820-bc21-58b1ace5615c" containerID="fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db" exitCode=0 Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.172930 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" event={"ID":"64ec5ffc-3edf-4820-bc21-58b1ace5615c","Type":"ContainerDied","Data":"fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db"} Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.185158 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" event={"ID":"45295ff5-bb7d-450f-9ff1-eeb4edb0d705","Type":"ContainerStarted","Data":"a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c"} Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.187059 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the 
pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:08Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.202338 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:08Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.223290 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:08Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.228392 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.228436 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.228445 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.228466 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.228479 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:08Z","lastTransitionTime":"2026-01-22T05:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.233685 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:08Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.245150 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:08Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.260959 4982 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4
f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:08Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.272828 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:08Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.286301 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:08Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.296419 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:08Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.309770 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-relea
se\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":
{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:08Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.320534 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:08Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.336918 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:08Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.337118 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.337165 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.337176 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.337194 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.337208 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:08Z","lastTransitionTime":"2026-01-22T05:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.350509 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:08Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.362570 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:08Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.441332 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.441382 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.441390 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.441404 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.441416 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:08Z","lastTransitionTime":"2026-01-22T05:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.543860 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.543906 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.543917 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.543935 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.543947 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:08Z","lastTransitionTime":"2026-01-22T05:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.624866 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 10:47:14.62421327 +0000 UTC Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.647516 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.647593 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.647606 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.647624 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.647639 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:08Z","lastTransitionTime":"2026-01-22T05:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.718589 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.718631 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.718589 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:46:08 crc kubenswrapper[4982]: E0122 05:46:08.718758 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:46:08 crc kubenswrapper[4982]: E0122 05:46:08.718845 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:46:08 crc kubenswrapper[4982]: E0122 05:46:08.718925 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.750191 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.750224 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.750233 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.751047 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.751076 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:08Z","lastTransitionTime":"2026-01-22T05:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.854258 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.854300 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.854310 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.854327 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.854339 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:08Z","lastTransitionTime":"2026-01-22T05:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.957733 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.957788 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.957802 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.957824 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:08 crc kubenswrapper[4982]: I0122 05:46:08.957840 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:08Z","lastTransitionTime":"2026-01-22T05:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.061772 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.061835 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.061887 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.061919 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.061942 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:09Z","lastTransitionTime":"2026-01-22T05:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.164983 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.165054 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.165068 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.165089 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.165104 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:09Z","lastTransitionTime":"2026-01-22T05:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.194905 4982 generic.go:334] "Generic (PLEG): container finished" podID="64ec5ffc-3edf-4820-bc21-58b1ace5615c" containerID="a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3" exitCode=0 Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.195036 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" event={"ID":"64ec5ffc-3edf-4820-bc21-58b1ace5615c","Type":"ContainerDied","Data":"a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3"} Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.219844 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.241070 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.266571 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.267977 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.268022 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.268032 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.268049 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.268070 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:09Z","lastTransitionTime":"2026-01-22T05:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.281380 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.292501 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.305027 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.318785 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.333343 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.345362 4982 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:09Z is after 2025-08-24T17:21:41Z" Jan 22 
05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.363790 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.370737 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.370803 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.370821 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.370869 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.370891 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:09Z","lastTransitionTime":"2026-01-22T05:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.376806 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.389939 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.400893 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.424423 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.471354 4982 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.478341 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.478373 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.478388 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.478406 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.478417 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:09Z","lastTransitionTime":"2026-01-22T05:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.581766 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.581798 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.581808 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.581824 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.581835 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:09Z","lastTransitionTime":"2026-01-22T05:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.625557 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 09:58:35.858434409 +0000 UTC
Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.683679 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.684224 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.684236 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.684253 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.684265 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:09Z","lastTransitionTime":"2026-01-22T05:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.738282 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.766037 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.786006 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.786039 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.786048 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.786064 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.786074 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:09Z","lastTransitionTime":"2026-01-22T05:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.789637 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\
"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.819964 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:09Z 
is after 2025-08-24T17:21:41Z" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.837727 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernet
es/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.852944 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.871670 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:09Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.889264 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.889391 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.889457 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.889570 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.889648 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:09Z","lastTransitionTime":"2026-01-22T05:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.893324 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.906034 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.920173 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.941344 4982 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.964074 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.977292 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:09Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.992422 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.992500 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.992515 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.992535 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.992554 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:09Z","lastTransitionTime":"2026-01-22T05:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:09 crc kubenswrapper[4982]: I0122 05:46:09.997039 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4z
gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:07Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.095399 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.095456 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.095474 4982 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.095499 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.095518 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:10Z","lastTransitionTime":"2026-01-22T05:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.198028 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.198078 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.198088 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.198103 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.198114 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:10Z","lastTransitionTime":"2026-01-22T05:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.204048 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" event={"ID":"45295ff5-bb7d-450f-9ff1-eeb4edb0d705","Type":"ContainerStarted","Data":"c5aab28005d80d20243e88df2fae8c12c2df005de4d8e05fff11ed187df75d44"} Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.204498 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.210158 4982 generic.go:334] "Generic (PLEG): container finished" podID="64ec5ffc-3edf-4820-bc21-58b1ace5615c" containerID="cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57" exitCode=0 Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.210232 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" event={"ID":"64ec5ffc-3edf-4820-bc21-58b1ace5615c","Type":"ContainerDied","Data":"cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57"} Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.223978 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:10Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.247841 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.247905 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-k
ube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"st
ate\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:10Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.263989 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:10Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.279150 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:10Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.293618 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:10Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.302130 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.302181 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.302199 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.302225 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.302241 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:10Z","lastTransitionTime":"2026-01-22T05:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.306088 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:10Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.320972 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:10Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.338685 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:10Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.353301 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:10Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.371234 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:10Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.385893 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:10Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.401717 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:10Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.405413 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.405466 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.405475 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.405494 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.405505 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:10Z","lastTransitionTime":"2026-01-22T05:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.418898 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:10Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.438917 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5aab28005d80d20243e88df2fae8c12c2df005de4d8e05fff11ed187df75d44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:10Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.454651 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:10Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.471957 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:10Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.487814 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:10Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.507301 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-0
1-22T05:46:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-22T05:46:10Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.508598 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.508677 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.508697 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.508729 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.508751 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:10Z","lastTransitionTime":"2026-01-22T05:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.522680 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":
\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:10Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.551096 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5aab28005d80d20243e88df2fae8c12c2df005d
e4d8e05fff11ed187df75d44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:10Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.566499 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:10Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.583029 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:10Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.600433 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:10Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.611708 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.611755 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.611768 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.611789 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.611805 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:10Z","lastTransitionTime":"2026-01-22T05:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.614981 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:10Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.626955 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 21:43:27.037718949 +0000 UTC Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.628397 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:10Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.641608 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:10Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.652462 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:10Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.668308 4982 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4
f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:10Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.714611 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.714659 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.714684 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.714706 4982 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.714720 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:10Z","lastTransitionTime":"2026-01-22T05:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.718228 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.718255 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.718276 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 05:46:10 crc kubenswrapper[4982]: E0122 05:46:10.718345 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 22 05:46:10 crc kubenswrapper[4982]: E0122 05:46:10.718501 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 22 05:46:10 crc kubenswrapper[4982]: E0122 05:46:10.718818 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.818557 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.818595 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.818606 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.818626 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.818637 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:10Z","lastTransitionTime":"2026-01-22T05:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.922362 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.922434 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.922456 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.922487 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:10 crc kubenswrapper[4982]: I0122 05:46:10.922510 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:10Z","lastTransitionTime":"2026-01-22T05:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.026173 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.026259 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.026285 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.026321 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.026351 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:11Z","lastTransitionTime":"2026-01-22T05:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.129708 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.130144 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.130273 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.130390 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.130519 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:11Z","lastTransitionTime":"2026-01-22T05:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.220807 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" event={"ID":"64ec5ffc-3edf-4820-bc21-58b1ace5615c","Type":"ContainerStarted","Data":"d89d2c542320e62250c1cd2e647a48a26be58b60f268f696031d49a0292938c8"}
Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.220949 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs"
Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.220877 4982 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.233291 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.233343 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.233360 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.233383 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.233401 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:11Z","lastTransitionTime":"2026-01-22T05:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.251508 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d89d2c542320e62250c1cd2e647a48a26be58b60f268f696031d49a0292938c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.260882 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.273832 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.294553 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.312353 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.336482 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-
kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.337623 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.337704 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.337727 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.337756 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.337775 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:11Z","lastTransitionTime":"2026-01-22T05:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.360051 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.379727 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.413102 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"re
cursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cr
i-o://046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5aab28005d80d20243e88df2fae8c12c2df005de4d8e05fff11ed187df75d44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"
mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.427839 4982 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.441726 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.441787 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.441803 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.441827 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.441874 4982 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:11Z","lastTransitionTime":"2026-01-22T05:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.450078 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.476521 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-
kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 
2025-08-24T17:21:41Z" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.498773 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.516942 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.535599 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.546135 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.546205 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.546225 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.546255 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.546275 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:11Z","lastTransitionTime":"2026-01-22T05:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.557281 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.557369 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.557392 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.557417 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.557436 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:11Z","lastTransitionTime":"2026-01-22T05:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.566045 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:11 crc kubenswrapper[4982]: E0122 05:46:11.576276 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8c22d1f6-312d-4feb-94eb-4bba85065af7\\\",\\\"systemUUID\\\":\\\"a4516023-e293-4f20-9f1e-642f5f94b84c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.580975 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.581047 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.581062 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.581085 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.581099 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:11Z","lastTransitionTime":"2026-01-22T05:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.587599 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:11 crc kubenswrapper[4982]: E0122 05:46:11.595428 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8c22d1f6-312d-4feb-94eb-4bba85065af7\\\",\\\"systemUUID\\\":\\\"a4516023-e293-4f20-9f1e-642f5f94b84c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.599969 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.600009 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.600019 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.600039 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.600049 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:11Z","lastTransitionTime":"2026-01-22T05:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.601871 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:11 crc kubenswrapper[4982]: E0122 05:46:11.616690 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8c22d1f6-312d-4feb-94eb-4bba85065af7\\\",\\\"systemUUID\\\":\\\"a4516023-e293-4f20-9f1e-642f5f94b84c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.620648 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.620693 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.620703 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.620719 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.620744 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:11Z","lastTransitionTime":"2026-01-22T05:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.622636 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5aab28005d80d20243e88df2fae8c12c2df005d
e4d8e05fff11ed187df75d44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.627962 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 14:17:01.437983731 +0000 UTC Jan 22 05:46:11 crc kubenswrapper[4982]: E0122 05:46:11.632360 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056
b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951
},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8c22d1f6-312d-4feb-94eb-4bba85065af7\\\",\\\"systemUUID\\\":\\\"a4516023-e293-4f20-9f1e-642f5f94b84c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.636900 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.636929 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.636937 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.636952 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.636962 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:11Z","lastTransitionTime":"2026-01-22T05:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.640888 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshif
t-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode
\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:11 crc kubenswrapper[4982]: E0122 05:46:11.648595 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8c22d1f6-312d-4feb-94eb-4bba85065af7\\\",\\\"systemUUID\\\":\\\"a4516023-e293-4f20-9f1e-642f5f94b84c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:11 crc kubenswrapper[4982]: E0122 05:46:11.648728 4982 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.650522 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.650562 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.650576 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.650600 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.650613 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:11Z","lastTransitionTime":"2026-01-22T05:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.656485 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.668614 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.681301 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.692595 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.709026 4982 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 2025-08-24T17:21:41Z" Jan 22 
05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.725500 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.744616 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.752965 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.753003 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.753011 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.753027 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.753039 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:11Z","lastTransitionTime":"2026-01-22T05:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.757932 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.777916 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d89d2c542320e62250c1cd2e647a48a26be58b60f268f696031d49a0292938c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:11Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.855871 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.855935 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:11 crc 
kubenswrapper[4982]: I0122 05:46:11.855950 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.855971 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.855984 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:11Z","lastTransitionTime":"2026-01-22T05:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.958747 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.958813 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.958831 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.958908 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:11 crc kubenswrapper[4982]: I0122 05:46:11.958927 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:11Z","lastTransitionTime":"2026-01-22T05:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.061945 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.062013 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.062027 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.062056 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.062069 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:12Z","lastTransitionTime":"2026-01-22T05:46:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.164662 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.164700 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.164712 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.164896 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.164964 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:12Z","lastTransitionTime":"2026-01-22T05:46:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.230252 4982 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.267353 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.267420 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.267433 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.267454 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.267465 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:12Z","lastTransitionTime":"2026-01-22T05:46:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.370005 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.370046 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.370056 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.370072 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.370083 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:12Z","lastTransitionTime":"2026-01-22T05:46:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.473314 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.473372 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.473386 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.473406 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.473419 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:12Z","lastTransitionTime":"2026-01-22T05:46:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.576191 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.576244 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.576254 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.576272 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.576283 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:12Z","lastTransitionTime":"2026-01-22T05:46:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.628791 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 07:43:15.085326778 +0000 UTC Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.680579 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.680608 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.680619 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.680632 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.680642 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:12Z","lastTransitionTime":"2026-01-22T05:46:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.718445 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.718485 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:46:12 crc kubenswrapper[4982]: E0122 05:46:12.718653 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.718670 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:46:12 crc kubenswrapper[4982]: E0122 05:46:12.719033 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:46:12 crc kubenswrapper[4982]: E0122 05:46:12.718927 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.783536 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.783598 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.783611 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.783636 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.783651 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:12Z","lastTransitionTime":"2026-01-22T05:46:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.886968 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.887020 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.887038 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.887057 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.887071 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:12Z","lastTransitionTime":"2026-01-22T05:46:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.990218 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.990266 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.990279 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.990298 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:12 crc kubenswrapper[4982]: I0122 05:46:12.990312 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:12Z","lastTransitionTime":"2026-01-22T05:46:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.093394 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.093470 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.093484 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.093505 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.093520 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:13Z","lastTransitionTime":"2026-01-22T05:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.196614 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.196687 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.196699 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.196720 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.196736 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:13Z","lastTransitionTime":"2026-01-22T05:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.235960 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mb2qs_45295ff5-bb7d-450f-9ff1-eeb4edb0d705/ovnkube-controller/0.log" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.239339 4982 generic.go:334] "Generic (PLEG): container finished" podID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerID="c5aab28005d80d20243e88df2fae8c12c2df005de4d8e05fff11ed187df75d44" exitCode=1 Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.239394 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" event={"ID":"45295ff5-bb7d-450f-9ff1-eeb4edb0d705","Type":"ContainerDied","Data":"c5aab28005d80d20243e88df2fae8c12c2df005de4d8e05fff11ed187df75d44"} Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.241193 4982 scope.go:117] "RemoveContainer" containerID="c5aab28005d80d20243e88df2fae8c12c2df005de4d8e05fff11ed187df75d44" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.266366 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":t
rue,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":
\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:13Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.289344 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:13Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.300613 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.300679 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.300688 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.300704 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.300715 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:13Z","lastTransitionTime":"2026-01-22T05:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.307485 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:13Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.321156 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:13Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.336270 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:13Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.358708 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:13Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.378372 4982 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:13Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.393214 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:13Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.404158 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.404225 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.404238 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.404257 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.404275 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:13Z","lastTransitionTime":"2026-01-22T05:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.406238 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:13Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.427913 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d89d2c542320e62250c1cd2e647a48a26be58b60f268f696031d49a0292938c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:13Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.443474 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:13Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.469165 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:13Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.484393 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\
":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:13Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.501816 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5aab28005d80d20243e88df2fae8c12c2df005d
e4d8e05fff11ed187df75d44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5aab28005d80d20243e88df2fae8c12c2df005de4d8e05fff11ed187df75d44\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:46:12Z\\\",\\\"message\\\":\\\"ork/v1/apis/informers/externalversions/factory.go:140\\\\nI0122 05:46:12.839172 6245 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 05:46:12.839195 6245 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 05:46:12.839228 6245 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 05:46:12.839296 6245 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 05:46:12.839316 6245 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 05:46:12.839322 6245 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 05:46:12.839375 6245 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0122 05:46:12.839399 6245 factory.go:656] Stopping watch factory\\\\nI0122 05:46:12.839415 6245 ovnkube.go:599] Stopped ovnkube\\\\nI0122 05:46:12.839446 6245 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 05:46:12.839459 6245 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 05:46:12.839467 6245 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 05:46:12.839475 6245 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 05:46:12.839487 6245 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0122 05:46:12.839540 6245 handler.go:208] Removed *v1.EgressIP event 
han\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:13Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.506786 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.506876 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.506896 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.506915 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.506928 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:13Z","lastTransitionTime":"2026-01-22T05:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.609429 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.609490 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.609501 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.609536 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.609550 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:13Z","lastTransitionTime":"2026-01-22T05:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.629870 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 11:25:28.201514466 +0000 UTC Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.711629 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.711685 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.711697 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.711715 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.711732 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:13Z","lastTransitionTime":"2026-01-22T05:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.815434 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.815492 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.815511 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.815540 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.815560 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:13Z","lastTransitionTime":"2026-01-22T05:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.918412 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.918460 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.918473 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.918490 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:13 crc kubenswrapper[4982]: I0122 05:46:13.918501 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:13Z","lastTransitionTime":"2026-01-22T05:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.020993 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.021045 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.021055 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.021072 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.021084 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:14Z","lastTransitionTime":"2026-01-22T05:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.123623 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.123692 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.123707 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.123729 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.123745 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:14Z","lastTransitionTime":"2026-01-22T05:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.226377 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.226412 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.226422 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.226436 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.226446 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:14Z","lastTransitionTime":"2026-01-22T05:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.247715 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mb2qs_45295ff5-bb7d-450f-9ff1-eeb4edb0d705/ovnkube-controller/0.log" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.251934 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" event={"ID":"45295ff5-bb7d-450f-9ff1-eeb4edb0d705","Type":"ContainerStarted","Data":"18c14d45773d7bbbdb8f046443e99c1786d929b9d3f64e8031e09c7fdc60cef7"} Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.252105 4982 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.275290 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:14Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.295978 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:14Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.309552 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:14Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.330276 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d89d2c542320e62250c1cd2e647a48a26be58b60f268f696031d49a0292938c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:14Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.330920 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.330960 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:14 crc 
kubenswrapper[4982]: I0122 05:46:14.330973 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.330992 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.331005 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:14Z","lastTransitionTime":"2026-01-22T05:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.344644 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"n
ame\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:14Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.366069 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c14d45773d7bbbdb8f046443e99c1786d929b9
d3f64e8031e09c7fdc60cef7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5aab28005d80d20243e88df2fae8c12c2df005de4d8e05fff11ed187df75d44\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:46:12Z\\\",\\\"message\\\":\\\"ork/v1/apis/informers/externalversions/factory.go:140\\\\nI0122 05:46:12.839172 6245 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 05:46:12.839195 6245 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 05:46:12.839228 6245 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 05:46:12.839296 6245 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 05:46:12.839316 6245 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 05:46:12.839322 6245 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 05:46:12.839375 6245 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0122 05:46:12.839399 6245 factory.go:656] Stopping watch factory\\\\nI0122 05:46:12.839415 6245 ovnkube.go:599] Stopped ovnkube\\\\nI0122 05:46:12.839446 6245 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 05:46:12.839459 6245 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 05:46:12.839467 6245 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 05:46:12.839475 6245 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 05:46:12.839487 6245 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0122 05:46:12.839540 6245 handler.go:208] Removed *v1.EgressIP event 
han\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"co
ntainerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:14Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.388096 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:14Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.409202 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:14Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.424786 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:14Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.431909 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:46:14 crc kubenswrapper[4982]: E0122 05:46:14.432048 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:46:30.432030147 +0000 UTC m=+51.270668160 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.432163 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:46:14 crc kubenswrapper[4982]: E0122 05:46:14.432288 4982 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 05:46:14 crc kubenswrapper[4982]: E0122 05:46:14.432333 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 05:46:30.432323954 +0000 UTC m=+51.270961947 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.435718 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.435808 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.435829 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.435890 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.435916 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:14Z","lastTransitionTime":"2026-01-22T05:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.441154 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:14Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.457944 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:14Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.472789 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:14Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.488121 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:14Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.503037 4982 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4
f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:14Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.533530 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:46:14 crc kubenswrapper[4982]: E0122 05:46:14.533656 4982 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: 
object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 05:46:14 crc kubenswrapper[4982]: E0122 05:46:14.533729 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 05:46:30.533714124 +0000 UTC m=+51.372352127 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.540040 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.540103 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.540124 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.540152 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.540176 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:14Z","lastTransitionTime":"2026-01-22T05:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.630834 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 10:15:31.171652815 +0000 UTC Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.644014 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.644085 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.644111 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.644139 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.644161 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:14Z","lastTransitionTime":"2026-01-22T05:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.719046 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.719112 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:46:14 crc kubenswrapper[4982]: E0122 05:46:14.719205 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:46:14 crc kubenswrapper[4982]: E0122 05:46:14.719352 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.719046 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:46:14 crc kubenswrapper[4982]: E0122 05:46:14.719463 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.735484 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.735561 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:46:14 crc kubenswrapper[4982]: E0122 05:46:14.735718 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 05:46:14 crc kubenswrapper[4982]: E0122 05:46:14.735746 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 05:46:14 crc kubenswrapper[4982]: E0122 05:46:14.735763 4982 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 05:46:14 crc kubenswrapper[4982]: E0122 05:46:14.735816 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 05:46:14 crc kubenswrapper[4982]: E0122 05:46:14.735885 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 05:46:14 crc kubenswrapper[4982]: E0122 05:46:14.735912 4982 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 05:46:14 crc kubenswrapper[4982]: E0122 05:46:14.735828 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 05:46:30.735806647 +0000 UTC m=+51.574444660 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 05:46:14 crc kubenswrapper[4982]: E0122 05:46:14.735985 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 05:46:30.735959901 +0000 UTC m=+51.574597944 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.747142 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.747232 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.747264 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.747299 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.747327 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:14Z","lastTransitionTime":"2026-01-22T05:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.853270 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.853360 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.853394 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.853430 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.853456 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:14Z","lastTransitionTime":"2026-01-22T05:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.957213 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.957267 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.957279 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.957301 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:14 crc kubenswrapper[4982]: I0122 05:46:14.957316 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:14Z","lastTransitionTime":"2026-01-22T05:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.060172 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.060216 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.060233 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.060257 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.060278 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:15Z","lastTransitionTime":"2026-01-22T05:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.164148 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.164244 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.164270 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.164304 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.164330 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:15Z","lastTransitionTime":"2026-01-22T05:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.259085 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mb2qs_45295ff5-bb7d-450f-9ff1-eeb4edb0d705/ovnkube-controller/1.log" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.260406 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mb2qs_45295ff5-bb7d-450f-9ff1-eeb4edb0d705/ovnkube-controller/0.log" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.263572 4982 generic.go:334] "Generic (PLEG): container finished" podID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerID="18c14d45773d7bbbdb8f046443e99c1786d929b9d3f64e8031e09c7fdc60cef7" exitCode=1 Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.263624 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" event={"ID":"45295ff5-bb7d-450f-9ff1-eeb4edb0d705","Type":"ContainerDied","Data":"18c14d45773d7bbbdb8f046443e99c1786d929b9d3f64e8031e09c7fdc60cef7"} Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.263756 4982 scope.go:117] "RemoveContainer" containerID="c5aab28005d80d20243e88df2fae8c12c2df005de4d8e05fff11ed187df75d44" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.264899 4982 scope.go:117] "RemoveContainer" containerID="18c14d45773d7bbbdb8f046443e99c1786d929b9d3f64e8031e09c7fdc60cef7" Jan 22 05:46:15 crc kubenswrapper[4982]: E0122 05:46:15.265222 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-mb2qs_openshift-ovn-kubernetes(45295ff5-bb7d-450f-9ff1-eeb4edb0d705)\"" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.266911 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.266937 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.266966 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.266980 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.266991 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:15Z","lastTransitionTime":"2026-01-22T05:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.331409 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c14d45773d7bbbdb8f046443e99c1786d929b9d3f64e8031e09c7fdc60cef7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5aab28005d80d20243e88df2fae8c12c2df005de4d8e05fff11ed187df75d44\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:46:12Z\\\",\\\"message\\\":\\\"ork/v1/apis/informers/externalversions/factory.go:140\\\\nI0122 05:46:12.839172 6245 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 05:46:12.839195 6245 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 05:46:12.839228 6245 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 05:46:12.839296 6245 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 05:46:12.839316 6245 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 05:46:12.839322 6245 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 05:46:12.839375 6245 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0122 05:46:12.839399 6245 factory.go:656] Stopping watch factory\\\\nI0122 05:46:12.839415 6245 ovnkube.go:599] Stopped ovnkube\\\\nI0122 05:46:12.839446 6245 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 05:46:12.839459 6245 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 05:46:12.839467 6245 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 05:46:12.839475 6245 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 05:46:12.839487 6245 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0122 05:46:12.839540 6245 handler.go:208] Removed *v1.EgressIP event 
han\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c14d45773d7bbbdb8f046443e99c1786d929b9d3f64e8031e09c7fdc60cef7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:46:14Z\\\",\\\"message\\\":\\\"r service openshift-operator-lifecycle-manager/catalog-operator-metrics for network=default\\\\nI0122 05:46:14.161685 6384 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/machine-config-daemon-gdpxx\\\\nI0122 05:46:14.161698 6384 obj_retry.go:365] Adding new object: *v1.Pod openshift-machine-config-operator/machine-config-daemon-gdpxx\\\\nI0122 05:46:14.161719 6384 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-gdpxx in node crc\\\\nF0122 05:46:14.161730 6384 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:14Z is after 
2025-08-24T17:21:41Z]\\\\nI0122\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-d
ev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:15Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.354082 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc3
5825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:15Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.370147 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.370207 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.370216 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.370248 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.370261 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:15Z","lastTransitionTime":"2026-01-22T05:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.377158 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:15Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.389752 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:15Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.402425 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:15Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.416872 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:15Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.429828 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:15Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.442596 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:15Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.456392 4982 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4
f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:15Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.473315 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:15Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.473668 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.473808 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.473953 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.474049 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.474154 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:15Z","lastTransitionTime":"2026-01-22T05:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.489229 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:15Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.504201 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:15Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.524640 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d89d2c542320e62250c1cd2e647a48a26be58b60f268f696031d49a0292938c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:15Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.539199 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:15Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.577761 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.577811 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.577823 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.577843 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.577881 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:15Z","lastTransitionTime":"2026-01-22T05:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.631457 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 02:59:21.090761032 +0000 UTC Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.682162 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.682234 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.682254 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.682280 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.682302 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:15Z","lastTransitionTime":"2026-01-22T05:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.785293 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.785354 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.785366 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.785384 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.785398 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:15Z","lastTransitionTime":"2026-01-22T05:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.888401 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.888688 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.888800 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.888927 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.889017 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:15Z","lastTransitionTime":"2026-01-22T05:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.992473 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.992742 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.992820 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.992970 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:15 crc kubenswrapper[4982]: I0122 05:46:15.993067 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:15Z","lastTransitionTime":"2026-01-22T05:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.021914 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg"] Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.022460 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.025710 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.026256 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.046181 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d89d2c542320e62250c1cd2e647a48a26be58b60f268f696031d49a0292938c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"n
ame\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\
\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.061311 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.076638 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.093391 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.096443 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.096506 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.096525 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.096555 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.096575 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:16Z","lastTransitionTime":"2026-01-22T05:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.109223 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.129355 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.151151 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d968804b-0683-488d-bc01-a0a16b689d6f-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-9d4vg\" (UID: \"d968804b-0683-488d-bc01-a0a16b689d6f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.151242 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d968804b-0683-488d-bc01-a0a16b689d6f-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-9d4vg\" (UID: \"d968804b-0683-488d-bc01-a0a16b689d6f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.151330 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68mz9\" (UniqueName: \"kubernetes.io/projected/d968804b-0683-488d-bc01-a0a16b689d6f-kube-api-access-68mz9\") pod \"ovnkube-control-plane-749d76644c-9d4vg\" (UID: \"d968804b-0683-488d-bc01-a0a16b689d6f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.151379 4982 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d968804b-0683-488d-bc01-a0a16b689d6f-env-overrides\") pod \"ovnkube-control-plane-749d76644c-9d4vg\" (UID: \"d968804b-0683-488d-bc01-a0a16b689d6f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.151477 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},
{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.179622 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c14d45773d7bbbdb8f046443e99c1786d929b9
d3f64e8031e09c7fdc60cef7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5aab28005d80d20243e88df2fae8c12c2df005de4d8e05fff11ed187df75d44\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:46:12Z\\\",\\\"message\\\":\\\"ork/v1/apis/informers/externalversions/factory.go:140\\\\nI0122 05:46:12.839172 6245 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 05:46:12.839195 6245 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 05:46:12.839228 6245 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 05:46:12.839296 6245 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 05:46:12.839316 6245 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 05:46:12.839322 6245 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 05:46:12.839375 6245 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0122 05:46:12.839399 6245 factory.go:656] Stopping watch factory\\\\nI0122 05:46:12.839415 6245 ovnkube.go:599] Stopped ovnkube\\\\nI0122 05:46:12.839446 6245 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 05:46:12.839459 6245 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 05:46:12.839467 6245 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 05:46:12.839475 6245 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 05:46:12.839487 6245 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0122 05:46:12.839540 6245 handler.go:208] Removed *v1.EgressIP event han\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c14d45773d7bbbdb8f046443e99c1786d929b9d3f64e8031e09c7fdc60cef7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:46:14Z\\\",\\\"message\\\":\\\"r service openshift-operator-lifecycle-manager/catalog-operator-metrics for network=default\\\\nI0122 05:46:14.161685 6384 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/machine-config-daemon-gdpxx\\\\nI0122 05:46:14.161698 6384 obj_retry.go:365] Adding new object: *v1.Pod openshift-machine-config-operator/machine-config-daemon-gdpxx\\\\nI0122 05:46:14.161719 6384 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-gdpxx in node crc\\\\nF0122 05:46:14.161730 6384 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post 
\\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:14Z is after 2025-08-24T17:21:41Z]\\\\nI0122\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initC
ontainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.199197 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d968804b-0683-488d-bc01-a0a16b689d6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9d4vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.200110 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.200339 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.200496 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.200689 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.200908 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:16Z","lastTransitionTime":"2026-01-22T05:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.221039 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.240061 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.252749 4982 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-68mz9\" (UniqueName: \"kubernetes.io/projected/d968804b-0683-488d-bc01-a0a16b689d6f-kube-api-access-68mz9\") pod \"ovnkube-control-plane-749d76644c-9d4vg\" (UID: \"d968804b-0683-488d-bc01-a0a16b689d6f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.252819 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d968804b-0683-488d-bc01-a0a16b689d6f-env-overrides\") pod \"ovnkube-control-plane-749d76644c-9d4vg\" (UID: \"d968804b-0683-488d-bc01-a0a16b689d6f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.252891 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d968804b-0683-488d-bc01-a0a16b689d6f-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-9d4vg\" (UID: \"d968804b-0683-488d-bc01-a0a16b689d6f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.252926 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d968804b-0683-488d-bc01-a0a16b689d6f-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-9d4vg\" (UID: \"d968804b-0683-488d-bc01-a0a16b689d6f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.253831 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d968804b-0683-488d-bc01-a0a16b689d6f-env-overrides\") pod \"ovnkube-control-plane-749d76644c-9d4vg\" (UID: \"d968804b-0683-488d-bc01-a0a16b689d6f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.254099 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d968804b-0683-488d-bc01-a0a16b689d6f-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-9d4vg\" (UID: \"d968804b-0683-488d-bc01-a0a16b689d6f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.258651 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.265683 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d968804b-0683-488d-bc01-a0a16b689d6f-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-9d4vg\" (UID: \"d968804b-0683-488d-bc01-a0a16b689d6f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.272281 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mb2qs_45295ff5-bb7d-450f-9ff1-eeb4edb0d705/ovnkube-controller/1.log" Jan 22 
05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.278221 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.278335 4982 scope.go:117] "RemoveContainer" containerID="18c14d45773d7bbbdb8f046443e99c1786d929b9d3f64e8031e09c7fdc60cef7" Jan 22 05:46:16 crc kubenswrapper[4982]: E0122 05:46:16.278597 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-mb2qs_openshift-ovn-kubernetes(45295ff5-bb7d-450f-9ff1-eeb4edb0d705)\"" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.288896 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68mz9\" (UniqueName: \"kubernetes.io/projected/d968804b-0683-488d-bc01-a0a16b689d6f-kube-api-access-68mz9\") pod \"ovnkube-control-plane-749d76644c-9d4vg\" (UID: \"d968804b-0683-488d-bc01-a0a16b689d6f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.299262 4982 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.304270 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.304345 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.304358 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.304389 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.304406 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:16Z","lastTransitionTime":"2026-01-22T05:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.319812 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.338060 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.340253 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.359309 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: W0122 05:46:16.360832 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd968804b_0683_488d_bc01_a0a16b689d6f.slice/crio-d38b04cb2321fd744c1e4b219b980ac9c2734800df1880aaf1ef2fdd99147407 WatchSource:0}: Error finding container d38b04cb2321fd744c1e4b219b980ac9c2734800df1880aaf1ef2fdd99147407: Status 404 returned error can't find the container with id d38b04cb2321fd744c1e4b219b980ac9c2734800df1880aaf1ef2fdd99147407 Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.379089 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.406116 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d89d2c542320e62250c1cd2e647a48a26be58b60f268f696031d49a0292938c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.407640 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.407670 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:16 crc 
kubenswrapper[4982]: I0122 05:46:16.407682 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.407700 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.407713 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:16Z","lastTransitionTime":"2026-01-22T05:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.429716 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.449030 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.473998 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://046db
0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c14d45773d7bbbdb8f046443e99c1786d929b9d3f64e8031e09c7fdc60cef7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c14d45773d7bbbdb8f046443e99c1786d929b9d3f64e8031e09c7fdc60cef7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:46:14Z\\\",\\\"message\\\":\\\"r service openshift-operator-lifecycle-manager/catalog-operator-metrics for network=default\\\\nI0122 05:46:14.161685 6384 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/machine-config-daemon-gdpxx\\\\nI0122 05:46:14.161698 6384 obj_retry.go:365] Adding new object: *v1.Pod openshift-machine-config-operator/machine-config-daemon-gdpxx\\\\nI0122 05:46:14.161719 6384 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-gdpxx in node crc\\\\nF0122 05:46:14.161730 6384 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:14Z is after 
2025-08-24T17:21:41Z]\\\\nI0122\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-mb2qs_openshift-ovn-kubernetes(45295ff5-bb7d-450f-9ff1-eeb4edb0d705)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.487426 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d968804b-0683-488d-bc01-a0a16b689d6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9d4vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.500630 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.512958 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.513016 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.513041 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.513064 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.513078 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:16Z","lastTransitionTime":"2026-01-22T05:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.515970 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.538708 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.555730 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.568554 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.581723 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.600672 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operat
or@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 
05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.616035 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.616160 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.616228 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.616301 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.616383 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:16Z","lastTransitionTime":"2026-01-22T05:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.632402 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 16:15:38.064532736 +0000 UTC Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.718104 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:46:16 crc kubenswrapper[4982]: E0122 05:46:16.718397 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.718231 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:46:16 crc kubenswrapper[4982]: E0122 05:46:16.719099 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.718179 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:46:16 crc kubenswrapper[4982]: E0122 05:46:16.719310 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.719472 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.719521 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.719538 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.719556 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.719572 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:16Z","lastTransitionTime":"2026-01-22T05:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.765715 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-gxwx2"] Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.767474 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:46:16 crc kubenswrapper[4982]: E0122 05:46:16.767668 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.787597 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.802211 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.819879 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d89d2c542320e62250c1cd2e647a48a26be58b60f268f696031d49a0292938c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.821955 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.821993 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:16 crc 
kubenswrapper[4982]: I0122 05:46:16.822005 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.822024 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.822035 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:16Z","lastTransitionTime":"2026-01-22T05:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.835678 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.856036 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c14d45773d7bbbdb8f046443e99c1786d929b9d3f64e8031e09c7fdc60cef7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c14d45773d7bbbdb8f046443e99c1786d929b9d3f64e8031e09c7fdc60cef7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:46:14Z\\\",\\\"message\\\":\\\"r service openshift-operator-lifecycle-manager/catalog-operator-metrics for network=default\\\\nI0122 05:46:14.161685 6384 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/machine-config-daemon-gdpxx\\\\nI0122 05:46:14.161698 6384 obj_retry.go:365] Adding new object: *v1.Pod openshift-machine-config-operator/machine-config-daemon-gdpxx\\\\nI0122 05:46:14.161719 6384 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-gdpxx in node crc\\\\nF0122 05:46:14.161730 6384 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:14Z is after 2025-08-24T17:21:41Z]\\\\nI0122\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mb2qs_openshift-ovn-kubernetes(45295ff5-bb7d-450f-9ff1-eeb4edb0d705)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.862641 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/377cd579-2ade-48ea-ad2a-44d1546fd5fb-metrics-certs\") pod \"network-metrics-daemon-gxwx2\" (UID: \"377cd579-2ade-48ea-ad2a-44d1546fd5fb\") " pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.862707 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z67hv\" (UniqueName: \"kubernetes.io/projected/377cd579-2ade-48ea-ad2a-44d1546fd5fb-kube-api-access-z67hv\") pod \"network-metrics-daemon-gxwx2\" (UID: \"377cd579-2ade-48ea-ad2a-44d1546fd5fb\") " pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.869517 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d968804b-0683-488d-bc01-a0a16b689d6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9d4vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.884034 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gxwx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"377cd579-2ade-48ea-ad2a-44d1546fd5fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gxwx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.902157 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.923694 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.925184 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.925251 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.925268 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.925291 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.925306 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:16Z","lastTransitionTime":"2026-01-22T05:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.947113 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-
socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.962805 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.963619 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z67hv\" (UniqueName: \"kubernetes.io/projected/377cd579-2ade-48ea-ad2a-44d1546fd5fb-kube-api-access-z67hv\") pod \"network-metrics-daemon-gxwx2\" (UID: \"377cd579-2ade-48ea-ad2a-44d1546fd5fb\") " pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.963705 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/377cd579-2ade-48ea-ad2a-44d1546fd5fb-metrics-certs\") pod \"network-metrics-daemon-gxwx2\" (UID: \"377cd579-2ade-48ea-ad2a-44d1546fd5fb\") " pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:46:16 crc kubenswrapper[4982]: E0122 05:46:16.963890 4982 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 05:46:16 crc kubenswrapper[4982]: E0122 05:46:16.963968 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/377cd579-2ade-48ea-ad2a-44d1546fd5fb-metrics-certs podName:377cd579-2ade-48ea-ad2a-44d1546fd5fb nodeName:}" failed. No retries permitted until 2026-01-22 05:46:17.463951681 +0000 UTC m=+38.302589684 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/377cd579-2ade-48ea-ad2a-44d1546fd5fb-metrics-certs") pod "network-metrics-daemon-gxwx2" (UID: "377cd579-2ade-48ea-ad2a-44d1546fd5fb") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.977882 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.981829 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z67hv\" (UniqueName: \"kubernetes.io/projected/377cd579-2ade-48ea-ad2a-44d1546fd5fb-kube-api-access-z67hv\") pod \"network-metrics-daemon-gxwx2\" (UID: \"377cd579-2ade-48ea-ad2a-44d1546fd5fb\") " pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.988343 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:16 crc kubenswrapper[4982]: I0122 05:46:16.998484 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:16Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.010701 4982 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4
f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:17Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.023603 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:17Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.029439 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.029476 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.029485 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.029499 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.029512 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:17Z","lastTransitionTime":"2026-01-22T05:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.132427 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.132613 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.132690 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.132755 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.132829 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:17Z","lastTransitionTime":"2026-01-22T05:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.235055 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.235089 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.235098 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.235114 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.235124 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:17Z","lastTransitionTime":"2026-01-22T05:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.282576 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" event={"ID":"d968804b-0683-488d-bc01-a0a16b689d6f","Type":"ContainerStarted","Data":"7df0c7cfcbf807a71f0b022f48858333e3de0be3f3de087efe5a231d41a9db90"} Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.282655 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" event={"ID":"d968804b-0683-488d-bc01-a0a16b689d6f","Type":"ContainerStarted","Data":"a9acc1a82033c3ce19a5331828cf1e54919f3eb43181d8ef15049d67b6c56dd7"} Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.282676 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" event={"ID":"d968804b-0683-488d-bc01-a0a16b689d6f","Type":"ContainerStarted","Data":"d38b04cb2321fd744c1e4b219b980ac9c2734800df1880aaf1ef2fdd99147407"} Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.303901 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:17Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.327974 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:17Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.337909 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.337973 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.337983 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.338007 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.338019 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:17Z","lastTransitionTime":"2026-01-22T05:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.345131 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:17Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.367752 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d89d2c542320e62250c1cd2e647a48a26be58b60f268f696031d49a0292938c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:17Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.385940 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:17Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.403067 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:17Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.422018 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\
":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:17Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.440984 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.441322 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.441511 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.441646 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.441762 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:17Z","lastTransitionTime":"2026-01-22T05:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.455521 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c14d45773d7bbbdb8f046443e99c1786d929b9
d3f64e8031e09c7fdc60cef7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c14d45773d7bbbdb8f046443e99c1786d929b9d3f64e8031e09c7fdc60cef7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:46:14Z\\\",\\\"message\\\":\\\"r service openshift-operator-lifecycle-manager/catalog-operator-metrics for network=default\\\\nI0122 05:46:14.161685 6384 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/machine-config-daemon-gdpxx\\\\nI0122 05:46:14.161698 6384 obj_retry.go:365] Adding new object: *v1.Pod openshift-machine-config-operator/machine-config-daemon-gdpxx\\\\nI0122 05:46:14.161719 6384 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-gdpxx in node crc\\\\nF0122 05:46:14.161730 6384 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:14Z is after 2025-08-24T17:21:41Z]\\\\nI0122\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mb2qs_openshift-ovn-kubernetes(45295ff5-bb7d-450f-9ff1-eeb4edb0d705)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:17Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.469712 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/377cd579-2ade-48ea-ad2a-44d1546fd5fb-metrics-certs\") pod \"network-metrics-daemon-gxwx2\" (UID: \"377cd579-2ade-48ea-ad2a-44d1546fd5fb\") " pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:46:17 crc kubenswrapper[4982]: E0122 05:46:17.469938 4982 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 05:46:17 crc kubenswrapper[4982]: E0122 05:46:17.470024 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/377cd579-2ade-48ea-ad2a-44d1546fd5fb-metrics-certs podName:377cd579-2ade-48ea-ad2a-44d1546fd5fb nodeName:}" failed. No retries permitted until 2026-01-22 05:46:18.47000065 +0000 UTC m=+39.308638693 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/377cd579-2ade-48ea-ad2a-44d1546fd5fb-metrics-certs") pod "network-metrics-daemon-gxwx2" (UID: "377cd579-2ade-48ea-ad2a-44d1546fd5fb") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.473983 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d968804b-0683-488d-bc01-a0a16b689d6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9acc1a82033c3ce19a5331828cf1e54919f3eb43181d8ef15049d67b6c56dd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7df0c7cfcbf807a71f0b022f48858333e3de0be3f3de087efe5a231d41a9db90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9d4vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:17Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.488678 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gxwx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"377cd579-2ade-48ea-ad2a-44d1546fd5fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gxwx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:17Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.505266 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:17Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.530634 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\
\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:17Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.544500 4982 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.544842 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.545023 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.545135 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.545276 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:17Z","lastTransitionTime":"2026-01-22T05:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.547480 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:17Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.564916 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:17Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.582037 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:17Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.597460 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:17Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.632819 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate 
expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 22:40:46.49585168 +0000 UTC
Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.649069 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.649134 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.649152 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.649170 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.649181 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:17Z","lastTransitionTime":"2026-01-22T05:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.752436 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.752517 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.752544 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.752581 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.752611 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:17Z","lastTransitionTime":"2026-01-22T05:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.856122 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.856201 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.856221 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.856251 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.856272 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:17Z","lastTransitionTime":"2026-01-22T05:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.960422 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.960494 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.960514 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.960541 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:17 crc kubenswrapper[4982]: I0122 05:46:17.960562 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:17Z","lastTransitionTime":"2026-01-22T05:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.064660 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.064696 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.064705 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.064720 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.064730 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:18Z","lastTransitionTime":"2026-01-22T05:46:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.167140 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.167187 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.167199 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.167218 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.167231 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:18Z","lastTransitionTime":"2026-01-22T05:46:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.270091 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.270158 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.270184 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.270216 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.270239 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:18Z","lastTransitionTime":"2026-01-22T05:46:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.373128 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.373510 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.373581 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.373647 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.373704 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:18Z","lastTransitionTime":"2026-01-22T05:46:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.477168 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.477270 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.477290 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.477318 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.477338 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:18Z","lastTransitionTime":"2026-01-22T05:46:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.479803 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/377cd579-2ade-48ea-ad2a-44d1546fd5fb-metrics-certs\") pod \"network-metrics-daemon-gxwx2\" (UID: \"377cd579-2ade-48ea-ad2a-44d1546fd5fb\") " pod="openshift-multus/network-metrics-daemon-gxwx2"
Jan 22 05:46:18 crc kubenswrapper[4982]: E0122 05:46:18.480257 4982 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 22 05:46:18 crc kubenswrapper[4982]: E0122 05:46:18.480504 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/377cd579-2ade-48ea-ad2a-44d1546fd5fb-metrics-certs podName:377cd579-2ade-48ea-ad2a-44d1546fd5fb nodeName:}" failed. No retries permitted until 2026-01-22 05:46:20.480470703 +0000 UTC m=+41.319108736 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/377cd579-2ade-48ea-ad2a-44d1546fd5fb-metrics-certs") pod "network-metrics-daemon-gxwx2" (UID: "377cd579-2ade-48ea-ad2a-44d1546fd5fb") : object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.580527 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.581157 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.581325 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.581651 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.581793 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:18Z","lastTransitionTime":"2026-01-22T05:46:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.634105 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 17:34:51.138006117 +0000 UTC
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.685061 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.685129 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.685148 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.685177 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.685197 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:18Z","lastTransitionTime":"2026-01-22T05:46:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.719408 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.719469 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.719545 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.719547 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2"
Jan 22 05:46:18 crc kubenswrapper[4982]: E0122 05:46:18.719648 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 22 05:46:18 crc kubenswrapper[4982]: E0122 05:46:18.719801 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 22 05:46:18 crc kubenswrapper[4982]: E0122 05:46:18.719892 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 22 05:46:18 crc kubenswrapper[4982]: E0122 05:46:18.720216 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.788542 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.788589 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.788597 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.788615 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.788626 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:18Z","lastTransitionTime":"2026-01-22T05:46:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.891478 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.891547 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.891571 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.891604 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.891627 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:18Z","lastTransitionTime":"2026-01-22T05:46:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.994752 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.994884 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.994907 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.994935 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:18 crc kubenswrapper[4982]: I0122 05:46:18.994993 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:18Z","lastTransitionTime":"2026-01-22T05:46:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.098501 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.098596 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.098620 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.098666 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.098690 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:19Z","lastTransitionTime":"2026-01-22T05:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.202379 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.202455 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.202472 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.202496 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.202517 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:19Z","lastTransitionTime":"2026-01-22T05:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.305698 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.305804 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.305823 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.305882 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.305906 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:19Z","lastTransitionTime":"2026-01-22T05:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.408769 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.408835 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.408863 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.408888 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.408902 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:19Z","lastTransitionTime":"2026-01-22T05:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.512007 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.512067 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.512081 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.512104 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.512117 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:19Z","lastTransitionTime":"2026-01-22T05:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.615229 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.615336 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.615353 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.615375 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.615390 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:19Z","lastTransitionTime":"2026-01-22T05:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.635446 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-03 09:18:48.560124406 +0000 UTC Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.718939 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.718987 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.719017 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.719036 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.719047 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:19Z","lastTransitionTime":"2026-01-22T05:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.743392 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d89d2c542320e62250c1cd2e647a48a26be58b60f268f696031d49a0292938c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"in
itContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"
2026-01-22T05:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\
"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:19Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.760971 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-22T05:46:19Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.781496 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:19Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.796074 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:19Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.808931 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gxwx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"377cd579-2ade-48ea-ad2a-44d1546fd5fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gxwx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:19Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.821926 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.821985 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.822004 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.822030 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.822053 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:19Z","lastTransitionTime":"2026-01-22T05:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.824314 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:19Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.840221 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:19Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.855239 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc
-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:19Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.872821 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c14d45773d7bbbdb8f046443e99c1786d929b9
d3f64e8031e09c7fdc60cef7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c14d45773d7bbbdb8f046443e99c1786d929b9d3f64e8031e09c7fdc60cef7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:46:14Z\\\",\\\"message\\\":\\\"r service openshift-operator-lifecycle-manager/catalog-operator-metrics for network=default\\\\nI0122 05:46:14.161685 6384 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/machine-config-daemon-gdpxx\\\\nI0122 05:46:14.161698 6384 obj_retry.go:365] Adding new object: *v1.Pod openshift-machine-config-operator/machine-config-daemon-gdpxx\\\\nI0122 05:46:14.161719 6384 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-gdpxx in node crc\\\\nF0122 05:46:14.161730 6384 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:14Z is after 2025-08-24T17:21:41Z]\\\\nI0122\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mb2qs_openshift-ovn-kubernetes(45295ff5-bb7d-450f-9ff1-eeb4edb0d705)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:19Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.887212 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d968804b-0683-488d-bc01-a0a16b689d6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9acc1a82033c3ce19a5331828cf1e54919f3eb43181d8ef15049d67b6c56dd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7df0c7cfcbf807a71f0b022f48858333e3de0be3f3de087efe5a231d41a9db90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9d4vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:19Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.901354 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:19Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.919208 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:19Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.925130 4982 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.925213 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.925228 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.925248 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.925261 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:19Z","lastTransitionTime":"2026-01-22T05:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.941950 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controlle
r\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kub
e-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:19Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.963938 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:19Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:19 crc kubenswrapper[4982]: I0122 05:46:19.986898 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:19Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.007696 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:20Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.028229 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.028305 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.028327 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.028352 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.028370 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:20Z","lastTransitionTime":"2026-01-22T05:46:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.132220 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.132272 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.132281 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.132298 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.132326 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:20Z","lastTransitionTime":"2026-01-22T05:46:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.235890 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.235951 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.235963 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.235988 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.236002 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:20Z","lastTransitionTime":"2026-01-22T05:46:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.339533 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.339607 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.339616 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.339634 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.339645 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:20Z","lastTransitionTime":"2026-01-22T05:46:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.443489 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.443565 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.443614 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.443666 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.443693 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:20Z","lastTransitionTime":"2026-01-22T05:46:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.484810 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.486535 4982 scope.go:117] "RemoveContainer" containerID="18c14d45773d7bbbdb8f046443e99c1786d929b9d3f64e8031e09c7fdc60cef7" Jan 22 05:46:20 crc kubenswrapper[4982]: E0122 05:46:20.487141 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-mb2qs_openshift-ovn-kubernetes(45295ff5-bb7d-450f-9ff1-eeb4edb0d705)\"" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.501914 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/377cd579-2ade-48ea-ad2a-44d1546fd5fb-metrics-certs\") pod \"network-metrics-daemon-gxwx2\" (UID: \"377cd579-2ade-48ea-ad2a-44d1546fd5fb\") " pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:46:20 crc kubenswrapper[4982]: E0122 05:46:20.502170 4982 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 05:46:20 crc kubenswrapper[4982]: E0122 05:46:20.502266 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/377cd579-2ade-48ea-ad2a-44d1546fd5fb-metrics-certs podName:377cd579-2ade-48ea-ad2a-44d1546fd5fb nodeName:}" failed. No retries permitted until 2026-01-22 05:46:24.502241262 +0000 UTC m=+45.340879305 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/377cd579-2ade-48ea-ad2a-44d1546fd5fb-metrics-certs") pod "network-metrics-daemon-gxwx2" (UID: "377cd579-2ade-48ea-ad2a-44d1546fd5fb") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.547604 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.547697 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.547752 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.547778 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.547915 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:20Z","lastTransitionTime":"2026-01-22T05:46:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.636479 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 16:55:41.992988615 +0000 UTC Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.651121 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.651182 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.651198 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.651220 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.651234 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:20Z","lastTransitionTime":"2026-01-22T05:46:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.718368 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.718433 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.718368 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:46:20 crc kubenswrapper[4982]: E0122 05:46:20.718587 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.718604 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:46:20 crc kubenswrapper[4982]: E0122 05:46:20.718706 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:46:20 crc kubenswrapper[4982]: E0122 05:46:20.719368 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:46:20 crc kubenswrapper[4982]: E0122 05:46:20.720107 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.760108 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.760191 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.760211 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.760237 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.760256 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:20Z","lastTransitionTime":"2026-01-22T05:46:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.862997 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.863082 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.863100 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.863126 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.863147 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:20Z","lastTransitionTime":"2026-01-22T05:46:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.966999 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.967445 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.967607 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.967798 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:20 crc kubenswrapper[4982]: I0122 05:46:20.968004 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:20Z","lastTransitionTime":"2026-01-22T05:46:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.071742 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.071807 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.071824 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.071848 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.071916 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:21Z","lastTransitionTime":"2026-01-22T05:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.175831 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.175925 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.175949 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.175974 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.175996 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:21Z","lastTransitionTime":"2026-01-22T05:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.278619 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.278673 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.278690 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.278714 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.278731 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:21Z","lastTransitionTime":"2026-01-22T05:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.382090 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.382160 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.382185 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.382215 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.382241 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:21Z","lastTransitionTime":"2026-01-22T05:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.486129 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.486457 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.486585 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.486670 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.486747 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:21Z","lastTransitionTime":"2026-01-22T05:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.590812 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.590915 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.590943 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.590974 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.590996 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:21Z","lastTransitionTime":"2026-01-22T05:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.637384 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 10:30:11.821659338 +0000 UTC Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.694191 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.694242 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.694254 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.694275 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.694290 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:21Z","lastTransitionTime":"2026-01-22T05:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.797930 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.798006 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.798027 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.798064 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.798097 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:21Z","lastTransitionTime":"2026-01-22T05:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.872613 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.872658 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.872670 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.872689 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.872702 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:21Z","lastTransitionTime":"2026-01-22T05:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:21 crc kubenswrapper[4982]: E0122 05:46:21.890411 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8c22d1f6-312d-4feb-94eb-4bba85065af7\\\",\\\"systemUUID\\\":\\\"a4516023-e293-4f20-9f1e-642f5f94b84c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:21Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.896297 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.896343 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.896363 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.896392 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.896410 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:21Z","lastTransitionTime":"2026-01-22T05:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:21 crc kubenswrapper[4982]: E0122 05:46:21.913233 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8c22d1f6-312d-4feb-94eb-4bba85065af7\\\",\\\"systemUUID\\\":\\\"a4516023-e293-4f20-9f1e-642f5f94b84c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:21Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.917621 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.917694 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.917722 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.917758 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.917784 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:21Z","lastTransitionTime":"2026-01-22T05:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:21 crc kubenswrapper[4982]: E0122 05:46:21.932106 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8c22d1f6-312d-4feb-94eb-4bba85065af7\\\",\\\"systemUUID\\\":\\\"a4516023-e293-4f20-9f1e-642f5f94b84c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:21Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.937913 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.937981 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.938001 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.938029 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.938054 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:21Z","lastTransitionTime":"2026-01-22T05:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:21 crc kubenswrapper[4982]: E0122 05:46:21.953053 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8c22d1f6-312d-4feb-94eb-4bba85065af7\\\",\\\"systemUUID\\\":\\\"a4516023-e293-4f20-9f1e-642f5f94b84c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:21Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.956974 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.957022 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.957040 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.957065 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.957081 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:21Z","lastTransitionTime":"2026-01-22T05:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:21 crc kubenswrapper[4982]: E0122 05:46:21.970415 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8c22d1f6-312d-4feb-94eb-4bba85065af7\\\",\\\"systemUUID\\\":\\\"a4516023-e293-4f20-9f1e-642f5f94b84c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:21Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:21 crc kubenswrapper[4982]: E0122 05:46:21.970535 4982 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
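The two records above show the node-status patch dying in TLS verification: the verifier rejects any certificate whose validity window does not contain the current time, and after repeated failures the kubelet gives up with "update node status exceeds retry count". A minimal Go sketch of the same window check using only the standard library; the certificate path is a placeholder, not taken from this log:

    package main

    import (
        "crypto/x509"
        "encoding/pem"
        "fmt"
        "os"
        "time"
    )

    func main() {
        // Placeholder path; point this at the webhook's serving certificate.
        data, err := os.ReadFile("webhook-serving.crt")
        if err != nil {
            panic(err)
        }
        block, _ := pem.Decode(data)
        if block == nil {
            panic("no PEM block found")
        }
        cert, err := x509.ParseCertificate(block.Bytes)
        if err != nil {
            panic(err)
        }
        now := time.Now().UTC()
        switch {
        case now.Before(cert.NotBefore):
            fmt.Printf("certificate is not yet valid: current time %s is before %s\n",
                now.Format(time.RFC3339), cert.NotBefore.UTC().Format(time.RFC3339))
        case now.After(cert.NotAfter):
            // The branch hit above: current time 2026-01-22T05:46:21Z is after 2025-08-24T17:21:41Z.
            fmt.Printf("certificate has expired: current time %s is after %s\n",
                now.Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
        default:
            fmt.Println("certificate is within its validity window")
        }
    }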
Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.972971 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.973012 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.973026 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.973046 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:21 crc kubenswrapper[4982]: I0122 05:46:21.973060 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:21Z","lastTransitionTime":"2026-01-22T05:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.076825 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.076914 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.076932 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.076959 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.076981 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:22Z","lastTransitionTime":"2026-01-22T05:46:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.180398 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.180450 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.180462 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.180482 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.180493 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:22Z","lastTransitionTime":"2026-01-22T05:46:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.283327 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.283397 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.283415 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.283441 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.283460 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:22Z","lastTransitionTime":"2026-01-22T05:46:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.387389 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.387482 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.387510 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.387548 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.387574 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:22Z","lastTransitionTime":"2026-01-22T05:46:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.490450 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.490503 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.490514 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.490532 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
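Each setters.go record above carries the node's Ready condition as inline JSON. To make the shape concrete, a small self-contained Go sketch that decodes one of these payloads; the struct is a local stand-in mirroring the core/v1 NodeCondition fields, not an import:

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // Local stand-in for the core/v1 NodeCondition fields seen in the log.
    type NodeCondition struct {
        Type               string `json:"type"`
        Status             string `json:"status"`
        LastHeartbeatTime  string `json:"lastHeartbeatTime"`
        LastTransitionTime string `json:"lastTransitionTime"`
        Reason             string `json:"reason"`
        Message            string `json:"message"`
    }

    func main() {
        // Condition payload copied verbatim from one of the setters.go records above.
        raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:22Z","lastTransitionTime":"2026-01-22T05:46:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}`
        var c NodeCondition
        if err := json.Unmarshal([]byte(raw), &c); err != nil {
            panic(err)
        }
        fmt.Printf("%s=%s (%s): %s\n", c.Type, c.Status, c.Reason, c.Message)
    }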
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.490544 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:22Z","lastTransitionTime":"2026-01-22T05:46:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.594181 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.594228 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.594241 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.594262 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.594278 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:22Z","lastTransitionTime":"2026-01-22T05:46:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.638377 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 15:25:41.753095708 +0000 UTC
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.697143 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.697212 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.697236 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.697266 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.697287 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:22Z","lastTransitionTime":"2026-01-22T05:46:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.718648 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.718714 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.718768 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.718679 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 05:46:22 crc kubenswrapper[4982]: E0122 05:46:22.718929 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 22 05:46:22 crc kubenswrapper[4982]: E0122 05:46:22.719112 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb"
Jan 22 05:46:22 crc kubenswrapper[4982]: E0122 05:46:22.719321 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 22 05:46:22 crc kubenswrapper[4982]: E0122 05:46:22.719457 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.800641 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.800717 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.800741 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.800771 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
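The "No sandbox for pod can be found" and "Error syncing pod, skipping" records above are downstream of a single fact: the container runtime reports its NetworkReady condition as false, and pods that need pod networking are refused until it flips. A simplified sketch of that gate, with a struct mirroring the CRI RuntimeCondition fields (the helper is illustrative, not the kubelet's actual code):

    package main

    import (
        "errors"
        "fmt"
    )

    // Mirrors the fields of the CRI RuntimeCondition the runtime reports.
    type RuntimeCondition struct {
        Type    string
        Status  bool
        Reason  string
        Message string
    }

    // networkReady returns nil only when NetworkReady is true; otherwise it
    // builds an error shaped like the ones in the records above.
    func networkReady(conds []RuntimeCondition) error {
        for _, c := range conds {
            if c.Type != "NetworkReady" {
                continue
            }
            if c.Status {
                return nil
            }
            return fmt.Errorf("container runtime network not ready: %s=false reason:%s message:%s",
                c.Type, c.Reason, c.Message)
        }
        return errors.New("NetworkReady condition not reported")
    }

    func main() {
        conds := []RuntimeCondition{{
            Type:    "NetworkReady",
            Status:  false,
            Reason:  "NetworkPluginNotReady",
            Message: "Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?",
        }}
        if err := networkReady(conds); err != nil {
            fmt.Println("network is not ready:", err)
        }
    }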
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.800793 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:22Z","lastTransitionTime":"2026-01-22T05:46:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.905143 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.905222 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.905246 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.905271 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:22 crc kubenswrapper[4982]: I0122 05:46:22.905289 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:22Z","lastTransitionTime":"2026-01-22T05:46:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.008411 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.008504 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.008530 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.008560 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.008579 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:23Z","lastTransitionTime":"2026-01-22T05:46:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.112124 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.112180 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.112192 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.112213 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.112226 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:23Z","lastTransitionTime":"2026-01-22T05:46:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.215924 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.215989 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.216002 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.216023 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.216039 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:23Z","lastTransitionTime":"2026-01-22T05:46:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.319452 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.319528 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.319552 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.319587 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.319612 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:23Z","lastTransitionTime":"2026-01-22T05:46:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.422952 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.423012 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.423027 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.423045 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.423056 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:23Z","lastTransitionTime":"2026-01-22T05:46:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.526619 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.527074 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.527502 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.527652 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.527801 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:23Z","lastTransitionTime":"2026-01-22T05:46:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.631732 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.631771 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.631782 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.631796 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.631808 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:23Z","lastTransitionTime":"2026-01-22T05:46:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.638933 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 19:17:37.274380542 +0000 UTC
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.735366 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.735406 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.735418 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.735431 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.735444 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:23Z","lastTransitionTime":"2026-01-22T05:46:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.839213 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.839317 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.839341 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.839375 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.839398 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:23Z","lastTransitionTime":"2026-01-22T05:46:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.942816 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.942989 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.943371 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.943627 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:23 crc kubenswrapper[4982]: I0122 05:46:23.943833 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:23Z","lastTransitionTime":"2026-01-22T05:46:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.048700 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.048772 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.048792 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.048817 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.048838 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:24Z","lastTransitionTime":"2026-01-22T05:46:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.152814 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.153012 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.153034 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.153061 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.153080 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:24Z","lastTransitionTime":"2026-01-22T05:46:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.257153 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.257226 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.257252 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.257284 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.257311 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:24Z","lastTransitionTime":"2026-01-22T05:46:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.360619 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.360694 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.360717 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.360747 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.360771 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:24Z","lastTransitionTime":"2026-01-22T05:46:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.463265 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.463691 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.463833 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.464101 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
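The certificate_manager.go:356 records above and below quote the same expiration (2026-02-24 05:53:03 UTC) but a different rotation deadline on every pass (2025-12-09, then 2025-11-29, then 2026-01-04), and each drawn deadline already lies in the past on 2026-01-22, so rotation is perpetually due. That pattern is consistent with a jittered deadline drawn afresh from late in the certificate's lifetime; a sketch under that assumption (the 70-90% window and the issuance date below are assumptions for illustration, not values from this log):

    package main

    import (
        "fmt"
        "math/rand"
        "time"
    )

    // rotationDeadline draws a fresh deadline at a random point late in the
    // certificate's lifetime. The 70-90% window is an assumed jitter range.
    func rotationDeadline(notBefore, notAfter time.Time) time.Time {
        lifetime := notAfter.Sub(notBefore)
        jittered := time.Duration(float64(lifetime) * (0.7 + 0.2*rand.Float64()))
        return notBefore.Add(jittered)
    }

    func main() {
        notAfter, err := time.Parse("2006-01-02 15:04:05 -0700 MST", "2026-02-24 05:53:03 +0000 UTC")
        if err != nil {
            panic(err)
        }
        notBefore := notAfter.Add(-90 * 24 * time.Hour) // assumed issuance date
        for i := 0; i < 3; i++ {
            // Each draw lands somewhere different, like the three deadlines in the log.
            fmt.Println("rotation deadline:", rotationDeadline(notBefore, notAfter))
        }
    }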
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.464234 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:24Z","lastTransitionTime":"2026-01-22T05:46:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.555045 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/377cd579-2ade-48ea-ad2a-44d1546fd5fb-metrics-certs\") pod \"network-metrics-daemon-gxwx2\" (UID: \"377cd579-2ade-48ea-ad2a-44d1546fd5fb\") " pod="openshift-multus/network-metrics-daemon-gxwx2"
Jan 22 05:46:24 crc kubenswrapper[4982]: E0122 05:46:24.555410 4982 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 22 05:46:24 crc kubenswrapper[4982]: E0122 05:46:24.555842 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/377cd579-2ade-48ea-ad2a-44d1546fd5fb-metrics-certs podName:377cd579-2ade-48ea-ad2a-44d1546fd5fb nodeName:}" failed. No retries permitted until 2026-01-22 05:46:32.555806214 +0000 UTC m=+53.394444257 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/377cd579-2ade-48ea-ad2a-44d1546fd5fb-metrics-certs") pod "network-metrics-daemon-gxwx2" (UID: "377cd579-2ade-48ea-ad2a-44d1546fd5fb") : object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.567935 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.567985 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.568004 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.568029 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
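The nestedpendingoperations.go:348 record above requeues the failed MountVolume with durationBeforeRetry 8s; the wait between attempts doubles after each failure up to a cap, so 8s indicates a few prior attempts. A sketch of that doubling schedule (initial value and cap are illustrative assumptions, not values read from the kubelet):

    package main

    import (
        "fmt"
        "time"
    )

    // backoffSchedule doubles the wait after each failed attempt up to a cap,
    // the pattern behind "durationBeforeRetry 8s" in the record above.
    func backoffSchedule(initial, maxDelay time.Duration, attempts int) []time.Duration {
        out := make([]time.Duration, 0, attempts)
        d := initial
        for i := 0; i < attempts; i++ {
            out = append(out, d)
            d *= 2
            if d > maxDelay {
                d = maxDelay
            }
        }
        return out
    }

    func main() {
        // e.g. [500ms 1s 2s 4s 8s 16s 32s 1m4s]: the fifth consecutive failure waits 8s.
        fmt.Println(backoffSchedule(500*time.Millisecond, 2*time.Minute, 8))
    }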
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.568047 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:24Z","lastTransitionTime":"2026-01-22T05:46:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.639630 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 16:57:32.553891245 +0000 UTC
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.671287 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.671705 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.671902 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.672085 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.672253 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:24Z","lastTransitionTime":"2026-01-22T05:46:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.718419 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.718502 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.718546 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2"
Jan 22 05:46:24 crc kubenswrapper[4982]: E0122 05:46:24.718596 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.718512 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 05:46:24 crc kubenswrapper[4982]: E0122 05:46:24.718698 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 22 05:46:24 crc kubenswrapper[4982]: E0122 05:46:24.718838 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 22 05:46:24 crc kubenswrapper[4982]: E0122 05:46:24.719005 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.776985 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.777057 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.777079 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.777109 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.777134 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:24Z","lastTransitionTime":"2026-01-22T05:46:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.881453 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.881498 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.881514 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.881535 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.881547 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:24Z","lastTransitionTime":"2026-01-22T05:46:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.985063 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.985127 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.985144 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.985172 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:24 crc kubenswrapper[4982]: I0122 05:46:24.985194 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:24Z","lastTransitionTime":"2026-01-22T05:46:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.089188 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.089655 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.089806 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.090022 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.090185 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:25Z","lastTransitionTime":"2026-01-22T05:46:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.194191 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.194248 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.194265 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.194293 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.194311 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:25Z","lastTransitionTime":"2026-01-22T05:46:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.297215 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.297276 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.297292 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.297313 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.297329 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:25Z","lastTransitionTime":"2026-01-22T05:46:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.350203 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.383007 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"]
Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.393219 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:25Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.399585 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.399628 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.399639 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.399659 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
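The status patches in the status_manager.go:875 records are JSON that has been quoted into the err string, which is why every quote arrives escaped. A hypothetical reading aid, not part of the kubelet: once the outer err="..." layer is peeled off, one strconv.Unquote plus json.Indent recovers a readable patch (the payload is shortened here; the uid and the null podIP fields come from the record above):

    package main

    import (
        "bytes"
        "encoding/json"
        "fmt"
        "strconv"
    )

    func main() {
        // One patch fragment as it appears after removing the outer err="..."
        // quoting layer; real patches carry the full conditions as well.
        quoted := `"{\"metadata\":{\"uid\":\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\"},\"status\":{\"podIP\":null,\"podIPs\":null}}"`
        raw, err := strconv.Unquote(quoted)
        if err != nil {
            panic(err)
        }
        var out bytes.Buffer
        if err := json.Indent(&out, []byte(raw), "", "  "); err != nil {
            panic(err)
        }
        fmt.Println(out.String())
    }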
Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.399671 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:25Z","lastTransitionTime":"2026-01-22T05:46:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.408772 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:25Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.435525 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d89d2c542320e62250c1cd2e647a48a26be58b60f268f696031d49a0292938c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:25Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.450983 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:25Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.469169 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c14d45773d7bbbdb8f046443e99c1786d929b9
d3f64e8031e09c7fdc60cef7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c14d45773d7bbbdb8f046443e99c1786d929b9d3f64e8031e09c7fdc60cef7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:46:14Z\\\",\\\"message\\\":\\\"r service openshift-operator-lifecycle-manager/catalog-operator-metrics for network=default\\\\nI0122 05:46:14.161685 6384 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/machine-config-daemon-gdpxx\\\\nI0122 05:46:14.161698 6384 obj_retry.go:365] Adding new object: *v1.Pod openshift-machine-config-operator/machine-config-daemon-gdpxx\\\\nI0122 05:46:14.161719 6384 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-gdpxx in node crc\\\\nF0122 05:46:14.161730 6384 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:14Z is after 2025-08-24T17:21:41Z]\\\\nI0122\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mb2qs_openshift-ovn-kubernetes(45295ff5-bb7d-450f-9ff1-eeb4edb0d705)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:25Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.487338 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d968804b-0683-488d-bc01-a0a16b689d6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9acc1a82033c3ce19a5331828cf1e54919f3eb43181d8ef15049d67b6c56dd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7df0c7cfcbf807a71f0b022f48858333e3de0be3f3de087efe5a231d41a9db90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9d4vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:25Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.504320 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gxwx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"377cd579-2ade-48ea-ad2a-44d1546fd5fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gxwx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:25Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.504534 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.504576 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.504587 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.504606 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.504619 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:25Z","lastTransitionTime":"2026-01-22T05:46:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.518650 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:25Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.533255 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:25Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.555501 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc
-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:25Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.570919 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:25Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.589279 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:25Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.603484 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:25Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.607528 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.607585 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.607606 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.607631 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.607644 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:25Z","lastTransitionTime":"2026-01-22T05:46:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.619870 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea1
77225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:25Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.634470 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastSt
ate\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:
45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:25Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.640732 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 05:38:42.786846555 +0000 UTC Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.648315 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:25Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.710743 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.710781 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.710790 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.710805 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.710817 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:25Z","lastTransitionTime":"2026-01-22T05:46:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.813592 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.813626 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.813637 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.813652 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.813662 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:25Z","lastTransitionTime":"2026-01-22T05:46:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.917074 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.917136 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.917145 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.917162 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:25 crc kubenswrapper[4982]: I0122 05:46:25.917172 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:25Z","lastTransitionTime":"2026-01-22T05:46:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.020627 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.020675 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.020686 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.020704 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.020716 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:26Z","lastTransitionTime":"2026-01-22T05:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.122827 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.122884 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.122892 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.122906 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.122916 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:26Z","lastTransitionTime":"2026-01-22T05:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.226807 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.227168 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.227346 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.227519 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.227647 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:26Z","lastTransitionTime":"2026-01-22T05:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.330644 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.330703 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.330718 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.330739 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.330752 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:26Z","lastTransitionTime":"2026-01-22T05:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.433978 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.434065 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.434170 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.434206 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.434228 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:26Z","lastTransitionTime":"2026-01-22T05:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.537903 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.538147 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.538208 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.538274 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.538332 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:26Z","lastTransitionTime":"2026-01-22T05:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.640939 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 02:56:24.249751421 +0000 UTC Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.641117 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.641161 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.641171 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.641188 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.641198 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:26Z","lastTransitionTime":"2026-01-22T05:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.718912 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.718942 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.718965 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:46:26 crc kubenswrapper[4982]: E0122 05:46:26.719638 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.719161 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:46:26 crc kubenswrapper[4982]: E0122 05:46:26.719719 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:46:26 crc kubenswrapper[4982]: E0122 05:46:26.719359 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:46:26 crc kubenswrapper[4982]: E0122 05:46:26.719987 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.743609 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.743668 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.743683 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.743706 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.743721 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:26Z","lastTransitionTime":"2026-01-22T05:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.847013 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.847073 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.847090 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.847114 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.847135 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:26Z","lastTransitionTime":"2026-01-22T05:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.952219 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.952268 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.952280 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.952301 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:26 crc kubenswrapper[4982]: I0122 05:46:26.952316 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:26Z","lastTransitionTime":"2026-01-22T05:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.055564 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.055921 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.055935 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.055956 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.055970 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:27Z","lastTransitionTime":"2026-01-22T05:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.159093 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.159147 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.159156 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.159174 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.159184 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:27Z","lastTransitionTime":"2026-01-22T05:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.262822 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.262921 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.262940 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.262964 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.262980 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:27Z","lastTransitionTime":"2026-01-22T05:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.365160 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.365196 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.365207 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.365223 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.365234 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:27Z","lastTransitionTime":"2026-01-22T05:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.468179 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.468220 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.468230 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.468247 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.468259 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:27Z","lastTransitionTime":"2026-01-22T05:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.570602 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.570635 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.570644 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.570659 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.570668 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:27Z","lastTransitionTime":"2026-01-22T05:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.641461 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 05:21:58.848150442 +0000 UTC Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.674025 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.674079 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.674094 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.674118 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.674153 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:27Z","lastTransitionTime":"2026-01-22T05:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.777331 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.777662 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.777744 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.777813 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.777940 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:27Z","lastTransitionTime":"2026-01-22T05:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.882012 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.882059 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.882072 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.882090 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.882103 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:27Z","lastTransitionTime":"2026-01-22T05:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.984902 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.985310 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.985463 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.985605 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:27 crc kubenswrapper[4982]: I0122 05:46:27.985785 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:27Z","lastTransitionTime":"2026-01-22T05:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.089609 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.089916 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.090052 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.090140 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.090216 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:28Z","lastTransitionTime":"2026-01-22T05:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.192991 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.193069 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.193086 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.193111 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.193133 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:28Z","lastTransitionTime":"2026-01-22T05:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.295650 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.295973 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.296054 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.296126 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.296202 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:28Z","lastTransitionTime":"2026-01-22T05:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.399301 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.399340 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.399349 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.399366 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.399378 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:28Z","lastTransitionTime":"2026-01-22T05:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.502454 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.502515 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.502534 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.502561 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.502579 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:28Z","lastTransitionTime":"2026-01-22T05:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.605406 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.605490 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.605515 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.605548 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.605571 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:28Z","lastTransitionTime":"2026-01-22T05:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.642599 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 01:39:23.504231678 +0000 UTC Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.708691 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.708735 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.708747 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.708765 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.708779 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:28Z","lastTransitionTime":"2026-01-22T05:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.718988 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.719036 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.719070 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.719055 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:46:28 crc kubenswrapper[4982]: E0122 05:46:28.719149 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:46:28 crc kubenswrapper[4982]: E0122 05:46:28.719258 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:46:28 crc kubenswrapper[4982]: E0122 05:46:28.719416 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:46:28 crc kubenswrapper[4982]: E0122 05:46:28.719609 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.812279 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.812325 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.812334 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.812352 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.812362 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:28Z","lastTransitionTime":"2026-01-22T05:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.915519 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.915582 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.915602 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.915638 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:28 crc kubenswrapper[4982]: I0122 05:46:28.915661 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:28Z","lastTransitionTime":"2026-01-22T05:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.020383 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.020445 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.020462 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.020486 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.020504 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:29Z","lastTransitionTime":"2026-01-22T05:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.122489 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.122557 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.122575 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.122604 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.122623 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:29Z","lastTransitionTime":"2026-01-22T05:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.225873 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.226377 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.226495 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.226580 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.226652 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:29Z","lastTransitionTime":"2026-01-22T05:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.328676 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.328728 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.328741 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.328766 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.328781 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:29Z","lastTransitionTime":"2026-01-22T05:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.432768 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.432900 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.432928 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.432963 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.432987 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:29Z","lastTransitionTime":"2026-01-22T05:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.536077 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.536166 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.536193 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.536224 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.536249 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:29Z","lastTransitionTime":"2026-01-22T05:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.639453 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.639925 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.640053 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.640163 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.640281 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:29Z","lastTransitionTime":"2026-01-22T05:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.643411 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 23:11:20.579013641 +0000 UTC Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.739724 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:29Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.744065 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.744102 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.744117 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.744134 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.744145 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:29Z","lastTransitionTime":"2026-01-22T05:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.754370 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:29Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.766469 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:29Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.791636 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d89d2c542320e62250c1cd2e647a48a26be58b60f268f696031d49a0292938c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:07Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:29Z is after 
2025-08-24T17:21:41Z" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.806318 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"pod
IPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:29Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.830451 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/ru
n/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c14d45773d7bbbdb8f046443e99c1786d929b9d3f64e8031e09c7fdc60cef7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c14d45773d7bbbdb8f046443e99c1786d929b9d3f64e8031e09c7fdc60cef7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:46:14Z\\\",\\\"message\\\":\\\"r service openshift-operator-lifecycle-manager/catalog-operator-metrics for network=default\\\\nI0122 05:46:14.161685 6384 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/machine-config-daemon-gdpxx\\\\nI0122 05:46:14.161698 6384 obj_retry.go:365] Adding new object: *v1.Pod openshift-machine-config-operator/machine-config-daemon-gdpxx\\\\nI0122 05:46:14.161719 6384 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-gdpxx in node crc\\\\nF0122 05:46:14.161730 6384 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or 
is not yet valid: current time 2026-01-22T05:46:14Z is after 2025-08-24T17:21:41Z]\\\\nI0122\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-mb2qs_openshift-ovn-kubernetes(45295ff5-bb7d-450f-9ff1-eeb4edb0d705)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/servicea
ccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:29Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.847669 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.847727 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.847781 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.847834 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.847897 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:29Z","lastTransitionTime":"2026-01-22T05:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.848167 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d968804b-0683-488d-bc01-a0a16b689d6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9acc1a82033c3ce19a5331828cf1e54919f3eb43181d8ef15049d67b6c56dd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7df0c7cfcbf807a71f0b022f48858333e3de0be3f3de087efe5a231d41a9db90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9d4vg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:29Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.862342 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gxwx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"377cd579-2ade-48ea-ad2a-44d1546fd5fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gxwx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:29Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:29 crc 
kubenswrapper[4982]: I0122 05:46:29.879644 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\
":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:29Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.896347 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:29Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.910467 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:29Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.929960 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:29Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.945795 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:29Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.950790 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.950821 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.950832 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.950875 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.950888 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:29Z","lastTransitionTime":"2026-01-22T05:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.957078 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:29Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.973612 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:29Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:29 crc kubenswrapper[4982]: I0122 05:46:29.994639 4982 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4
f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:29Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.015096 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b37309fa-e826-4c04-96a2-611415609985\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22646904f96d12e0c71cbc01fb8c49f02245acaca106b7090e22da3d3e22028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b85b4f1d0cefc55e522ca2999285f71b01ee3a5af68db920737dc01ac21d2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1004d804d31b1e415e24a30aa33e89a83d8a4cbc4db54be6001f3306461f097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efeef92a1a0c1b87ccce6c59c96e0fe0ec322fd7f828993c028dd9f8217fc33e\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efeef92a1a0c1b87ccce6c59c96e0fe0ec322fd7f828993c028dd9f8217fc33e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:30Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.053781 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.053818 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.053828 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.053861 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.053873 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:30Z","lastTransitionTime":"2026-01-22T05:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.157134 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.157193 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.157212 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.157238 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.157256 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:30Z","lastTransitionTime":"2026-01-22T05:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.260411 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.260547 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.260613 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.260657 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.260678 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:30Z","lastTransitionTime":"2026-01-22T05:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.363526 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.363574 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.363586 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.363607 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.363620 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:30Z","lastTransitionTime":"2026-01-22T05:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.466166 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.466282 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.466308 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.466343 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.466366 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:30Z","lastTransitionTime":"2026-01-22T05:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.525503 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:46:30 crc kubenswrapper[4982]: E0122 05:46:30.525703 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:47:02.525665448 +0000 UTC m=+83.364303491 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.525898 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:46:30 crc kubenswrapper[4982]: E0122 05:46:30.526095 4982 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 05:46:30 crc kubenswrapper[4982]: E0122 05:46:30.526187 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2026-01-22 05:47:02.526168622 +0000 UTC m=+83.364806665 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.569658 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.569701 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.569710 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.569729 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.569741 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:30Z","lastTransitionTime":"2026-01-22T05:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.626391 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:46:30 crc kubenswrapper[4982]: E0122 05:46:30.626526 4982 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 05:46:30 crc kubenswrapper[4982]: E0122 05:46:30.626888 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 05:47:02.626828223 +0000 UTC m=+83.465466256 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.644618 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 02:13:04.022092674 +0000 UTC Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.673945 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.674030 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.674050 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.674077 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.674099 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:30Z","lastTransitionTime":"2026-01-22T05:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.718575 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.718593 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:46:30 crc kubenswrapper[4982]: E0122 05:46:30.718712 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.718570 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:46:30 crc kubenswrapper[4982]: E0122 05:46:30.718952 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:46:30 crc kubenswrapper[4982]: E0122 05:46:30.719107 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.719447 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:46:30 crc kubenswrapper[4982]: E0122 05:46:30.719825 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.777203 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.777270 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.777288 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.777312 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.777329 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:30Z","lastTransitionTime":"2026-01-22T05:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.828443 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.828619 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:46:30 crc kubenswrapper[4982]: E0122 05:46:30.829017 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 05:46:30 crc kubenswrapper[4982]: E0122 05:46:30.829051 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 05:46:30 crc kubenswrapper[4982]: E0122 05:46:30.829072 4982 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 05:46:30 crc kubenswrapper[4982]: E0122 05:46:30.829137 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 05:47:02.829115821 +0000 UTC m=+83.667753854 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 05:46:30 crc kubenswrapper[4982]: E0122 05:46:30.829496 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 05:46:30 crc kubenswrapper[4982]: E0122 05:46:30.829567 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 05:46:30 crc kubenswrapper[4982]: E0122 05:46:30.829591 4982 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 05:46:30 crc kubenswrapper[4982]: E0122 05:46:30.829693 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 05:47:02.829661365 +0000 UTC m=+83.668299398 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.881218 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.881728 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.881954 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.882212 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.882491 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:30Z","lastTransitionTime":"2026-01-22T05:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.985578 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.985971 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.986151 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.986285 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:30 crc kubenswrapper[4982]: I0122 05:46:30.986407 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:30Z","lastTransitionTime":"2026-01-22T05:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.090134 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.090214 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.090232 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.090266 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.090290 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:31Z","lastTransitionTime":"2026-01-22T05:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.193520 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.193597 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.193622 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.193654 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.193679 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:31Z","lastTransitionTime":"2026-01-22T05:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.297592 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.297669 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.297688 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.297713 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.297732 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:31Z","lastTransitionTime":"2026-01-22T05:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.401349 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.401398 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.401415 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.401441 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.401460 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:31Z","lastTransitionTime":"2026-01-22T05:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.505237 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.505291 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.505303 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.505321 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.505332 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:31Z","lastTransitionTime":"2026-01-22T05:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.608756 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.609172 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.609355 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.609483 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.609599 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:31Z","lastTransitionTime":"2026-01-22T05:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.645371 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-03 17:32:04.321538256 +0000 UTC
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.713580 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.713622 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.713634 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.713657 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.713667 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:31Z","lastTransitionTime":"2026-01-22T05:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.817778 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.817832 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.817844 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.817886 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.817895 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:31Z","lastTransitionTime":"2026-01-22T05:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.921718 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.921789 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.921808 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.921833 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:31 crc kubenswrapper[4982]: I0122 05:46:31.921887 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:31Z","lastTransitionTime":"2026-01-22T05:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.025609 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.026023 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.026174 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.026374 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.026507 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:32Z","lastTransitionTime":"2026-01-22T05:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.076553 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.076946 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.077138 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.077294 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.077465 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:32Z","lastTransitionTime":"2026-01-22T05:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:32 crc kubenswrapper[4982]: E0122 05:46:32.099414 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8c22d1f6-312d-4feb-94eb-4bba85065af7\\\",\\\"systemUUID\\\":\\\"a4516023-e293-4f20-9f1e-642f5f94b84c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:32Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.105466 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.105553 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.105575 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.105605 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.105627 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:32Z","lastTransitionTime":"2026-01-22T05:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.134784 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.135079 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.135263 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.135451 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.135600 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:32Z","lastTransitionTime":"2026-01-22T05:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.165765 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.165818 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.165831 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.165876 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.165892 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:32Z","lastTransitionTime":"2026-01-22T05:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.191497 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.191586 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.191601 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.191623 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.191637 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:32Z","lastTransitionTime":"2026-01-22T05:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8c22d1f6-312d-4feb-94eb-4bba85065af7\\\",\\\"systemUUID\\\":\\\"a4516023-e293-4f20-9f1e-642f5f94b84c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:32Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:32 crc kubenswrapper[4982]: E0122 05:46:32.209309 4982 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.212014 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.212093 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.212115 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.212149 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.212184 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:32Z","lastTransitionTime":"2026-01-22T05:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.315282 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.315338 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.315356 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.315377 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.315395 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:32Z","lastTransitionTime":"2026-01-22T05:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.418373 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.418440 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.418455 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.418486 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.418503 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:32Z","lastTransitionTime":"2026-01-22T05:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.522261 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.522353 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.522381 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.522456 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.522478 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:32Z","lastTransitionTime":"2026-01-22T05:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.626200 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.626243 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.626254 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.626273 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.626287 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:32Z","lastTransitionTime":"2026-01-22T05:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.646394 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 04:09:48.51656258 +0000 UTC Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.653620 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/377cd579-2ade-48ea-ad2a-44d1546fd5fb-metrics-certs\") pod \"network-metrics-daemon-gxwx2\" (UID: \"377cd579-2ade-48ea-ad2a-44d1546fd5fb\") " pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:46:32 crc kubenswrapper[4982]: E0122 05:46:32.653820 4982 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 05:46:32 crc kubenswrapper[4982]: E0122 05:46:32.654167 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/377cd579-2ade-48ea-ad2a-44d1546fd5fb-metrics-certs podName:377cd579-2ade-48ea-ad2a-44d1546fd5fb nodeName:}" failed. 
No retries permitted until 2026-01-22 05:46:48.654145879 +0000 UTC m=+69.492783892 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/377cd579-2ade-48ea-ad2a-44d1546fd5fb-metrics-certs") pod "network-metrics-daemon-gxwx2" (UID: "377cd579-2ade-48ea-ad2a-44d1546fd5fb") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.718603 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.718604 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.718715 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.719262 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:46:32 crc kubenswrapper[4982]: E0122 05:46:32.719488 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:46:32 crc kubenswrapper[4982]: E0122 05:46:32.719770 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:46:32 crc kubenswrapper[4982]: E0122 05:46:32.719911 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:46:32 crc kubenswrapper[4982]: E0122 05:46:32.720182 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
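
The metrics-certs mount failure above is parked rather than retried immediately: nestedpendingoperations schedules the next attempt 16 s out (the m=+69.49 suffix is seconds since kubelet start). That 16 s is consistent with a doubling backoff; the sketch below assumes kubelet's usual 500 ms initial delay and roughly 2m2s cap, which this log does not itself confirm:

from datetime import timedelta

def backoff_schedule(initial=timedelta(milliseconds=500),
                     cap=timedelta(minutes=2, seconds=2),
                     attempts=8):
    # Double the wait after each failed attempt, clamped at the cap.
    delays, d = [], initial
    for _ in range(attempts):
        delays.append(d)
        d = min(d * 2, cap)
    return delays

print([str(d) for d in backoff_schedule()])
# 0.5s, 1s, 2s, 4s, 8s, 16s, ... -- the sixth delay matches the 16 s logged above
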
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.730111 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.730174 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.730202 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.730235 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.730259 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:32Z","lastTransitionTime":"2026-01-22T05:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.834899 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.834978 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.834997 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.835026 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.835048 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:32Z","lastTransitionTime":"2026-01-22T05:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.939256 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.939347 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.939373 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.939412 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:32 crc kubenswrapper[4982]: I0122 05:46:32.939437 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:32Z","lastTransitionTime":"2026-01-22T05:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.043985 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.044052 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.044076 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.044108 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.044132 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:33Z","lastTransitionTime":"2026-01-22T05:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.147847 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.148317 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.148485 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.148706 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.148934 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:33Z","lastTransitionTime":"2026-01-22T05:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
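
Note the shape of these heartbeats: MemoryPressure, DiskPressure, and PIDPressure all carry status False (no pressure), and only the Ready condition is False. The node is resource-healthy and is held NotReady purely by the network check. A small illustration of reading such a condition list, using the statuses from the patches above:

import json

conditions = json.loads("""[
  {"type": "MemoryPressure", "status": "False"},
  {"type": "DiskPressure",   "status": "False"},
  {"type": "PIDPressure",    "status": "False"},
  {"type": "Ready", "status": "False", "reason": "KubeletNotReady"}
]""")

ready = next(c for c in conditions if c["type"] == "Ready")
pressure = [c["type"] for c in conditions
            if c["type"] != "Ready" and c["status"] == "True"]
print("ready:", ready["status"], "| active pressure:", pressure or "none")
# -> ready: False | active pressure: none  (resource-healthy, blocked on CNI)
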
Has your network provider started?"} Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.251498 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.251587 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.251621 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.251654 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.251684 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:33Z","lastTransitionTime":"2026-01-22T05:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.354690 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.354754 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.354767 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.354789 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.354802 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:33Z","lastTransitionTime":"2026-01-22T05:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.458494 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.458560 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.458578 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.458607 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.458626 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:33Z","lastTransitionTime":"2026-01-22T05:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
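
The NotReady message is literal: no CNI config file exists yet in /etc/kubernetes/cni/net.d/, and readiness will only flip once the network provider (OVN-Kubernetes here, whose ovnkube-node pod is still starting) writes one. A rough sketch of that directory check; the accepted extensions follow libcni convention and are an assumption, not something read from this log:

from pathlib import Path

CNI_DIR = Path("/etc/kubernetes/cni/net.d")    # directory named in the message
EXTS = {".conf", ".conflist", ".json"}         # libcni's accepted extensions (assumed)

confs = sorted(p.name for p in CNI_DIR.iterdir()
               if p.suffix in EXTS) if CNI_DIR.is_dir() else []
print("network ready:", bool(confs), confs)    # stays False until the provider writes a config
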
Has your network provider started?"} Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.562634 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.562696 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.562713 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.562741 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.562760 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:33Z","lastTransitionTime":"2026-01-22T05:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.647620 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 03:24:19.923914598 +0000 UTC Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.666006 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.666056 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.666071 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.666089 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.666103 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:33Z","lastTransitionTime":"2026-01-22T05:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
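
Two certificate_manager lines a second apart report the same kubelet-serving expiry (2026-02-24) but different rotation deadlines (2025-12-20 above, 2025-12-01 here): the deadline is re-drawn at random on each pass, and since both drawn deadlines already lie in the past relative to the log's clock, rotation is due immediately. A sketch assuming upstream kubelet behavior of sampling uniformly in roughly the 70-90% band of the certificate lifetime; the notBefore below is hypothetical, only the expiry comes from the log:

import random
from datetime import datetime, timedelta

def rotation_deadline(not_before, not_after):
    # Sample a deadline uniformly in ~[70%, 90%] of the certificate lifetime
    # (band assumed from upstream behavior, not confirmed by this log).
    lifetime = not_after - not_before
    frac = 0.7 + 0.2 * random.random()
    return not_before + timedelta(seconds=lifetime.total_seconds() * frac)

not_before = datetime(2025, 2, 24, 5, 53, 3)   # hypothetical issue time
not_after  = datetime(2026, 2, 24, 5, 53, 3)   # expiry shown in both log lines
print(rotation_deadline(not_before, not_after))
# Both logged deadlines (2025-12-01, 2025-12-20) fall inside this band.
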
Has your network provider started?"} Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.719778 4982 scope.go:117] "RemoveContainer" containerID="18c14d45773d7bbbdb8f046443e99c1786d929b9d3f64e8031e09c7fdc60cef7" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.769083 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.769131 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.769142 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.769162 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.769177 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:33Z","lastTransitionTime":"2026-01-22T05:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.871729 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.872188 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.872205 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.872228 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.872243 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:33Z","lastTransitionTime":"2026-01-22T05:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.975447 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.975483 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.975498 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.975517 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:33 crc kubenswrapper[4982]: I0122 05:46:33.975529 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:33Z","lastTransitionTime":"2026-01-22T05:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.080228 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.080290 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.080311 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.080337 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.080357 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:34Z","lastTransitionTime":"2026-01-22T05:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.183096 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.183133 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.183146 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.183165 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.183177 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:34Z","lastTransitionTime":"2026-01-22T05:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
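
For anyone grepping this file: after the journald prefix ("Jan 22 05:46:33 crc kubenswrapper[4982]: "), every entry follows the klog header format, a severity letter fused with MMDD, wall time, pid, source file:line, then the structured message. A small helper that splits that header out; the sample line is copied from the RemoveContainer entry above with the container ID shortened:

import re

KLOG = re.compile(
    r'^(?P<sev>[IWEF])(?P<date>\d{4}) (?P<time>\d{2}:\d{2}:\d{2}\.\d+)\s+'
    r'(?P<pid>\d+) (?P<src>[^\]]+)\] (?P<msg>.*)$')

line = ('I0122 05:46:33.719778 4982 scope.go:117] '
        '"RemoveContainer" containerID="18c14d45773d..."')   # ID shortened
m = KLOG.match(line)
print(m.group("sev"), m.group("date"), m.group("src"), "->", m.group("msg"))
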
Has your network provider started?"} Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.285463 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.285539 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.285560 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.285586 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.285605 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:34Z","lastTransitionTime":"2026-01-22T05:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.351624 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mb2qs_45295ff5-bb7d-450f-9ff1-eeb4edb0d705/ovnkube-controller/1.log" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.355273 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" event={"ID":"45295ff5-bb7d-450f-9ff1-eeb4edb0d705","Type":"ContainerStarted","Data":"e0a9d78be03393c2eb386434bf86f740e39014aae32e861f7e07202d2e89d555"} Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.355771 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.374793 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:34Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.388819 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.388915 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.388936 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.388966 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.388985 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:34Z","lastTransitionTime":"2026-01-22T05:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.394792 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:34Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.410787 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:34Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.432974 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d89d2c542320e62250c1cd2e647a48a26be58b60f268f696031d49a0292938c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:34Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.450821 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:34Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.470521 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:34Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.490190 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\
":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:34Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.493833 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.493930 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.493953 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.493982 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.494004 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:34Z","lastTransitionTime":"2026-01-22T05:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.518217 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a9d78be03393c2eb386434bf86f740e39014aa
e32e861f7e07202d2e89d555\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c14d45773d7bbbdb8f046443e99c1786d929b9d3f64e8031e09c7fdc60cef7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:46:14Z\\\",\\\"message\\\":\\\"r service openshift-operator-lifecycle-manager/catalog-operator-metrics for network=default\\\\nI0122 05:46:14.161685 6384 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/machine-config-daemon-gdpxx\\\\nI0122 05:46:14.161698 6384 obj_retry.go:365] Adding new object: *v1.Pod openshift-machine-config-operator/machine-config-daemon-gdpxx\\\\nI0122 05:46:14.161719 6384 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-gdpxx in node crc\\\\nF0122 05:46:14.161730 6384 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:14Z is after 
2025-08-24T17:21:41Z]\\\\nI0122\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initCo
ntainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:34Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.532991 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d968804b-0683-488d-bc01-a0a16b689d6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9acc1a82033c3ce19a5331828cf1e54919f3eb43181d8ef15049d67b6c56dd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7df0c7cfcbf807a71f0b022f48858333e3de0be3f3de087efe5a231d41a9db90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9d4vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:34Z is after 2025-08-24T17:21:41Z" Jan 22 
05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.545009 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gxwx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"377cd579-2ade-48ea-ad2a-44d1546fd5fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gxwx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:34Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.560361 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:34Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.572275 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b37309fa-e826-4c04-96a2-611415609985\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22646904f96d12e0c71cbc01fb8c49f02245acaca106b7090e22da3d3e22028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b85b4f1d0cefc55e522ca2999285f71b01ee3a5af68db920737dc01ac21d2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1004d804d31b1e415e24a30aa33e89a83d8a4cbc4db54be6001f3306461f097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efeef92a1a0c1b87ccce6c59c96e0fe0ec322fd7f828993c028dd9f8217fc33e\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efeef92a1a0c1b87ccce6c59c96e0fe0ec322fd7f828993c028dd9f8217fc33e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:34Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.585799 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:34Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.596654 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.596716 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.596728 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.596749 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.596762 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:34Z","lastTransitionTime":"2026-01-22T05:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.599202 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:34Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.619562 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:34Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.634427 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:34Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.647957 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 16:28:48.308987817 +0000 UTC
Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.648086 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:34Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.700253 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.700295 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.700308 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.700330 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.700344 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:34Z","lastTransitionTime":"2026-01-22T05:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.718225 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.718276 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.718341 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2"
Jan 22 05:46:34 crc kubenswrapper[4982]: E0122 05:46:34.718417 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 22 05:46:34 crc kubenswrapper[4982]: E0122 05:46:34.718503 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 22 05:46:34 crc kubenswrapper[4982]: E0122 05:46:34.718948 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb"
Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.718991 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 05:46:34 crc kubenswrapper[4982]: E0122 05:46:34.719087 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.802341 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.802385 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.802397 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.802415 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.802426 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:34Z","lastTransitionTime":"2026-01-22T05:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.905501 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.905536 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.905547 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.905561 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:34 crc kubenswrapper[4982]: I0122 05:46:34.905571 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:34Z","lastTransitionTime":"2026-01-22T05:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.008952 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.009004 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.009015 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.009035 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.009048 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:35Z","lastTransitionTime":"2026-01-22T05:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.111775 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.111819 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.111829 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.111843 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.111870 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:35Z","lastTransitionTime":"2026-01-22T05:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.214897 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.214966 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.214986 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.215012 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.215031 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:35Z","lastTransitionTime":"2026-01-22T05:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.318638 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.318687 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.318700 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.318719 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.318731 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:35Z","lastTransitionTime":"2026-01-22T05:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.360601 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mb2qs_45295ff5-bb7d-450f-9ff1-eeb4edb0d705/ovnkube-controller/2.log"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.361218 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mb2qs_45295ff5-bb7d-450f-9ff1-eeb4edb0d705/ovnkube-controller/1.log"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.364536 4982 generic.go:334] "Generic (PLEG): container finished" podID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerID="e0a9d78be03393c2eb386434bf86f740e39014aae32e861f7e07202d2e89d555" exitCode=1
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.364576 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" event={"ID":"45295ff5-bb7d-450f-9ff1-eeb4edb0d705","Type":"ContainerDied","Data":"e0a9d78be03393c2eb386434bf86f740e39014aae32e861f7e07202d2e89d555"}
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.364619 4982 scope.go:117] "RemoveContainer" containerID="18c14d45773d7bbbdb8f046443e99c1786d929b9d3f64e8031e09c7fdc60cef7"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.365532 4982 scope.go:117] "RemoveContainer" containerID="e0a9d78be03393c2eb386434bf86f740e39014aae32e861f7e07202d2e89d555"
Jan 22 05:46:35 crc kubenswrapper[4982]: E0122 05:46:35.365683 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-mb2qs_openshift-ovn-kubernetes(45295ff5-bb7d-450f-9ff1-eeb4edb0d705)\"" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.382521 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:35Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.399918 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d89d2c542320e62250c1cd2e647a48a26be58b60f268f696031d49a0292938c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:35Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.413441 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:35Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.420798 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.421048 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.421178 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.421249 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.421310 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:35Z","lastTransitionTime":"2026-01-22T05:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.427219 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:35Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.440103 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d968804b-0683-488d-bc01-a0a16b689d6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9acc1a82033c3ce19a5331828cf1e54919f3eb43181d8ef15049d67b6c56dd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7df0c7cfcbf807a71f0b022f48858333e3de0be3f3de087efe5a231d41a9db90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9d4vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:35Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.454792 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gxwx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"377cd579-2ade-48ea-ad2a-44d1546fd5fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gxwx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:35Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.471530 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:35Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.487643 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:35Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.503184 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:35Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.525235 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.525287 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.525303 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.525327 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.525346 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:35Z","lastTransitionTime":"2026-01-22T05:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.525875 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a9d78be03393c2eb386434bf86f740e39014aa
e32e861f7e07202d2e89d555\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c14d45773d7bbbdb8f046443e99c1786d929b9d3f64e8031e09c7fdc60cef7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:46:14Z\\\",\\\"message\\\":\\\"r service openshift-operator-lifecycle-manager/catalog-operator-metrics for network=default\\\\nI0122 05:46:14.161685 6384 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/machine-config-daemon-gdpxx\\\\nI0122 05:46:14.161698 6384 obj_retry.go:365] Adding new object: *v1.Pod openshift-machine-config-operator/machine-config-daemon-gdpxx\\\\nI0122 05:46:14.161719 6384 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-gdpxx in node crc\\\\nF0122 05:46:14.161730 6384 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:14Z is after 2025-08-24T17:21:41Z]\\\\nI0122\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a9d78be03393c2eb386434bf86f740e39014aae32e861f7e07202d2e89d555\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:46:35Z\\\",\\\"message\\\":\\\"ssFirewall event handler 9\\\\nI0122 05:46:34.803442 6626 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0122 05:46:34.803606 6626 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 05:46:34.803765 6626 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 05:46:34.804048 6626 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 05:46:34.804238 6626 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 05:46:34.804977 6626 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 05:46:34.805007 6626 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 05:46:34.805013 6626 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 05:46:34.805037 6626 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 05:46:34.805078 6626 handler.go:208] 
Removed *v1.Namespace event handler 1\\\\nI0122 05:46:34.805083 6626 factory.go:656] Stopping watch factory\\\\nI0122 05:46:34.805103 6626 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID
\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:35Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.539830 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:35Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.554795 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:35Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.569384 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:35Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.586108 4982 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4
f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:35Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.603219 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b37309fa-e826-4c04-96a2-611415609985\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22646904f96d12e0c71cbc01fb8c49f02245acaca106b7090e22da3d3e22028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b85b4f1d0cefc55e522ca2999285f71b01ee3a5af68db920737dc01ac21d2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1004d804d31b1e415e24a30aa33e89a83d8a4cbc4db54be6001f3306461f097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efeef92a1a0c1b87ccce6c59c96e0fe0ec322fd7f828993c028dd9f8217fc33e\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efeef92a1a0c1b87ccce6c59c96e0fe0ec322fd7f828993c028dd9f8217fc33e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:35Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.621406 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:35Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.627882 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.627924 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.627936 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.627953 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.627965 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:35Z","lastTransitionTime":"2026-01-22T05:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.637871 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:35Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.648934 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 08:02:32.01140605 +0000 UTC Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.730742 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.731018 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.731112 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.731201 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.731278 4982 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:35Z","lastTransitionTime":"2026-01-22T05:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.835202 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.835509 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.835569 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.835598 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.835648 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:35Z","lastTransitionTime":"2026-01-22T05:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.938742 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.938773 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.938783 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.938796 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:35 crc kubenswrapper[4982]: I0122 05:46:35.938805 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:35Z","lastTransitionTime":"2026-01-22T05:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.041698 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.041778 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.041818 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.041847 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.041892 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:36Z","lastTransitionTime":"2026-01-22T05:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.145294 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.145345 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.145355 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.145370 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.145380 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:36Z","lastTransitionTime":"2026-01-22T05:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.248047 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.248131 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.248152 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.248185 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.248209 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:36Z","lastTransitionTime":"2026-01-22T05:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.351520 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.351579 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.351601 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.351638 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.351675 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:36Z","lastTransitionTime":"2026-01-22T05:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.371759 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mb2qs_45295ff5-bb7d-450f-9ff1-eeb4edb0d705/ovnkube-controller/2.log" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.378576 4982 scope.go:117] "RemoveContainer" containerID="e0a9d78be03393c2eb386434bf86f740e39014aae32e861f7e07202d2e89d555" Jan 22 05:46:36 crc kubenswrapper[4982]: E0122 05:46:36.378894 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-mb2qs_openshift-ovn-kubernetes(45295ff5-bb7d-450f-9ff1-eeb4edb0d705)\"" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.405690 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:36Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.425182 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b37309fa-e826-4c04-96a2-611415609985\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22646904f96d12e0c71cbc01fb8c49f02245acaca106b7090e22da3d3e22028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b85b4f1d0cefc55e522ca2999285f71b01ee3a5af68db920737dc01ac21d2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1004d804d31b1e415e24a30aa33e89a83d8a4cbc4db54be6001f3306461f097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efeef92a1a0c1b87ccce6c59c96e0fe0ec322fd7f828993c028dd9f8217fc33e\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efeef92a1a0c1b87ccce6c59c96e0fe0ec322fd7f828993c028dd9f8217fc33e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:36Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.446379 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:36Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.455013 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.455050 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.455062 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.455080 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.455094 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:36Z","lastTransitionTime":"2026-01-22T05:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.468729 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:36Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.489199 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:36Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.506262 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:36Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.521709 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:36Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.539309 4982 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:36Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.559316 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.559373 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.559391 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.559419 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.559444 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:36Z","lastTransitionTime":"2026-01-22T05:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.561297 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:36Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.574977 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:36Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.592983 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d89d2c542320e62250c1cd2e647a48a26be58b60f268f696031d49a0292938c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:36Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.609947 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:36Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.625274 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:36Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.638601 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\
":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:36Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.649764 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 11:13:57.405787179 +0000 UTC Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.658712 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a9d78be03393c2eb386434bf86f740e39014aae32e861f7e07202d2e89d555\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a9d78be03393c2eb386434bf86f740e39014aae32e861f7e07202d2e89d555\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:46:35Z\\\",\\\"message\\\":\\\"ssFirewall event handler 9\\\\nI0122 05:46:34.803442 6626 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0122 05:46:34.803606 6626 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 05:46:34.803765 6626 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 05:46:34.804048 6626 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 05:46:34.804238 6626 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 05:46:34.804977 6626 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 05:46:34.805007 6626 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 05:46:34.805013 6626 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 05:46:34.805037 6626 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 05:46:34.805078 6626 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0122 05:46:34.805083 6626 factory.go:656] Stopping watch factory\\\\nI0122 05:46:34.805103 6626 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-mb2qs_openshift-ovn-kubernetes(45295ff5-bb7d-450f-9ff1-eeb4edb0d705)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:36Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.662533 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.662561 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.662570 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.662586 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.662596 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:36Z","lastTransitionTime":"2026-01-22T05:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.670136 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d968804b-0683-488d-bc01-a0a16b689d6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9acc1a82033c3ce19a5331828cf1e54919f3eb43181d8ef15049d67b6c56dd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7df0c7cfcbf807a71f0b022f48858333e3de0be3f3de087efe5a231d41a9db90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9d4vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:36Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.679742 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gxwx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"377cd579-2ade-48ea-ad2a-44d1546fd5fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gxwx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:36Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.718193 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 05:46:36 crc kubenswrapper[4982]: E0122 05:46:36.718321 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.718518 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 05:46:36 crc kubenswrapper[4982]: E0122 05:46:36.718585 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.718743 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2"
Jan 22 05:46:36 crc kubenswrapper[4982]: E0122 05:46:36.719036 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb"
Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.719336 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 05:46:36 crc kubenswrapper[4982]: E0122 05:46:36.719471 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.764451 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.764497 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.764510 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.764529 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.764537 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:36Z","lastTransitionTime":"2026-01-22T05:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.867304 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.867370 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.867380 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.867396 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.867407 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:36Z","lastTransitionTime":"2026-01-22T05:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.970602 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.970642 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.970652 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.970670 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:36 crc kubenswrapper[4982]: I0122 05:46:36.970681 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:36Z","lastTransitionTime":"2026-01-22T05:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.073494 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.073554 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.073588 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.073612 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.073630 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:37Z","lastTransitionTime":"2026-01-22T05:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.177204 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.177274 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.177296 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.177329 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.177351 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:37Z","lastTransitionTime":"2026-01-22T05:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.280749 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.280804 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.280823 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.280876 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.280895 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:37Z","lastTransitionTime":"2026-01-22T05:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.383331 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.383386 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.383403 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.383427 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.383445 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:37Z","lastTransitionTime":"2026-01-22T05:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.487249 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.487314 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.487333 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.487360 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.487380 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:37Z","lastTransitionTime":"2026-01-22T05:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.594231 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.594561 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.594691 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.594821 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.594960 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:37Z","lastTransitionTime":"2026-01-22T05:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.650277 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 13:43:34.484133963 +0000 UTC
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.698505 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.698574 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.698598 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.698630 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.698653 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:37Z","lastTransitionTime":"2026-01-22T05:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.801962 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.802015 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.802087 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.802122 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.802145 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:37Z","lastTransitionTime":"2026-01-22T05:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.904504 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.904564 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.904588 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.904618 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:37 crc kubenswrapper[4982]: I0122 05:46:37.904641 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:37Z","lastTransitionTime":"2026-01-22T05:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.007999 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.008073 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.008089 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.008113 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.008131 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:38Z","lastTransitionTime":"2026-01-22T05:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.111884 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.111965 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.111994 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.112027 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.112050 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:38Z","lastTransitionTime":"2026-01-22T05:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.215170 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.215238 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.215262 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.215295 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.215321 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:38Z","lastTransitionTime":"2026-01-22T05:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.317791 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.318126 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.318237 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.318344 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.318448 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:38Z","lastTransitionTime":"2026-01-22T05:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.421245 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.421309 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.421330 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.421356 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.421374 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:38Z","lastTransitionTime":"2026-01-22T05:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.524377 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.524442 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.524460 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.524485 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.524502 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:38Z","lastTransitionTime":"2026-01-22T05:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.628086 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.628156 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.628179 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.628210 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.628233 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:38Z","lastTransitionTime":"2026-01-22T05:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.651383 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 18:06:25.430163614 +0000 UTC
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.719013 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.719079 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.719130 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 05:46:38 crc kubenswrapper[4982]: E0122 05:46:38.719253 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 22 05:46:38 crc kubenswrapper[4982]: E0122 05:46:38.719396 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.719049 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 05:46:38 crc kubenswrapper[4982]: E0122 05:46:38.719668 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 22 05:46:38 crc kubenswrapper[4982]: E0122 05:46:38.719757 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.731251 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.731298 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.731314 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.731337 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.731355 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:38Z","lastTransitionTime":"2026-01-22T05:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.834123 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.834205 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.834229 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.834263 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.834287 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:38Z","lastTransitionTime":"2026-01-22T05:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.937426 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.937469 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.937486 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.937509 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:38 crc kubenswrapper[4982]: I0122 05:46:38.937524 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:38Z","lastTransitionTime":"2026-01-22T05:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.040813 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.040898 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.040916 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.040943 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.040959 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:39Z","lastTransitionTime":"2026-01-22T05:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.144493 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.144571 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.144591 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.144618 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.144636 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:39Z","lastTransitionTime":"2026-01-22T05:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.248454 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.248509 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.248527 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.248553 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.248573 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:39Z","lastTransitionTime":"2026-01-22T05:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.351781 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.351838 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.351870 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.351890 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.351902 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:39Z","lastTransitionTime":"2026-01-22T05:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.455438 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.455791 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.455978 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.456182 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.456344 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:39Z","lastTransitionTime":"2026-01-22T05:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.560547 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.560611 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.560630 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.560658 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.560676 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:39Z","lastTransitionTime":"2026-01-22T05:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.653206 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 06:54:40.689024016 +0000 UTC
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.663846 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.663915 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.663927 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.663946 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.663960 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:39Z","lastTransitionTime":"2026-01-22T05:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.744086 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:39Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.761638 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b37309fa-e826-4c04-96a2-611415609985\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22646904f96d12e0c71cbc01fb8c49f02245acaca106b7090e22da3d3e22028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b85b4f1d0cefc55e522ca2999285f71b01ee3a5af68db920737dc01ac21d2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1004d804d31b1e415e24a30aa33e89a83d8a4cbc4db54be6001f3306461f097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efeef92a1a0c1b87ccce6c59c96e0fe0ec322fd7f828993c028dd9f8217fc33e\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efeef92a1a0c1b87ccce6c59c96e0fe0ec322fd7f828993c028dd9f8217fc33e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:39Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.766895 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.766929 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.766937 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.766954 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.766964 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:39Z","lastTransitionTime":"2026-01-22T05:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.780327 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:39Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.799496 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:39Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.820066 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:39Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.839135 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:39Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.856323 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:39Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.870279 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.870331 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.870341 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.870365 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.870378 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:39Z","lastTransitionTime":"2026-01-22T05:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.876166 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:39Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.891980 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:39Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.906754 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:39Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.925292 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d89d2c542320e62250c1cd2e647a48a26be58b60f268f696031d49a0292938c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:07Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:39Z is after 
2025-08-24T17:21:41Z" Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.938235 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/k
ubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:39Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.951400 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:39Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.966087 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kub
ernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:39Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.972871 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.972926 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.972946 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.972970 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.972987 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:39Z","lastTransitionTime":"2026-01-22T05:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:39 crc kubenswrapper[4982]: I0122 05:46:39.989272 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a9d78be03393c2eb386434bf86f740e39014aae32e861f7e07202d2e89d555\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a9d78be03393c2eb386434bf86f740e39014aae32e861f7e07202d2e89d555\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:46:35Z\\\",\\\"message\\\":\\\"ssFirewall event handler 9\\\\nI0122 05:46:34.803442 6626 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0122 05:46:34.803606 6626 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 05:46:34.803765 6626 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 05:46:34.804048 6626 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 05:46:34.804238 6626 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 05:46:34.804977 6626 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 05:46:34.805007 6626 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 05:46:34.805013 6626 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 05:46:34.805037 6626 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 05:46:34.805078 6626 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0122 05:46:34.805083 6626 factory.go:656] Stopping watch factory\\\\nI0122 05:46:34.805103 6626 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mb2qs_openshift-ovn-kubernetes(45295ff5-bb7d-450f-9ff1-eeb4edb0d705)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:39Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.006135 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d968804b-0683-488d-bc01-a0a16b689d6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9acc1a82033c3ce19a5331828cf1e54919f3eb43181d8ef15049d67b6c56dd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7df0c7cfcbf807a71f0b022f48858333e3de0be3f3de087efe5a231d41a9db90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9d4vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:40Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.023049 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gxwx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"377cd579-2ade-48ea-ad2a-44d1546fd5fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gxwx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:40Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.075238 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.075274 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.075284 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.075302 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.075312 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:40Z","lastTransitionTime":"2026-01-22T05:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.179497 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.179543 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.179554 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.179570 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.179580 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:40Z","lastTransitionTime":"2026-01-22T05:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.282918 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.282965 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.282975 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.282993 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.283007 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:40Z","lastTransitionTime":"2026-01-22T05:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.386490 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.386538 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.386552 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.386575 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.386594 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:40Z","lastTransitionTime":"2026-01-22T05:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.490301 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.490371 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.490391 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.490420 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.490438 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:40Z","lastTransitionTime":"2026-01-22T05:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.593669 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.593714 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.593730 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.593753 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.593769 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:40Z","lastTransitionTime":"2026-01-22T05:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.653549 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 01:25:46.709592872 +0000 UTC Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.696195 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.696243 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.696252 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.696270 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.696280 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:40Z","lastTransitionTime":"2026-01-22T05:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.718750 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.718799 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.718844 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.718806 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:46:40 crc kubenswrapper[4982]: E0122 05:46:40.719028 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:46:40 crc kubenswrapper[4982]: E0122 05:46:40.719155 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:46:40 crc kubenswrapper[4982]: E0122 05:46:40.719300 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:46:40 crc kubenswrapper[4982]: E0122 05:46:40.719357 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.799476 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.799647 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.799666 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.799724 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.799744 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:40Z","lastTransitionTime":"2026-01-22T05:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.902480 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.902521 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.902533 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.902553 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:40 crc kubenswrapper[4982]: I0122 05:46:40.902566 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:40Z","lastTransitionTime":"2026-01-22T05:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.005440 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.005524 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.005555 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.005589 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.005608 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:41Z","lastTransitionTime":"2026-01-22T05:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.108513 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.108558 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.108570 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.108588 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.108604 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:41Z","lastTransitionTime":"2026-01-22T05:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.211340 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.211380 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.211389 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.211404 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.211413 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:41Z","lastTransitionTime":"2026-01-22T05:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.314495 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.314560 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.314579 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.314607 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.314626 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:41Z","lastTransitionTime":"2026-01-22T05:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.418431 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.418505 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.418524 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.418549 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.418566 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:41Z","lastTransitionTime":"2026-01-22T05:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.521615 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.521675 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.521761 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.521781 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.521797 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:41Z","lastTransitionTime":"2026-01-22T05:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.624419 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.624466 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.624477 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.624495 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.624503 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:41Z","lastTransitionTime":"2026-01-22T05:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.654573 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 14:06:35.397585089 +0000 UTC Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.726369 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.726455 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.726474 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.726506 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.726525 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:41Z","lastTransitionTime":"2026-01-22T05:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.829504 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.829560 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.829576 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.829600 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.829616 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:41Z","lastTransitionTime":"2026-01-22T05:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.933052 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.933109 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.933121 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.933167 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:41 crc kubenswrapper[4982]: I0122 05:46:41.933183 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:41Z","lastTransitionTime":"2026-01-22T05:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.035524 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.035587 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.035603 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.035624 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.035636 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:42Z","lastTransitionTime":"2026-01-22T05:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.138093 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.138134 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.138147 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.138165 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.138179 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:42Z","lastTransitionTime":"2026-01-22T05:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.241064 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.241108 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.241118 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.241135 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.241145 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:42Z","lastTransitionTime":"2026-01-22T05:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.328891 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.328933 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.328945 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.328960 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.328970 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:42Z","lastTransitionTime":"2026-01-22T05:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:42 crc kubenswrapper[4982]: E0122 05:46:42.344816 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8c22d1f6-312d-4feb-94eb-4bba85065af7\\\",\\\"systemUUID\\\":\\\"a4516023-e293-4f20-9f1e-642f5f94b84c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:42Z is after 
2025-08-24T17:21:41Z" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.349645 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.349689 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.349699 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.349717 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.349726 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:42Z","lastTransitionTime":"2026-01-22T05:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:42 crc kubenswrapper[4982]: E0122 05:46:42.367034 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8c22d1f6-312d-4feb-94eb-4bba85065af7\\\",\\\"systemUUID\\\":\\\"a4516023-e293-4f20-9f1e-642f5f94b84c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:42Z is after 
2025-08-24T17:21:41Z" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.371471 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.371602 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.371790 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.371999 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.372185 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:42Z","lastTransitionTime":"2026-01-22T05:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:42 crc kubenswrapper[4982]: E0122 05:46:42.385983 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8c22d1f6-312d-4feb-94eb-4bba85065af7\\\",\\\"systemUUID\\\":\\\"a4516023-e293-4f20-9f1e-642f5f94b84c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:42Z is after 
2025-08-24T17:21:41Z" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.390681 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.391012 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.391214 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.391322 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.391520 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:42Z","lastTransitionTime":"2026-01-22T05:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:42 crc kubenswrapper[4982]: E0122 05:46:42.405177 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8c22d1f6-312d-4feb-94eb-4bba85065af7\\\",\\\"systemUUID\\\":\\\"a4516023-e293-4f20-9f1e-642f5f94b84c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:42Z is after 
2025-08-24T17:21:41Z" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.408174 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.408323 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.408430 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.408537 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.408638 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:42Z","lastTransitionTime":"2026-01-22T05:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:42 crc kubenswrapper[4982]: E0122 05:46:42.422232 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8c22d1f6-312d-4feb-94eb-4bba85065af7\\\",\\\"systemUUID\\\":\\\"a4516023-e293-4f20-9f1e-642f5f94b84c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:42Z is after 
2025-08-24T17:21:41Z" Jan 22 05:46:42 crc kubenswrapper[4982]: E0122 05:46:42.422931 4982 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.424525 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.424563 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.424576 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.424593 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.424604 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:42Z","lastTransitionTime":"2026-01-22T05:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.526943 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.526988 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.526999 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.527016 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.527026 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:42Z","lastTransitionTime":"2026-01-22T05:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.655530 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 08:06:52.517988065 +0000 UTC
Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.718315 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2"
Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.718364 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.718413 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 05:46:42 crc kubenswrapper[4982]: I0122 05:46:42.718445 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 05:46:42 crc kubenswrapper[4982]: E0122 05:46:42.718502 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb"
Jan 22 05:46:42 crc kubenswrapper[4982]: E0122 05:46:42.718631 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 22 05:46:42 crc kubenswrapper[4982]: E0122 05:46:42.718782 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 22 05:46:42 crc kubenswrapper[4982]: E0122 05:46:42.718941 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
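Every NotReady heartbeat in this capture carries the same condition: the kubelet's network plugin found no CNI configuration under /etc/kubernetes/cni/net.d/, a directory normally populated by the cluster network pods once they come up (which they cannot while pod sandboxes are blocked on the network being ready). The directory scan itself is simple; a rough standalone equivalent, assuming the usual libcni extensions .conf, .conflist and .json, looks like this:

# cni_config_check.py - look for CNI network configs the way a CNI-based
# runtime would: any *.conf, *.conflist or *.json file in the conf dir.
from pathlib import Path

# Directory taken from the kubelet message; override for other runtimes.
CNI_CONF_DIR = Path("/etc/kubernetes/cni/net.d")

def cni_configs(conf_dir: Path) -> list[Path]:
    if not conf_dir.is_dir():
        return []
    return sorted(
        p for p in conf_dir.iterdir()
        if p.is_file() and p.suffix in {".conf", ".conflist", ".json"}
    )

configs = cni_configs(CNI_CONF_DIR)
if configs:
    print("CNI configs found:", *configs, sep="\n  ")
else:
    # Corresponds to the log: "no CNI configuration file in /etc/kubernetes/cni/net.d/"
    print(f"no CNI configuration file in {CNI_CONF_DIR}/ - network plugin not ready")

An empty result from this scan corresponds exactly to the "no CNI configuration file" message repeated throughout the log.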
Jan 22 05:46:43 crc kubenswrapper[4982]: I0122 05:46:43.656845 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 03:33:54.632907366 +0000 UTC
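Note how the kubelet-serving rotation deadline changes on every pass (2025-12-11, 2025-12-22, 2025-11-06, 2025-11-28, ...) while the expiration stays fixed at 2026-02-24 05:53:03 UTC. client-go's certificate manager recomputes the deadline each time with a random jitter, placing it roughly 70-90% of the way through the certificate's validity window; if this certificate was issued around 2025-02-24 for one year, the 70% mark lands near 2025-11-06, matching the earliest deadline seen here. Since the node clock (2026-01-22) is already past every candidate deadline, each recomputation immediately schedules another rotation attempt. A sketch of that computation (the 0.7 + 0.2 x rand jitter mirrors client-go's jitteryDuration; the issuance date is an assumption, as the log only shows the expiration):

# rotation_deadline.py - how a client-go style certificate manager picks its
# rotation deadline: a uniformly jittered point 70-90% of the way through
# the certificate's validity window. Illustrative sketch only.
import random
from datetime import datetime, timezone

def next_rotation_deadline(not_before: datetime, not_after: datetime) -> datetime:
    total = not_after - not_before
    return not_before + total * (0.7 + 0.2 * random.random())

# Assumed issuance date; the log only shows expiration 2026-02-24 05:53:03 UTC.
not_before = datetime(2025, 2, 24, 5, 53, 3, tzinfo=timezone.utc)
not_after = datetime(2026, 2, 24, 5, 53, 3, tzinfo=timezone.utc)
node_clock = datetime(2026, 1, 22, 5, 46, 43, tzinfo=timezone.utc)

for _ in range(3):
    deadline = next_rotation_deadline(not_before, not_after)
    # With the node clock at 2026-01-22, every candidate deadline is already
    # in the past, so rotation is retried (and re-logged) on each pass.
    print(deadline.isoformat(), "past due:", deadline < node_clock)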
Jan 22 05:46:44 crc kubenswrapper[4982]: I0122 05:46:44.658068 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-06 19:39:48.667259057 +0000 UTC
Jan 22 05:46:44 crc kubenswrapper[4982]: I0122 05:46:44.718569 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 05:46:44 crc kubenswrapper[4982]: I0122 05:46:44.718577 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 05:46:44 crc kubenswrapper[4982]: I0122 05:46:44.718591 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2"
Jan 22 05:46:44 crc kubenswrapper[4982]: I0122 05:46:44.718722 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 05:46:44 crc kubenswrapper[4982]: E0122 05:46:44.718891 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 22 05:46:44 crc kubenswrapper[4982]: E0122 05:46:44.718972 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 22 05:46:44 crc kubenswrapper[4982]: E0122 05:46:44.719141 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb"
Jan 22 05:46:44 crc kubenswrapper[4982]: E0122 05:46:44.719285 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 22 05:46:45 crc kubenswrapper[4982]: I0122 05:46:45.658811 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 15:16:47.626013292 +0000 UTC
Jan 22 05:46:46 crc kubenswrapper[4982]: I0122 05:46:46.659942 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 12:05:09.046759888 +0000 UTC
Jan 22 05:46:46 crc kubenswrapper[4982]: I0122 05:46:46.719177 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 05:46:46 crc kubenswrapper[4982]: I0122 05:46:46.719197 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2"
Jan 22 05:46:46 crc kubenswrapper[4982]: I0122 05:46:46.719270 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 05:46:46 crc kubenswrapper[4982]: E0122 05:46:46.719325 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 22 05:46:46 crc kubenswrapper[4982]: I0122 05:46:46.719436 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 05:46:46 crc kubenswrapper[4982]: E0122 05:46:46.719488 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb"
Jan 22 05:46:46 crc kubenswrapper[4982]: E0122 05:46:46.719598 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 22 05:46:46 crc kubenswrapper[4982]: E0122 05:46:46.719838 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Has your network provider started?"} Jan 22 05:46:46 crc kubenswrapper[4982]: I0122 05:46:46.958659 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:46 crc kubenswrapper[4982]: I0122 05:46:46.958705 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:46 crc kubenswrapper[4982]: I0122 05:46:46.958714 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:46 crc kubenswrapper[4982]: I0122 05:46:46.958733 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:46 crc kubenswrapper[4982]: I0122 05:46:46.958746 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:46Z","lastTransitionTime":"2026-01-22T05:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.061409 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.061451 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.061461 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.061480 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.061491 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:47Z","lastTransitionTime":"2026-01-22T05:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.164618 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.164662 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.164676 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.164700 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.164710 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:47Z","lastTransitionTime":"2026-01-22T05:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.267574 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.267624 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.267639 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.267656 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.267666 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:47Z","lastTransitionTime":"2026-01-22T05:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.370441 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.370484 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.370497 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.370514 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.370526 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:47Z","lastTransitionTime":"2026-01-22T05:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.473760 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.473826 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.473845 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.473910 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.473929 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:47Z","lastTransitionTime":"2026-01-22T05:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.577022 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.577073 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.577084 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.577104 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.577117 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:47Z","lastTransitionTime":"2026-01-22T05:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.661048 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 19:39:56.091420797 +0000 UTC Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.679957 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.680002 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.680012 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.680028 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.680037 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:47Z","lastTransitionTime":"2026-01-22T05:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.782362 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.782400 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.782412 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.782429 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.782440 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:47Z","lastTransitionTime":"2026-01-22T05:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.884735 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.884776 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.884785 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.884804 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.884816 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:47Z","lastTransitionTime":"2026-01-22T05:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.987363 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.987606 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.987708 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.987804 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:47 crc kubenswrapper[4982]: I0122 05:46:47.987918 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:47Z","lastTransitionTime":"2026-01-22T05:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.090547 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.090594 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.090605 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.090627 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.090642 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:48Z","lastTransitionTime":"2026-01-22T05:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.193007 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.193059 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.193073 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.193094 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.193106 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:48Z","lastTransitionTime":"2026-01-22T05:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.295575 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.295633 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.295642 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.295659 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.295668 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:48Z","lastTransitionTime":"2026-01-22T05:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.398658 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.398730 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.398749 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.398779 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.398799 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:48Z","lastTransitionTime":"2026-01-22T05:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.502397 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.502471 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.502496 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.502571 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.502598 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:48Z","lastTransitionTime":"2026-01-22T05:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.605439 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.605480 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.605492 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.605508 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.605519 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:48Z","lastTransitionTime":"2026-01-22T05:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.661299 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 22:13:07.321477114 +0000 UTC
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.707938 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.707997 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.708014 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.708045 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.708064 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:48Z","lastTransitionTime":"2026-01-22T05:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.718136 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.718159 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.718212 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.718315 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2"
Jan 22 05:46:48 crc kubenswrapper[4982]: E0122 05:46:48.718312 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 22 05:46:48 crc kubenswrapper[4982]: E0122 05:46:48.718470 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 22 05:46:48 crc kubenswrapper[4982]: E0122 05:46:48.718687 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 22 05:46:48 crc kubenswrapper[4982]: E0122 05:46:48.718756 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb"
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.726009 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/377cd579-2ade-48ea-ad2a-44d1546fd5fb-metrics-certs\") pod \"network-metrics-daemon-gxwx2\" (UID: \"377cd579-2ade-48ea-ad2a-44d1546fd5fb\") " pod="openshift-multus/network-metrics-daemon-gxwx2"
Jan 22 05:46:48 crc kubenswrapper[4982]: E0122 05:46:48.726236 4982 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 22 05:46:48 crc kubenswrapper[4982]: E0122 05:46:48.726311 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/377cd579-2ade-48ea-ad2a-44d1546fd5fb-metrics-certs podName:377cd579-2ade-48ea-ad2a-44d1546fd5fb nodeName:}" failed. No retries permitted until 2026-01-22 05:47:20.726289481 +0000 UTC m=+101.564927504 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/377cd579-2ade-48ea-ad2a-44d1546fd5fb-metrics-certs") pod "network-metrics-daemon-gxwx2" (UID: "377cd579-2ade-48ea-ad2a-44d1546fd5fb") : object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.811823 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.811903 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.811916 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.811937 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.811951 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:48Z","lastTransitionTime":"2026-01-22T05:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.914391 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.914754 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.914823 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.914987 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:48 crc kubenswrapper[4982]: I0122 05:46:48.915065 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:48Z","lastTransitionTime":"2026-01-22T05:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.017907 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.018172 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.018236 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.018306 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.018369 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:49Z","lastTransitionTime":"2026-01-22T05:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.121935 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.122430 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.122523 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.122605 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.122664 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:49Z","lastTransitionTime":"2026-01-22T05:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.225701 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.225995 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.226118 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.226268 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.226387 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:49Z","lastTransitionTime":"2026-01-22T05:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.329237 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.329278 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.329288 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.329304 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.329315 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:49Z","lastTransitionTime":"2026-01-22T05:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.431504 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.431540 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.431548 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.431562 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.431571 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:49Z","lastTransitionTime":"2026-01-22T05:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.533774 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.533817 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.533828 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.533847 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.533873 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:49Z","lastTransitionTime":"2026-01-22T05:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.635984 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.636031 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.636041 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.636057 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.636067 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:49Z","lastTransitionTime":"2026-01-22T05:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.662480 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 07:18:39.30897331 +0000 UTC
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.735568 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:49Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.737893 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.737930 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.737941 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.737957 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.737974 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:49Z","lastTransitionTime":"2026-01-22T05:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.752223 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:49Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.768764 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:49Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.790877 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a9d78be03393c2eb386434bf86f740e39014aae32e861f7e07202d2e89d555\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a9d78be03393c2eb386434bf86f740e39014aae32e861f7e07202d2e89d555\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:46:35Z\\\",\\\"message\\\":\\\"ssFirewall event handler 9\\\\nI0122 05:46:34.803442 6626 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0122 05:46:34.803606 6626 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 05:46:34.803765 6626 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 05:46:34.804048 6626 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 05:46:34.804238 6626 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 05:46:34.804977 6626 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 05:46:34.805007 6626 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 05:46:34.805013 6626 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 05:46:34.805037 6626 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 05:46:34.805078 6626 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0122 05:46:34.805083 6626 factory.go:656] Stopping watch factory\\\\nI0122 05:46:34.805103 6626 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-mb2qs_openshift-ovn-kubernetes(45295ff5-bb7d-450f-9ff1-eeb4edb0d705)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:49Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.807381 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d968804b-0683-488d-bc01-a0a16b689d6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9acc1a82033c3ce19a5331828cf1e54919f3eb43181d8ef15049d67b6c56dd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7df0c7cfcbf807a71f0b022f48858333e3de0be3f3de087efe5a231d41a9db90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9d4vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:49Z is after 2025-08-24T17:21:41Z" Jan 22 
05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.821220 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gxwx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"377cd579-2ade-48ea-ad2a-44d1546fd5fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gxwx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:49Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.836410 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:49Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.841123 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.841180 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.841194 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.841214 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 
05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.841229 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:49Z","lastTransitionTime":"2026-01-22T05:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.852059 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b37309fa-e826-4c04-96a2-611415609985\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22646904f96d12e0c71cbc01fb8c49f02245acaca106b7090e22da3d3e22028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b85b4f1d0cefc55e522ca2999285f71b01ee3a5af68db920737dc01ac21d2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1004d804d31b1e415e24a30aa33e89a83d8a4cbc4db54be6001f3306461f097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd
789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efeef92a1a0c1b87ccce6c59c96e0fe0ec322fd7f828993c028dd9f8217fc33e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efeef92a1a0c1b87ccce6c59c96e0fe0ec322fd7f828993c028dd9f8217fc33e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:49Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.866997 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:49Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.881104 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:49Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.895939 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:49Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.907436 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:49Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.920585 4982 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:49Z is after 2025-08-24T17:21:41Z" Jan 22 
05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.934424 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:49Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.943560 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.943620 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.943640 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.943666 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.943685 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:49Z","lastTransitionTime":"2026-01-22T05:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.951714 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:49Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.965527 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:49Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:49 crc kubenswrapper[4982]: I0122 05:46:49.986479 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d89d2c542320e62250c1cd2e647a48a26be58b60f268f696031d49a0292938c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:49Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.045836 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.045917 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:50 crc 
kubenswrapper[4982]: I0122 05:46:50.045928 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.045949 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.045964 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:50Z","lastTransitionTime":"2026-01-22T05:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.147951 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.147988 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.148002 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.148022 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.148035 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:50Z","lastTransitionTime":"2026-01-22T05:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.252328 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.252396 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.252412 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.252441 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.252466 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:50Z","lastTransitionTime":"2026-01-22T05:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.355159 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.355207 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.355221 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.355242 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.355255 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:50Z","lastTransitionTime":"2026-01-22T05:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.427375 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4xrw6_f70d53ec-9c73-45bf-b6b4-ec45565ef1e6/kube-multus/0.log"
Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.427438 4982 generic.go:334] "Generic (PLEG): container finished" podID="f70d53ec-9c73-45bf-b6b4-ec45565ef1e6" containerID="081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf" exitCode=1
Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.427478 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4xrw6" event={"ID":"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6","Type":"ContainerDied","Data":"081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf"}
Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.427983 4982 scope.go:117] "RemoveContainer" containerID="081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf"
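Every "Failed to update status for pod" record in this log is the same fault surfacing once per pod: before admitting the kubelet's status patch, the API server must call the admission webhook pod.network-node-identity.openshift.io at https://127.0.0.1:9743, and that webhook's serving certificate expired on 2025-08-24T17:21:41Z while the node clock reads 2026-01-22. The error text is Go's standard x509 validity failure, which amounts to comparing the current time against the certificate's NotBefore/NotAfter window. A minimal sketch of that check, assuming a PEM-encoded certificate on disk (the path below is illustrative, not taken from this log):

    package main

    import (
        "crypto/x509"
        "encoding/pem"
        "fmt"
        "os"
        "time"
    )

    func main() {
        // Hypothetical location; substitute the webhook's real serving cert.
        data, err := os.ReadFile("/etc/webhook-cert/tls.crt")
        if err != nil {
            panic(err)
        }
        block, _ := pem.Decode(data) // first PEM block should be the leaf certificate
        if block == nil {
            panic("no PEM block found")
        }
        cert, err := x509.ParseCertificate(block.Bytes)
        if err != nil {
            panic(err)
        }
        now := time.Now()
        if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
            // Same condition the verifier reports in this log as
            // "certificate has expired or is not yet valid: current time ... is after ...".
            fmt.Printf("certificate invalid: current time %s is outside [%s, %s]\n",
                now.Format(time.RFC3339),
                cert.NotBefore.Format(time.RFC3339),
                cert.NotAfter.Format(time.RFC3339))
            return
        }
        fmt.Println("certificate is within its validity window")
    }

Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.443613 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 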
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:50Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.458250 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.458295 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.458311 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.458333 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.458346 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:50Z","lastTransitionTime":"2026-01-22T05:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.462176 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:46:50Z\\\",\\\"message\\\":\\\"2026-01-22T05:46:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_e563e083-93cb-4d64-acfe-8dc4862aa7e0\\\\n2026-01-22T05:46:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_e563e083-93cb-4d64-acfe-8dc4862aa7e0 to /host/opt/cni/bin/\\\\n2026-01-22T05:46:05Z [verbose] multus-daemon started\\\\n2026-01-22T05:46:05Z [verbose] Readiness Indicator file check\\\\n2026-01-22T05:46:50Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:50Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.482844 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a9d78be03393c2eb386434bf86f740e39014aae32e861f7e07202d2e89d555\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a9d78be03393c2eb386434bf86f740e39014aae32e861f7e07202d2e89d555\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:46:35Z\\\",\\\"message\\\":\\\"ssFirewall event handler 9\\\\nI0122 05:46:34.803442 6626 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0122 05:46:34.803606 6626 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 05:46:34.803765 6626 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 05:46:34.804048 6626 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 05:46:34.804238 6626 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 05:46:34.804977 6626 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 05:46:34.805007 6626 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 05:46:34.805013 6626 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 05:46:34.805037 6626 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 05:46:34.805078 6626 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0122 05:46:34.805083 6626 factory.go:656] Stopping watch factory\\\\nI0122 05:46:34.805103 6626 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mb2qs_openshift-ovn-kubernetes(45295ff5-bb7d-450f-9ff1-eeb4edb0d705)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:50Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.498161 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d968804b-0683-488d-bc01-a0a16b689d6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9acc1a82033c3ce19a5331828cf1e54919f3eb43181d8ef15049d67b6c56dd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7df0c7cfcbf807a71f0b022f48858333e3de0be3f3de087efe5a231d41a9db90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9d4vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:50Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.510943 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gxwx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"377cd579-2ade-48ea-ad2a-44d1546fd5fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gxwx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:50Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.523517 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:50Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.536165 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b37309fa-e826-4c04-96a2-611415609985\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22646904f96d12e0c71cbc01fb8c49f02245acaca106b7090e22da3d3e22028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b85b4f1d0cefc55e522ca2999285f71b01ee3a5af68db920737dc01ac21d2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1004d804d31b1e415e24a30aa33e89a83d8a4cbc4db54be6001f3306461f097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efeef92a1a0c1b87ccce6c59c96e0fe0ec322fd7f828993c028dd9f8217fc33e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efeef92a1a0c1b87ccce6c59c96e0fe0ec322fd7f828993c028dd9f8217fc33e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:50Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.550787 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:50Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.562451 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.562501 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.562512 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.562531 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.562542 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:50Z","lastTransitionTime":"2026-01-22T05:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.565158 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:50Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.577819 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:50Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.589654 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:50Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.603430 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:50Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.618930 4982 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4
f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:50Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.632949 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:50Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.647533 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:50Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.659309 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:50Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.663207 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 11:18:04.18504578 +0000 UTC Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.665187 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.665230 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.665243 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.665267 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.665281 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:50Z","lastTransitionTime":"2026-01-22T05:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.675905 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d89d2c542320e62250c1cd2e647a48a26be58b60f268f696031d49a0292938c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:50Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.718265 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.718283 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.718353 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.718418 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:46:50 crc kubenswrapper[4982]: E0122 05:46:50.718726 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:46:50 crc kubenswrapper[4982]: E0122 05:46:50.718910 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:46:50 crc kubenswrapper[4982]: E0122 05:46:50.719069 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.719161 4982 scope.go:117] "RemoveContainer" containerID="e0a9d78be03393c2eb386434bf86f740e39014aae32e861f7e07202d2e89d555" Jan 22 05:46:50 crc kubenswrapper[4982]: E0122 05:46:50.719198 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:46:50 crc kubenswrapper[4982]: E0122 05:46:50.719324 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-mb2qs_openshift-ovn-kubernetes(45295ff5-bb7d-450f-9ff1-eeb4edb0d705)\"" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.768147 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.768204 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.768215 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.768232 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.768244 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:50Z","lastTransitionTime":"2026-01-22T05:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.870743 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.870786 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.870796 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.870812 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.870823 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:50Z","lastTransitionTime":"2026-01-22T05:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.973441 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.973494 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.973511 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.973536 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:50 crc kubenswrapper[4982]: I0122 05:46:50.973554 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:50Z","lastTransitionTime":"2026-01-22T05:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.077264 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.077341 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.077363 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.077393 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.077416 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:51Z","lastTransitionTime":"2026-01-22T05:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.180093 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.180150 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.180162 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.180179 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.180194 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:51Z","lastTransitionTime":"2026-01-22T05:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.282055 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.282100 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.282111 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.282130 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.282145 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:51Z","lastTransitionTime":"2026-01-22T05:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.385042 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.385083 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.385125 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.385144 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.385155 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:51Z","lastTransitionTime":"2026-01-22T05:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.432588 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4xrw6_f70d53ec-9c73-45bf-b6b4-ec45565ef1e6/kube-multus/0.log" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.432675 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4xrw6" event={"ID":"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6","Type":"ContainerStarted","Data":"8b107116500053caa4af8918d8569196067dea04df195595dd9876338a9c1fcc"} Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.448200 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gxwx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"377cd579-2ade-48ea-ad2a-44d1546fd5fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gxwx2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:51Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.469768 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:51Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.487450 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.487622 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.487695 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.487762 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.487821 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:51Z","lastTransitionTime":"2026-01-22T05:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.492796 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:51Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.518396 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b107116500053caa4af8918d8569196067dea04df195595dd9876338a9c1fcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:46:50Z\\\",\\\"message\\\":\\\"2026-01-22T05:46:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_e563e083-93cb-4d64-acfe-8dc4862aa7e0\\\\n2026-01-22T05:46:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_e563e083-93cb-4d64-acfe-8dc4862aa7e0 to /host/opt/cni/bin/\\\\n2026-01-22T05:46:05Z [verbose] multus-daemon started\\\\n2026-01-22T05:46:05Z [verbose] Readiness Indicator file check\\\\n2026-01-22T05:46:50Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:51Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.541150 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a9d78be03393c2eb386434bf86f740e39014aae32e861f7e07202d2e89d555\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a9d78be03393c2eb386434bf86f740e39014aae32e861f7e07202d2e89d555\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:46:35Z\\\",\\\"message\\\":\\\"ssFirewall event handler 9\\\\nI0122 05:46:34.803442 6626 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0122 05:46:34.803606 6626 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 05:46:34.803765 6626 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 05:46:34.804048 6626 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 05:46:34.804238 6626 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 05:46:34.804977 6626 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 05:46:34.805007 6626 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 05:46:34.805013 6626 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 05:46:34.805037 6626 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 05:46:34.805078 6626 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0122 05:46:34.805083 6626 factory.go:656] Stopping watch factory\\\\nI0122 05:46:34.805103 6626 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mb2qs_openshift-ovn-kubernetes(45295ff5-bb7d-450f-9ff1-eeb4edb0d705)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:51Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.554727 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d968804b-0683-488d-bc01-a0a16b689d6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9acc1a82033c3ce19a5331828cf1e54919f3eb43181d8ef15049d67b6c56dd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7df0c7cfcbf807a71f0b022f48858333e3de0be3f3de087efe5a231d41a9db90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9d4vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:51Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.566365 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:51Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.580519 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:51Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.590326 4982 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.590378 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.590393 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.590415 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.590430 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:51Z","lastTransitionTime":"2026-01-22T05:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.594101 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controlle
r\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kub
e-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:51Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.607339 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b37309fa-e826-4c04-96a2-611415609985\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22646904f96d12e0c71cbc01fb8c49f02245acaca106b7090e22da3d3e22028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b85b4f1d0cefc55e522ca2999285f71b01ee3a5af68db920737dc01ac21d2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1004d804d31b1e415e24a30aa33e89a83d8a4cbc4db54be6001f3306461f097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",
\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efeef92a1a0c1b87ccce6c59c96e0fe0ec322fd7f828993c028dd9f8217fc33e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efeef92a1a0c1b87ccce6c59c96e0fe0ec322fd7f828993c028dd9f8217fc33e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:51Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.620521 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:51Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.633105 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:51Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.646987 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:51Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.663150 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d89d2c542320e62250c1cd2e647a48a26be58b60f268f696031d49a0292938c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"start
edAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.1
68.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:51Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.663529 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 10:46:16.859991378 +0000 UTC Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.676574 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:51Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.693277 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.693336 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.693353 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.693379 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.693397 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:51Z","lastTransitionTime":"2026-01-22T05:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.699122 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:51Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.712241 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:51Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.796325 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.796554 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.796627 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.796730 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.796788 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:51Z","lastTransitionTime":"2026-01-22T05:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.899580 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.899626 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.899637 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.899657 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:51 crc kubenswrapper[4982]: I0122 05:46:51.899669 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:51Z","lastTransitionTime":"2026-01-22T05:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.002246 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.002283 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.002294 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.002310 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.002323 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:52Z","lastTransitionTime":"2026-01-22T05:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.105315 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.105360 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.105373 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.105391 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.105402 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:52Z","lastTransitionTime":"2026-01-22T05:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.208008 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.208038 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.208046 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.208061 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.208070 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:52Z","lastTransitionTime":"2026-01-22T05:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.310991 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.311031 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.311042 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.311060 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.311072 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:52Z","lastTransitionTime":"2026-01-22T05:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.413928 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.413976 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.413989 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.414009 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.414022 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:52Z","lastTransitionTime":"2026-01-22T05:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.516397 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.516463 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.516484 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.516511 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.516561 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:52Z","lastTransitionTime":"2026-01-22T05:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.534892 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.534948 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.534971 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.535000 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.535016 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:52Z","lastTransitionTime":"2026-01-22T05:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:52 crc kubenswrapper[4982]: E0122 05:46:52.553303 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8c22d1f6-312d-4feb-94eb-4bba85065af7\\\",\\\"systemUUID\\\":\\\"a4516023-e293-4f20-9f1e-642f5f94b84c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:52Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.557305 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.557476 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.557599 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.557728 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.557891 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:52Z","lastTransitionTime":"2026-01-22T05:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:52 crc kubenswrapper[4982]: E0122 05:46:52.574361 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8c22d1f6-312d-4feb-94eb-4bba85065af7\\\",\\\"systemUUID\\\":\\\"a4516023-e293-4f20-9f1e-642f5f94b84c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:52Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.578458 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.578500 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.578510 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.578528 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.578540 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:52Z","lastTransitionTime":"2026-01-22T05:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:52 crc kubenswrapper[4982]: E0122 05:46:52.593658 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8c22d1f6-312d-4feb-94eb-4bba85065af7\\\",\\\"systemUUID\\\":\\\"a4516023-e293-4f20-9f1e-642f5f94b84c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:52Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.602284 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.602335 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.602349 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.602365 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.602374 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:52Z","lastTransitionTime":"2026-01-22T05:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:52 crc kubenswrapper[4982]: E0122 05:46:52.619274 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8c22d1f6-312d-4feb-94eb-4bba85065af7\\\",\\\"systemUUID\\\":\\\"a4516023-e293-4f20-9f1e-642f5f94b84c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:52Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.623311 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.623451 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.623545 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.623656 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.623743 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:52Z","lastTransitionTime":"2026-01-22T05:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:52 crc kubenswrapper[4982]: E0122 05:46:52.635873 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8c22d1f6-312d-4feb-94eb-4bba85065af7\\\",\\\"systemUUID\\\":\\\"a4516023-e293-4f20-9f1e-642f5f94b84c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:52Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:52 crc kubenswrapper[4982]: E0122 05:46:52.636177 4982 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.658152 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.658214 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.658231 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.658254 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.658268 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:52Z","lastTransitionTime":"2026-01-22T05:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.663822 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 07:48:01.675926042 +0000 UTC Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.718624 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.718668 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.718687 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:46:52 crc kubenswrapper[4982]: E0122 05:46:52.718814 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.718830 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:46:52 crc kubenswrapper[4982]: E0122 05:46:52.718956 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:46:52 crc kubenswrapper[4982]: E0122 05:46:52.719032 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:46:52 crc kubenswrapper[4982]: E0122 05:46:52.719071 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.765774 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.765812 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.765821 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.765838 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.765866 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:52Z","lastTransitionTime":"2026-01-22T05:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.868440 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.868727 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.868793 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.868886 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.868966 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:52Z","lastTransitionTime":"2026-01-22T05:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.972195 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.972253 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.972270 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.972295 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:52 crc kubenswrapper[4982]: I0122 05:46:52.972312 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:52Z","lastTransitionTime":"2026-01-22T05:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.075072 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.075124 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.075137 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.075158 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.075169 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:53Z","lastTransitionTime":"2026-01-22T05:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.179279 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.179349 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.179367 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.179392 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.179410 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:53Z","lastTransitionTime":"2026-01-22T05:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.282283 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.282342 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.282354 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.282373 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.282386 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:53Z","lastTransitionTime":"2026-01-22T05:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.385197 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.385260 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.385275 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.385294 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.385310 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:53Z","lastTransitionTime":"2026-01-22T05:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.488739 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.489288 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.489633 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.489834 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.490029 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:53Z","lastTransitionTime":"2026-01-22T05:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.593446 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.593520 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.593544 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.593572 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.593590 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:53Z","lastTransitionTime":"2026-01-22T05:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.664274 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 00:04:42.183161567 +0000 UTC Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.696803 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.696902 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.696960 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.696990 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.697012 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:53Z","lastTransitionTime":"2026-01-22T05:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.800335 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.800405 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.800425 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.800452 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.800476 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:53Z","lastTransitionTime":"2026-01-22T05:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.904430 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.904514 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.904542 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.904577 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:53 crc kubenswrapper[4982]: I0122 05:46:53.904605 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:53Z","lastTransitionTime":"2026-01-22T05:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.006889 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.006947 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.007004 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.007030 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.007051 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:54Z","lastTransitionTime":"2026-01-22T05:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.110524 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.110577 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.110595 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.110620 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.110638 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:54Z","lastTransitionTime":"2026-01-22T05:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.214213 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.214687 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.214900 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.215117 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.215274 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:54Z","lastTransitionTime":"2026-01-22T05:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.318959 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.319034 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.319057 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.319091 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.319114 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:54Z","lastTransitionTime":"2026-01-22T05:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.423584 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.423651 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.423670 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.423696 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.423717 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:54Z","lastTransitionTime":"2026-01-22T05:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.527311 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.527372 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.527391 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.527416 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.527433 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:54Z","lastTransitionTime":"2026-01-22T05:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.630287 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.630464 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.630483 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.630505 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.630523 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:54Z","lastTransitionTime":"2026-01-22T05:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.664910 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 13:01:12.213061739 +0000 UTC Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.718716 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.718785 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.718819 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.718727 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:46:54 crc kubenswrapper[4982]: E0122 05:46:54.718976 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:46:54 crc kubenswrapper[4982]: E0122 05:46:54.719105 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:46:54 crc kubenswrapper[4982]: E0122 05:46:54.719441 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:46:54 crc kubenswrapper[4982]: E0122 05:46:54.719539 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.738791 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.739021 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.739064 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.739098 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.739122 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:54Z","lastTransitionTime":"2026-01-22T05:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.842881 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.842938 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.842955 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.842980 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:54 crc kubenswrapper[4982]: I0122 05:46:54.842999 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:54Z","lastTransitionTime":"2026-01-22T05:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 22 05:46:55 crc kubenswrapper[4982]: I0122 05:46:55.052761 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:55 crc kubenswrapper[4982]: I0122 05:46:55.052823 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:55 crc kubenswrapper[4982]: I0122 05:46:55.052840 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:55 crc kubenswrapper[4982]: I0122 05:46:55.052936 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:55 crc kubenswrapper[4982]: I0122 05:46:55.052961 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:55Z","lastTransitionTime":"2026-01-22T05:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
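The setters.go:603 entries log the freshly computed Ready condition verbatim. The same payload can be rebuilt from the public k8s.io/api types (a sketch; the Go module dependencies are assumed to be fetched, and the timestamp is one of those in the log):

package main

import (
	"encoding/json"
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

func main() {
	ts := metav1.NewTime(time.Date(2026, 1, 22, 5, 46, 54, 0, time.UTC))
	// Mirrors the condition={...} payload in the setters.go entries above.
	cond := corev1.NodeCondition{
		Type:               corev1.NodeReady,
		Status:             corev1.ConditionFalse,
		LastHeartbeatTime:  ts,
		LastTransitionTime: ts,
		Reason:             "KubeletNotReady",
		Message: "container runtime network not ready: NetworkReady=false " +
			"reason:NetworkPluginNotReady message:Network plugin returns error: " +
			"no CNI configuration file in /etc/kubernetes/cni/net.d/. " +
			"Has your network provider started?",
	}
	b, _ := json.Marshal(cond)
	fmt.Println(string(b)) // prints the same {"type":"Ready","status":"False",...} shape
}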
Jan 22 05:46:55 crc kubenswrapper[4982]: I0122 05:46:55.665309 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-10 12:04:35.761574365 +0000 UTC
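Note that every certificate_manager.go:356 line reports the same expiration (2026-02-24 05:53:03) but a different rotation deadline. client-go's certificate manager re-draws a jittered deadline, roughly uniform over 70-90% of the certificate's lifetime past NotBefore, each time it runs; the sketch below reproduces that jitter (an approximation of the documented behavior, not the library source, and the NotBefore value is an assumption chosen to be consistent with the logged deadlines):

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// nextRotationDeadline approximates the certificate manager's jitter:
// a deadline uniformly distributed in [70%, 90%] of the cert lifetime.
func nextRotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := float64(notAfter.Sub(notBefore))
	jittered := time.Duration(total * (0.7 + 0.2*rand.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	// Expiration taken from the log; NotBefore assumed one year earlier.
	notAfter := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC)
	notBefore := notAfter.AddDate(-1, 0, 0)
	for i := 0; i < 3; i++ {
		// Each pass logs a fresh deadline, as the entries above do.
		fmt.Println("rotation deadline is", nextRotationDeadline(notBefore, notAfter))
	}
}

All of the deadlines in this excerpt (2025-11-07 through 2026-01-16) fall inside that 70-90% window for a roughly one-year certificate, which is why they scatter between early November and mid January.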
Jan 22 05:46:56 crc kubenswrapper[4982]: I0122 05:46:56.095205 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:56 crc kubenswrapper[4982]: I0122 05:46:56.095299 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:56 crc kubenswrapper[4982]: I0122 05:46:56.095316 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:56 crc kubenswrapper[4982]: I0122 05:46:56.095342 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:56 crc kubenswrapper[4982]: I0122 05:46:56.095361 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:56Z","lastTransitionTime":"2026-01-22T05:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:56 crc kubenswrapper[4982]: I0122 05:46:56.666169 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 04:47:50.311215093 +0000 UTC
Jan 22 05:46:56 crc kubenswrapper[4982]: I0122 05:46:56.718402 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 05:46:56 crc kubenswrapper[4982]: I0122 05:46:56.718456 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2"
Jan 22 05:46:56 crc kubenswrapper[4982]: I0122 05:46:56.718407 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 05:46:56 crc kubenswrapper[4982]: I0122 05:46:56.718407 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 05:46:56 crc kubenswrapper[4982]: E0122 05:46:56.718621 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 22 05:46:56 crc kubenswrapper[4982]: E0122 05:46:56.718920 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb"
Jan 22 05:46:56 crc kubenswrapper[4982]: E0122 05:46:56.719061 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 22 05:46:56 crc kubenswrapper[4982]: E0122 05:46:56.719175 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 22 05:46:57 crc kubenswrapper[4982]: I0122 05:46:57.030295 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:57 crc kubenswrapper[4982]: I0122 05:46:57.030360 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:57 crc kubenswrapper[4982]: I0122 05:46:57.030377 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:57 crc kubenswrapper[4982]: I0122 05:46:57.030404 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:57 crc kubenswrapper[4982]: I0122 05:46:57.030422 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:57Z","lastTransitionTime":"2026-01-22T05:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:57 crc kubenswrapper[4982]: I0122 05:46:57.666348 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 20:07:26.550953499 +0000 UTC
Jan 22 05:46:58 crc kubenswrapper[4982]: I0122 05:46:58.166965 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:58 crc kubenswrapper[4982]: I0122 05:46:58.167042 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:58 crc kubenswrapper[4982]: I0122 05:46:58.167065 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:58 crc kubenswrapper[4982]: I0122 05:46:58.167090 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:58 crc kubenswrapper[4982]: I0122 05:46:58.167109 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:58Z","lastTransitionTime":"2026-01-22T05:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:46:58 crc kubenswrapper[4982]: I0122 05:46:58.667176 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 11:59:21.812143121 +0000 UTC
Jan 22 05:46:58 crc kubenswrapper[4982]: I0122 05:46:58.718553 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2"
Jan 22 05:46:58 crc kubenswrapper[4982]: I0122 05:46:58.718597 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 05:46:58 crc kubenswrapper[4982]: I0122 05:46:58.718663 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 05:46:58 crc kubenswrapper[4982]: I0122 05:46:58.718748 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 05:46:58 crc kubenswrapper[4982]: E0122 05:46:58.718758 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb"
Jan 22 05:46:58 crc kubenswrapper[4982]: E0122 05:46:58.718897 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 22 05:46:58 crc kubenswrapper[4982]: E0122 05:46:58.718987 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 22 05:46:58 crc kubenswrapper[4982]: E0122 05:46:58.719173 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.096312 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.096396 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.096416 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.096455 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.096481 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:59Z","lastTransitionTime":"2026-01-22T05:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"} Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.612662 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.612726 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.612741 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.612767 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.612785 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:59Z","lastTransitionTime":"2026-01-22T05:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.667622 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 08:40:13.499667299 +0000 UTC Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.715892 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.715955 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.715968 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.715989 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.716003 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:59Z","lastTransitionTime":"2026-01-22T05:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.734297 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.750330 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\
\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:59Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.773692 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b37309fa-e826-4c04-96a2-611415609985\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22646904f96d12e0c71cbc01fb8c49f02245acaca106b7090e22da3d3e22028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b85b4f1d0cefc55e522ca2999285f71b01ee3a5af68db920737dc01ac21d2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1004d804d31b1e415e24a30aa33e89a83d8a4cbc4db54be6001f3306461f097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efeef92a1a0c1b87ccce6c59c96e0fe0ec322fd7f828993c028dd9f8217fc33e\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efeef92a1a0c1b87ccce6c59c96e0fe0ec322fd7f828993c028dd9f8217fc33e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:59Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.797096 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:59Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.816970 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:59Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.819235 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.819274 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.819288 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.819310 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.819323 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:59Z","lastTransitionTime":"2026-01-22T05:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.834475 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:59Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.846487 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:59Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.864463 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:59Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.880845 4982 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:59Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.896164 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:59Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.911987 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:59Z is after 2025-08-24T17:21:41Z"
Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.922059 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.922101 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.922116 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.922138 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.922151 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:46:59Z","lastTransitionTime":"2026-01-22T05:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.937874 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d89d2c542320e62250c1cd2e647a48a26be58b60f268f696031d49a0292938c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:59Z is after 2025-08-24T17:21:41Z" Jan 22 05:46:59 crc kubenswrapper[4982]: I0122 05:46:59.960003 4982 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d6
2265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:46:59Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.011720 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:00Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.024365 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.024425 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.024442 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.024467 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.024485 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:00Z","lastTransitionTime":"2026-01-22T05:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.037931 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b107116500053caa4af8918d8569196067dea04df195595dd9876338a9c1fcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:46:50Z\\\",\\\"message\\\":\\\"2026-01-22T05:46:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_e563e083-93cb-4d64-acfe-8dc4862aa7e0\\\\n2026-01-22T05:46:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_e563e083-93cb-4d64-acfe-8dc4862aa7e0 to 
/host/opt/cni/bin/\\\\n2026-01-22T05:46:05Z [verbose] multus-daemon started\\\\n2026-01-22T05:46:05Z [verbose] Readiness Indicator file check\\\\n2026-01-22T05:46:50Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:00Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.063438 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a9d78be03393c2eb386434bf86f740e39014aae32e861f7e07202d2e89d555\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a9d78be03393c2eb386434bf86f740e39014aae32e861f7e07202d2e89d555\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:46:35Z\\\",\\\"message\\\":\\\"ssFirewall event handler 9\\\\nI0122 05:46:34.803442 6626 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0122 05:46:34.803606 6626 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 05:46:34.803765 6626 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 05:46:34.804048 6626 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 05:46:34.804238 6626 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 05:46:34.804977 6626 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 05:46:34.805007 6626 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 05:46:34.805013 6626 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 05:46:34.805037 6626 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 05:46:34.805078 6626 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0122 05:46:34.805083 6626 factory.go:656] Stopping watch factory\\\\nI0122 05:46:34.805103 6626 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mb2qs_openshift-ovn-kubernetes(45295ff5-bb7d-450f-9ff1-eeb4edb0d705)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:00Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.075943 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d968804b-0683-488d-bc01-a0a16b689d6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9acc1a82033c3ce19a5331828cf1e54919f3eb43181d8ef15049d67b6c56dd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7df0c7cfcbf807a71f0b022f48858333e3de0be3f3de087efe5a231d41a9db90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9d4vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:00Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.089373 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gxwx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"377cd579-2ade-48ea-ad2a-44d1546fd5fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gxwx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:00Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.127918 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.127983 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.128001 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.128031 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.128052 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:00Z","lastTransitionTime":"2026-01-22T05:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
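
Every failed status patch above is rejected by the same webhook at 127.0.0.1:9743 with the same x509 error: the serving certificate expired on 2025-08-24, long before the node's clock time of 2026-01-22. A minimal sketch, assuming the endpoint address from the log and deliberately skipping verification so the expired leaf can still be read, of how one might dump the presented certificate's validity window from the node:

package main

import (
    "crypto/tls"
    "fmt"
    "log"
    "time"
)

func main() {
    // Dial the webhook endpoint named in the log. InsecureSkipVerify is
    // needed precisely because the certificate is expired; the goal is to
    // read it, not to trust it.
    conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
    if err != nil {
        log.Fatal(err)
    }
    defer conn.Close()

    leaf := conn.ConnectionState().PeerCertificates[0]
    fmt.Printf("notBefore: %s\n", leaf.NotBefore.Format(time.RFC3339))
    fmt.Printf("notAfter:  %s\n", leaf.NotAfter.Format(time.RFC3339))
    fmt.Printf("expired:   %v\n", time.Now().After(leaf.NotAfter))
}

This is the same comparison the kubelet's TLS client makes before each Post to /pod?timeout=10s is refused.
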
Has your network provider started?"} Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.231274 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.231345 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.231358 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.231376 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.231389 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:00Z","lastTransitionTime":"2026-01-22T05:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.335555 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.335640 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.335664 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.335692 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.335713 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:00Z","lastTransitionTime":"2026-01-22T05:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.438844 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.438922 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.438930 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.438979 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.438990 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:00Z","lastTransitionTime":"2026-01-22T05:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.542871 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.542924 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.542935 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.542955 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.542969 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:00Z","lastTransitionTime":"2026-01-22T05:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.646090 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.646160 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.646177 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.646203 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.646222 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:00Z","lastTransitionTime":"2026-01-22T05:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.668635 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 04:43:03.565509154 +0000 UTC Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.718561 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.718680 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.718601 4982 util.go:30] "No sandbox for pod can be found. 
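
The certificate_manager entry a few records above reports the same kubelet-serving expiration (2026-02-24 05:53:03 UTC) but a different rotation deadline than its repeats further down (2025-12-23 here, 2026-01-08 and 2025-11-07 later). That pattern is consistent with a deadline re-drawn on each evaluation from a jittered fraction of the certificate's validity window; a sketch under that assumption (the 70-90% band mirrors client-go's certificate manager rather than anything printed in this log, and the issue time below is hypothetical):

package main

import (
    "fmt"
    "math/rand"
    "time"
)

// nextRotationDeadline draws a fresh deadline in the 70-90% band of the
// certificate's validity window, so each call lands somewhere different,
// just as the repeated log lines do.
func nextRotationDeadline(notBefore, notAfter time.Time) time.Time {
    total := notAfter.Sub(notBefore)
    jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
    return notBefore.Add(jittered)
}

func main() {
    notAfter, _ := time.Parse(time.RFC3339, "2026-02-24T05:53:03Z")
    notBefore := notAfter.Add(-365 * 24 * time.Hour) // hypothetical issue time
    for i := 0; i < 3; i++ {
        fmt.Println("rotation deadline:", nextRotationDeadline(notBefore, notAfter))
    }
}
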
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:47:00 crc kubenswrapper[4982]: E0122 05:47:00.718766 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:47:00 crc kubenswrapper[4982]: E0122 05:47:00.718836 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.718689 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:47:00 crc kubenswrapper[4982]: E0122 05:47:00.719060 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:47:00 crc kubenswrapper[4982]: E0122 05:47:00.719202 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.749721 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.749818 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.749839 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.749897 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.749918 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:00Z","lastTransitionTime":"2026-01-22T05:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
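
The NetworkReady=false condition and the "Error syncing pod, skipping" entries above all trace back to one fact: nothing has written a CNI configuration into /etc/kubernetes/cni/net.d/ yet, apparently because the ovnkube-controller container that would provide it is in CrashLoopBackOff. A minimal sketch of the directory check behind that message, assuming the runtime accepts the usual libcni extensions:

package main

import (
    "fmt"
    "log"
    "os"
    "path/filepath"
)

func main() {
    // Directory named in the kubelet message; it stays empty until the
    // network provider drops its config here.
    dir := "/etc/kubernetes/cni/net.d"
    entries, err := os.ReadDir(dir)
    if err != nil {
        log.Fatal(err)
    }
    found := 0
    for _, e := range entries {
        switch filepath.Ext(e.Name()) {
        case ".conf", ".conflist", ".json":
            fmt.Println("CNI config:", filepath.Join(dir, e.Name()))
            found++
        }
    }
    if found == 0 {
        fmt.Println("no CNI configuration file in", dir)
    }
}
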
Has your network provider started?"} Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.854771 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.854845 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.854894 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.854922 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.854943 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:00Z","lastTransitionTime":"2026-01-22T05:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.959235 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.959322 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.959345 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.959379 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:00 crc kubenswrapper[4982]: I0122 05:47:00.959402 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:00Z","lastTransitionTime":"2026-01-22T05:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.063498 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.063562 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.063581 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.063611 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.063637 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:01Z","lastTransitionTime":"2026-01-22T05:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.166606 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.166654 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.166665 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.166683 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.166695 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:01Z","lastTransitionTime":"2026-01-22T05:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.269942 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.270011 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.270028 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.270053 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.270073 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:01Z","lastTransitionTime":"2026-01-22T05:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.373271 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.373326 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.373345 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.373370 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.373388 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:01Z","lastTransitionTime":"2026-01-22T05:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.477729 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.477797 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.477816 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.477844 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.477891 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:01Z","lastTransitionTime":"2026-01-22T05:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.581727 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.581788 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.581806 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.581832 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.581881 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:01Z","lastTransitionTime":"2026-01-22T05:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.669604 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 06:36:38.750537594 +0000 UTC Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.685807 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.685898 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.685928 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.685958 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.685984 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:01Z","lastTransitionTime":"2026-01-22T05:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.720159 4982 scope.go:117] "RemoveContainer" containerID="e0a9d78be03393c2eb386434bf86f740e39014aae32e861f7e07202d2e89d555" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.789380 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.789431 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.789440 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.789455 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.789465 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:01Z","lastTransitionTime":"2026-01-22T05:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.892212 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.892286 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.892304 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.892331 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.892350 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:01Z","lastTransitionTime":"2026-01-22T05:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.995444 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.995524 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.995536 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.995553 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:01 crc kubenswrapper[4982]: I0122 05:47:01.995613 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:01Z","lastTransitionTime":"2026-01-22T05:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.100419 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.100476 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.100494 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.100520 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.100534 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:02Z","lastTransitionTime":"2026-01-22T05:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.204591 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.204668 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.204692 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.204740 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.204764 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:02Z","lastTransitionTime":"2026-01-22T05:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.307772 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.307835 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.307881 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.307907 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.307926 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:02Z","lastTransitionTime":"2026-01-22T05:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.411032 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.411112 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.411137 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.411165 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.411185 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:02Z","lastTransitionTime":"2026-01-22T05:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.514976 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.515011 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.515041 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.515059 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.515071 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:02Z","lastTransitionTime":"2026-01-22T05:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.599070 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.599362 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 05:47:02 crc kubenswrapper[4982]: E0122 05:47:02.599559 4982 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 22 05:47:02 crc kubenswrapper[4982]: E0122 05:47:02.599644 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 05:48:06.599626355 +0000 UTC m=+147.438264368 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 22 05:47:02 crc kubenswrapper[4982]: E0122 05:47:02.599954 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:48:06.599923443 +0000 UTC m=+147.438561456 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.619008 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.619162 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.619184 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.619244 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.619266 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:02Z","lastTransitionTime":"2026-01-22T05:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.670403 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 09:55:25.062756608 +0000 UTC
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.700336 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 05:47:02 crc kubenswrapper[4982]: E0122 05:47:02.700452 4982 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Jan 22 05:47:02 crc kubenswrapper[4982]: E0122 05:47:02.700529 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 05:48:06.700511638 +0000 UTC m=+147.539149671 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.718399 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.718431 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.718474 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 05:47:02 crc kubenswrapper[4982]: E0122 05:47:02.718592 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.718816 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2"
Jan 22 05:47:02 crc kubenswrapper[4982]: E0122 05:47:02.718824 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 22 05:47:02 crc kubenswrapper[4982]: E0122 05:47:02.719096 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb"
Jan 22 05:47:02 crc kubenswrapper[4982]: E0122 05:47:02.719232 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.722350 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.722393 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.722414 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.722440 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.722456 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:02Z","lastTransitionTime":"2026-01-22T05:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.816300 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.816349 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.816361 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.816385 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.816397 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:02Z","lastTransitionTime":"2026-01-22T05:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:02 crc kubenswrapper[4982]: E0122 05:47:02.840178 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8c22d1f6-312d-4feb-94eb-4bba85065af7\\\",\\\"systemUUID\\\":\\\"a4516023-e293-4f20-9f1e-642f5f94b84c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:02Z is after 2025-08-24T17:21:41Z"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.846271 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.846370 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.846405 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.846443 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.846469 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:02Z","lastTransitionTime":"2026-01-22T05:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:02 crc kubenswrapper[4982]: E0122 05:47:02.871463 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8c22d1f6-312d-4feb-94eb-4bba85065af7\\\",\\\"systemUUID\\\":\\\"a4516023-e293-4f20-9f1e-642f5f94b84c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:02Z is after 2025-08-24T17:21:41Z"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.877971 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.878024 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.878055 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.878075 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.878088 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:02Z","lastTransitionTime":"2026-01-22T05:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.903475 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.903558 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 05:47:02 crc kubenswrapper[4982]: E0122 05:47:02.903806 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 22 05:47:02 crc kubenswrapper[4982]: E0122 05:47:02.903844 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 22 05:47:02 crc kubenswrapper[4982]: E0122 05:47:02.903884 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 22 05:47:02 crc kubenswrapper[4982]: E0122 05:47:02.903910 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 22 05:47:02 crc kubenswrapper[4982]: E0122 05:47:02.903916 4982 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 22 05:47:02 crc kubenswrapper[4982]: E0122 05:47:02.903935 4982 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 22 05:47:02 crc kubenswrapper[4982]: E0122 05:47:02.904018 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 05:48:06.903983024 +0000 UTC m=+147.742621067 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 22 05:47:02 crc kubenswrapper[4982]: E0122 05:47:02.904068 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 05:48:06.904046855 +0000 UTC m=+147.742685128 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 22 05:47:02 crc kubenswrapper[4982]: E0122 05:47:02.906352 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8c22d1f6-312d-4feb-94eb-4bba85065af7\\\",\\\"systemUUID\\\":\\\"a4516023-e293-4f20-9f1e-642f5f94b84c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:02Z is after 2025-08-24T17:21:41Z"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.912081 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.912139 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.912159 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.912186 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.912206 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:02Z","lastTransitionTime":"2026-01-22T05:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:02 crc kubenswrapper[4982]: E0122 05:47:02.935291 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8c22d1f6-312d-4feb-94eb-4bba85065af7\\\",\\\"systemUUID\\\":\\\"a4516023-e293-4f20-9f1e-642f5f94b84c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.941374 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.941432 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.941452 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.941481 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.941506 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:02Z","lastTransitionTime":"2026-01-22T05:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:02 crc kubenswrapper[4982]: E0122 05:47:02.967322 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T05:47:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8c22d1f6-312d-4feb-94eb-4bba85065af7\\\",\\\"systemUUID\\\":\\\"a4516023-e293-4f20-9f1e-642f5f94b84c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:02Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:02 crc kubenswrapper[4982]: E0122 05:47:02.967719 4982 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.970628 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.970694 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.970707 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.970724 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:02 crc kubenswrapper[4982]: I0122 05:47:02.970734 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:02Z","lastTransitionTime":"2026-01-22T05:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.073743 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.073792 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.073803 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.073819 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.073832 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:03Z","lastTransitionTime":"2026-01-22T05:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.176945 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.176976 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.176984 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.177002 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.177012 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:03Z","lastTransitionTime":"2026-01-22T05:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.280301 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.280373 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.280393 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.280421 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.280442 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:03Z","lastTransitionTime":"2026-01-22T05:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.390879 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.390944 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.390959 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.390982 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.390997 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:03Z","lastTransitionTime":"2026-01-22T05:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.489903 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mb2qs_45295ff5-bb7d-450f-9ff1-eeb4edb0d705/ovnkube-controller/2.log" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.493209 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.493245 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.493256 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.493274 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.493288 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:03Z","lastTransitionTime":"2026-01-22T05:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.493969 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" event={"ID":"45295ff5-bb7d-450f-9ff1-eeb4edb0d705","Type":"ContainerStarted","Data":"34cb6c04bceeb41b90c18f2025140c1131ca41c0ade06c2447fefe3524744fa4"} Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.494891 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.524029 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb16d24-5f0f-4f27-9c9d-0391d24cdf92\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7243427e708ca315efe99d8d87dc6078502a4ac8bbab9d3c1a14a3be8e479970\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://988795d75122feff13d8661e876ec6e4eadd09358efc3cd8f1adb05c5a7ab1a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://988795d75122feff13d8661e876ec6e4eadd09358efc3cd8f1adb05c5a7ab1a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.536594 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.556411 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.568748 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.584660 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d89d2c542320e62250c1cd2e647a48a26be58b60f268f696031d49a0292938c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:07Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:03Z is after 
2025-08-24T17:21:41Z" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.595350 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.595390 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.595402 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.595423 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.595436 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:03Z","lastTransitionTime":"2026-01-22T05:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.598985 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-c
ontroller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.615747 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.630639 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b107116500053caa4af8918d8569196067dea04df195595dd9876338a9c1fcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:46:50Z\\\",\\\"message\\\":\\\"2026-01-22T05:46:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_e563e083-93cb-4d64-acfe-8dc4862aa7e0\\\\n2026-01-22T05:46:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_e563e083-93cb-4d64-acfe-8dc4862aa7e0 to /host/opt/cni/bin/\\\\n2026-01-22T05:46:05Z [verbose] multus-daemon started\\\\n2026-01-22T05:46:05Z [verbose] Readiness Indicator file check\\\\n2026-01-22T05:46:50Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.651285 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34cb6c04bceeb41b90c18f2025140c1131ca41c0ade06c2447fefe3524744fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a9d78be03393c2eb386434bf86f740e39014aae32e861f7e07202d2e89d555\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:46:35Z\\\",\\\"message\\\":\\\"ssFirewall event handler 9\\\\nI0122 05:46:34.803442 6626 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0122 05:46:34.803606 6626 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 05:46:34.803765 6626 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 05:46:34.804048 6626 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 05:46:34.804238 6626 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 05:46:34.804977 6626 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 05:46:34.805007 6626 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 05:46:34.805013 6626 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 05:46:34.805037 6626 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 05:46:34.805078 6626 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0122 05:46:34.805083 6626 factory.go:656] Stopping watch factory\\\\nI0122 05:46:34.805103 6626 handler.go:208] Removed *v1.Namespace 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.667193 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d968804b-0683-488d-bc01-a0a16b689d6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9acc1a82033c3ce19a5331828cf1e54919f3eb43181d8ef15049d67b6c56dd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7df0c7cfcbf807a71f0b022f48858333e3de0be3f3de087efe5a231d41a9db90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9d4vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:03Z is after 2025-08-24T17:21:41Z" Jan 22 
05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.670763 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 19:28:48.614139006 +0000 UTC Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.684156 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gxwx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"377cd579-2ade-48ea-ad2a-44d1546fd5fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gxwx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 
05:47:03.698782 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.698936 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.699026 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.699669 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.699745 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:03Z","lastTransitionTime":"2026-01-22T05:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.705110 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.724355 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b37309fa-e826-4c04-96a2-611415609985\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22646904f96d12e0c71cbc01fb8c49f02245acaca106b7090e22da3d3e22028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b85b4f1d0cefc55e522ca2999285f71b01ee3a5af68db920737dc01ac21d2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1004d804d31b1e415e24a30aa33e89a83d8a4cbc4db54be6001f3306461f097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3c
a001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efeef92a1a0c1b87ccce6c59c96e0fe0ec322fd7f828993c028dd9f8217fc33e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efeef92a1a0c1b87ccce6c59c96e0fe0ec322fd7f828993c028dd9f8217fc33e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.740204 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.757602 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.771571 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.785387 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:03Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.800262 4982 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:03Z is after 2025-08-24T17:21:41Z" Jan 22 
05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.802502 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.802551 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.802567 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.802590 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.802605 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:03Z","lastTransitionTime":"2026-01-22T05:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.906810 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.906922 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.906942 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.906975 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:03 crc kubenswrapper[4982]: I0122 05:47:03.906999 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:03Z","lastTransitionTime":"2026-01-22T05:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.010423 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.010475 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.010487 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.010504 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.010514 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:04Z","lastTransitionTime":"2026-01-22T05:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.113666 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.113745 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.113769 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.113800 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.113823 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:04Z","lastTransitionTime":"2026-01-22T05:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.217324 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.217498 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.217519 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.217548 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.217572 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:04Z","lastTransitionTime":"2026-01-22T05:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.321343 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.321433 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.321456 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.321488 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.321513 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:04Z","lastTransitionTime":"2026-01-22T05:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.425766 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.425973 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.425997 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.426026 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.426046 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:04Z","lastTransitionTime":"2026-01-22T05:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.508375 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mb2qs_45295ff5-bb7d-450f-9ff1-eeb4edb0d705/ovnkube-controller/3.log" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.509344 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mb2qs_45295ff5-bb7d-450f-9ff1-eeb4edb0d705/ovnkube-controller/2.log" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.513803 4982 generic.go:334] "Generic (PLEG): container finished" podID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerID="34cb6c04bceeb41b90c18f2025140c1131ca41c0ade06c2447fefe3524744fa4" exitCode=1 Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.513914 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" event={"ID":"45295ff5-bb7d-450f-9ff1-eeb4edb0d705","Type":"ContainerDied","Data":"34cb6c04bceeb41b90c18f2025140c1131ca41c0ade06c2447fefe3524744fa4"} Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.513987 4982 scope.go:117] "RemoveContainer" containerID="e0a9d78be03393c2eb386434bf86f740e39014aae32e861f7e07202d2e89d555" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.515403 4982 scope.go:117] "RemoveContainer" containerID="34cb6c04bceeb41b90c18f2025140c1131ca41c0ade06c2447fefe3524744fa4" Jan 22 05:47:04 crc kubenswrapper[4982]: E0122 05:47:04.515759 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-mb2qs_openshift-ovn-kubernetes(45295ff5-bb7d-450f-9ff1-eeb4edb0d705)\"" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.529465 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.530343 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.530376 4982 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.530411 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.530435 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:04Z","lastTransitionTime":"2026-01-22T05:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.533831 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b37309fa-e826-4c04-96a2-611415609985\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22646904f96d12e0c71cbc01fb8c49f02245acaca106b7090e22da3d3e22028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b85b4f1d0cefc55e522ca2999285f71b01ee3a5af68db920737dc01ac21d2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1004d804d31b1e415e24a30aa33e89a83d8a4cbc4db54be6001f3306461f097c\\\",\\\"image\\\":\\\"quay.io/openshi
ft-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efeef92a1a0c1b87ccce6c59c96e0fe0ec322fd7f828993c028dd9f8217fc33e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efeef92a1a0c1b87ccce6c59c96e0fe0ec322fd7f828993c028dd9f8217fc33e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.549903 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.565432 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.583352 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.596141 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.612175 4982 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:04Z is after 2025-08-24T17:21:41Z" Jan 22 
05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.629794 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.634098 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.634182 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.634210 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.634240 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.634262 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:04Z","lastTransitionTime":"2026-01-22T05:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.645267 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.664684 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.671298 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-06 19:12:18.273674921 +0000 UTC Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.677912 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.704374 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d89d2c542320e62250c1cd2e647a48a26be58b60f268f696031d49a0292938c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.718425 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.718554 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.718584 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.718673 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:47:04 crc kubenswrapper[4982]: E0122 05:47:04.718839 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:47:04 crc kubenswrapper[4982]: E0122 05:47:04.719108 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:47:04 crc kubenswrapper[4982]: E0122 05:47:04.719298 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:47:04 crc kubenswrapper[4982]: E0122 05:47:04.719426 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.721310 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb16d24-5f0f-4f27-9c9d-0391d24cdf92\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7243427e708ca315efe99d8d87dc6078502a4ac8bbab9d3c1a14a3be8e479970\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://988795d75122feff13d8661e876ec6e4eadd09358efc3cd8f1adb05c5a7ab1a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://988795d75122feff13d8661e876ec6e4eadd09358efc3cd8f1adb05c5a7ab1a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:04Z is after 
2025-08-24T17:21:41Z" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.738666 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.738678 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.738735 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.738757 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.738788 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.738811 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:04Z","lastTransitionTime":"2026-01-22T05:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.757386 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b107116500053caa4af8918d8569196067dea04df195595dd9876338a9c1fcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:46:50Z\\\",\\\"message\\\":\\\"2026-01-22T05:46:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_e563e083-93cb-4d64-acfe-8dc4862aa7e0\\\\n2026-01-22T05:46:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_e563e083-93cb-4d64-acfe-8dc4862aa7e0 to /host/opt/cni/bin/\\\\n2026-01-22T05:46:05Z [verbose] multus-daemon started\\\\n2026-01-22T05:46:05Z [verbose] Readiness Indicator file check\\\\n2026-01-22T05:46:50Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.784392 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34cb6c04bceeb41b90c18f2025140c1131ca41c0ade06c2447fefe3524744fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a9d78be03393c2eb386434bf86f740e39014aae32e861f7e07202d2e89d555\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:46:35Z\\\",\\\"message\\\":\\\"ssFirewall event handler 9\\\\nI0122 05:46:34.803442 6626 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0122 05:46:34.803606 6626 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 05:46:34.803765 6626 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 05:46:34.804048 6626 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 05:46:34.804238 6626 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 05:46:34.804977 6626 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 05:46:34.805007 6626 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 05:46:34.805013 6626 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 05:46:34.805037 6626 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 05:46:34.805078 6626 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0122 05:46:34.805083 6626 factory.go:656] Stopping watch factory\\\\nI0122 05:46:34.805103 6626 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34cb6c04bceeb41b90c18f2025140c1131ca41c0ade06c2447fefe3524744fa4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:47:03Z\\\",\\\"message\\\":\\\"or occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:03Z is after 2025-08-24T17:21:41Z]\\\\nI0122 05:47:03.684123 7023 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-rl6kj after 0 failed attempt(s)\\\\nI0122 05:47:03.684132 7023 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-rl6kj\\\\nI0122 05:47:03.684030 7023 services_controller.go:356] Processing sync for service openshift-ingress-canary/ingress-canary for network=default\\\\nI0122 05:47:03.684104 7023 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service 
k8s.ovn.org/owner:openshift-marketplace/community-operators]} name:Service_openshift-marketplace/community-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.189:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d389393c-7ba9-422c-b3f5-06e391d537d2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\
":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.803524 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d968804b-0683-488d-bc01-a0a16b689d6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9acc1a82033c3ce19a5331828cf1e54919f3eb43181d8ef15049d67b6c56dd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7df0c7cfcbf807a71f0b022f48858333e3de0be3f3de087efe5a231d41a9db90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9d4vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:04Z is after 2025-08-24T17:21:41Z" Jan 22 
05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.819141 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gxwx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"377cd579-2ade-48ea-ad2a-44d1546fd5fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gxwx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:04Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.837999 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:04Z is after 2025-08-24T17:21:41Z"
Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.842701 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.842762 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.842782 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.842808 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.842823 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:04Z","lastTransitionTime":"2026-01-22T05:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.946530 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.946585 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.946601 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.946624 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:04 crc kubenswrapper[4982]: I0122 05:47:04.946638 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:04Z","lastTransitionTime":"2026-01-22T05:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.050107 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.050160 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.050177 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.050203 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.050221 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:05Z","lastTransitionTime":"2026-01-22T05:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.153414 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.153708 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.153793 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.153986 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.154087 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:05Z","lastTransitionTime":"2026-01-22T05:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.257179 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.257271 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.257284 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.257307 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.257323 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:05Z","lastTransitionTime":"2026-01-22T05:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.360322 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.360725 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.361055 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.361199 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.361352 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:05Z","lastTransitionTime":"2026-01-22T05:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.464352 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.464421 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.464440 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.464470 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.464490 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:05Z","lastTransitionTime":"2026-01-22T05:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.567763 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.567815 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.567837 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.567883 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.567899 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:05Z","lastTransitionTime":"2026-01-22T05:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.571475 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mb2qs_45295ff5-bb7d-450f-9ff1-eeb4edb0d705/ovnkube-controller/3.log"
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.578111 4982 scope.go:117] "RemoveContainer" containerID="34cb6c04bceeb41b90c18f2025140c1131ca41c0ade06c2447fefe3524744fa4"
Jan 22 05:47:05 crc kubenswrapper[4982]: E0122 05:47:05.578412 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-mb2qs_openshift-ovn-kubernetes(45295ff5-bb7d-450f-9ff1-eeb4edb0d705)\"" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705"
Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.598503 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:05Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.623397 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:05Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.643007 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:05Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.663927 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d89d2c542320e62250c1cd2e647a48a26be58b60f268f696031d49a0292938c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:07Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:05Z is after 
2025-08-24T17:21:41Z" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.670404 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.670490 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.670507 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.670526 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.670539 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:05Z","lastTransitionTime":"2026-01-22T05:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.671802 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 12:22:28.692137142 +0000 UTC Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.681964 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb16d24-5f0f-4f27-9c9d-0391d24cdf92\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7243427e708ca315efe99d8d87dc6078502a4ac8bbab9d3c1a14a3be8e479970\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://988795d75122feff13d8661e876ec6e4eadd09358efc3cd8f1adb05c5a7ab1a1\\\",\
\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://988795d75122feff13d8661e876ec6e4eadd09358efc3cd8f1adb05c5a7ab1a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:05Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.704502 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b107116500053caa4af8918d8569196067dea04df195595dd9876338a9c1fcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:46:50Z\\\",\\\"message\\\":\\\"2026-01-22T05:46:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_e563e083-93cb-4d64-acfe-8dc4862aa7e0\\\\n2026-01-22T05:46:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_e563e083-93cb-4d64-acfe-8dc4862aa7e0 to /host/opt/cni/bin/\\\\n2026-01-22T05:46:05Z [verbose] multus-daemon started\\\\n2026-01-22T05:46:05Z [verbose] Readiness Indicator file 
check\\\\n2026-01-22T05:46:50Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:05Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.734233 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34cb6c04bceeb41b90c18f2025140c1131ca41c0ade06c2447fefe3524744fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34cb6c04bceeb41b90c18f2025140c1131ca41c0ade06c2447fefe3524744fa4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:47:03Z\\\",\\\"message\\\":\\\"or occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:03Z is after 2025-08-24T17:21:41Z]\\\\nI0122 05:47:03.684123 7023 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-rl6kj after 0 failed attempt(s)\\\\nI0122 05:47:03.684132 7023 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-rl6kj\\\\nI0122 05:47:03.684030 7023 services_controller.go:356] Processing sync for service openshift-ingress-canary/ingress-canary for network=default\\\\nI0122 05:47:03.684104 7023 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/community-operators]} name:Service_openshift-marketplace/community-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.189:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d389393c-7ba9-422c-b3f5-06e391d537d2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:47:02Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mb2qs_openshift-ovn-kubernetes(45295ff5-bb7d-450f-9ff1-eeb4edb0d705)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:05Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.754547 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d968804b-0683-488d-bc01-a0a16b689d6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9acc1a82033c3ce19a5331828cf1e54919f3eb43181d8ef15049d67b6c56dd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7df0c7cfcbf807a71f0b022f48858333e3de0be3f3de087efe5a231d41a9db90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9d4vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:05Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.769663 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gxwx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"377cd579-2ade-48ea-ad2a-44d1546fd5fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gxwx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:05Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.774081 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.774120 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.774134 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.774152 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.774166 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:05Z","lastTransitionTime":"2026-01-22T05:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.793581 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:05Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.817514 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:05Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.840328 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:05Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.866820 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:05Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.877930 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.878001 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.878019 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.878050 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.878069 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:05Z","lastTransitionTime":"2026-01-22T05:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.893533 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:05Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.912065 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:05Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.932426 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:05Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.951469 4982 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4
f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:05Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.972824 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b37309fa-e826-4c04-96a2-611415609985\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22646904f96d12e0c71cbc01fb8c49f02245acaca106b7090e22da3d3e22028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b85b4f1d0cefc55e522ca2999285f71b01ee3a5af68db920737dc01ac21d2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1004d804d31b1e415e24a30aa33e89a83d8a4cbc4db54be6001f3306461f097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efeef92a1a0c1b87ccce6c59c96e0fe0ec322fd7f828993c028dd9f8217fc33e\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efeef92a1a0c1b87ccce6c59c96e0fe0ec322fd7f828993c028dd9f8217fc33e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:05Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.981053 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.981100 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.981111 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.981128 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:05 crc kubenswrapper[4982]: I0122 05:47:05.981141 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:05Z","lastTransitionTime":"2026-01-22T05:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.084436 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.084517 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.084538 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.084574 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.084600 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:06Z","lastTransitionTime":"2026-01-22T05:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.187873 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.187943 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.187956 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.187978 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.187993 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:06Z","lastTransitionTime":"2026-01-22T05:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.292772 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.292883 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.292907 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.292936 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.292955 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:06Z","lastTransitionTime":"2026-01-22T05:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
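The NodeNotReady heartbeats above all carry the same root cause: the kubelet finds no CNI network config under /etc/kubernetes/cni/net.d/, so it keeps reporting NetworkReady=false until the network operator writes one. A minimal sketch in Go of the kind of directory check involved follows; the path is taken from the log message, but the extension filtering is an assumption modeled on libcni's ConfFiles, not the kubelet's exact code path.

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// cniConfFiles lists candidate CNI config files: regular files in the
// conf dir with a recognized extension. (Assumption: illustrative only,
// modeled on libcni; the real check runs inside the CRI runtime.)
func cniConfFiles(dir string) ([]string, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return nil, err
	}
	var confs []string
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			confs = append(confs, filepath.Join(dir, e.Name()))
		}
	}
	return confs, nil
}

func main() {
	confs, err := cniConfFiles("/etc/kubernetes/cni/net.d") // path from the log message
	if err != nil || len(confs) == 0 {
		fmt.Println("no CNI configuration file found; network plugin not ready")
		return
	}
	fmt.Println("CNI configs:", confs)
}

An empty (or missing) directory is exactly the state the log shows: the node stays NotReady, and sandbox creation for networked pods is skipped until a conf file appears.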
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.396047 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.396103 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.396118 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.396140 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.396153 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:06Z","lastTransitionTime":"2026-01-22T05:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.500105 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.500229 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.500250 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.500281 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.500306 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:06Z","lastTransitionTime":"2026-01-22T05:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.603295 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.603358 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.603369 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.603388 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.603401 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:06Z","lastTransitionTime":"2026-01-22T05:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.672144 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 07:55:50.521119629 +0000 UTC
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.707428 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.707493 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.707505 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.707522 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.707534 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:06Z","lastTransitionTime":"2026-01-22T05:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.718906 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.718937 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2"
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.718995 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.718936 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 05:47:06 crc kubenswrapper[4982]: E0122 05:47:06.719106 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 22 05:47:06 crc kubenswrapper[4982]: E0122 05:47:06.719427 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
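Alongside the CNI wait, every status patch above is rejected because the network-node-identity webhook serves a certificate that expired on 2025-08-24, while the node clock reads 2026-01-22. The failure is Go's standard x509 validity-window check; a minimal sketch of that check, assuming a PEM-encoded certificate on disk (the tls.crt filename under the logged /etc/webhook-cert/ mount is an assumption):

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"log"
	"os"
	"time"
)

func main() {
	// Hypothetical path; the log only shows the /etc/webhook-cert/ mount.
	pemBytes, err := os.ReadFile("/etc/webhook-cert/tls.crt")
	if err != nil {
		log.Fatal(err)
	}
	block, _ := pem.Decode(pemBytes)
	if block == nil {
		log.Fatal("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		log.Fatal(err)
	}
	// Same comparison crypto/x509 performs during TLS verification.
	now := time.Now()
	switch {
	case now.Before(cert.NotBefore):
		fmt.Printf("certificate not yet valid: current time %s is before %s\n",
			now.UTC().Format(time.RFC3339), cert.NotBefore.UTC().Format(time.RFC3339))
	case now.After(cert.NotAfter):
		fmt.Printf("certificate has expired: current time %s is after %s\n",
			now.UTC().Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
	default:
		fmt.Println("certificate valid until", cert.NotAfter.UTC().Format(time.RFC3339))
	}
}

Run against the webhook's serving cert, this would print the same "current time ... is after ..." pair the kubelet keeps logging.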
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:47:06 crc kubenswrapper[4982]: E0122 05:47:06.719478 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:47:06 crc kubenswrapper[4982]: E0122 05:47:06.719532 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.810495 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.810553 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.810575 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.810602 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.810622 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:06Z","lastTransitionTime":"2026-01-22T05:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.914454 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.914512 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.914532 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.914563 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:06 crc kubenswrapper[4982]: I0122 05:47:06.914582 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:06Z","lastTransitionTime":"2026-01-22T05:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.017449 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.017516 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.017535 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.017569 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.017596 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:07Z","lastTransitionTime":"2026-01-22T05:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.120822 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.120897 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.120952 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.120972 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.120986 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:07Z","lastTransitionTime":"2026-01-22T05:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.224366 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.224446 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.224468 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.224496 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.224511 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:07Z","lastTransitionTime":"2026-01-22T05:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.327920 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.327975 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.327992 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.328020 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.328034 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:07Z","lastTransitionTime":"2026-01-22T05:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.431353 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.431428 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.431451 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.431480 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.431502 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:07Z","lastTransitionTime":"2026-01-22T05:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.535146 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.535197 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.535208 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.535225 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.535239 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:07Z","lastTransitionTime":"2026-01-22T05:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.637955 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.637994 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.638005 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.638022 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.638034 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:07Z","lastTransitionTime":"2026-01-22T05:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.673170 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 23:22:42.897012266 +0000 UTC
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.758681 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.758748 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.758758 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.758773 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.758784 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:07Z","lastTransitionTime":"2026-01-22T05:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.862039 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.862090 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.862148 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.862213 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.862233 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:07Z","lastTransitionTime":"2026-01-22T05:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.965458 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.965502 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.965513 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.965529 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:07 crc kubenswrapper[4982]: I0122 05:47:07.965541 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:07Z","lastTransitionTime":"2026-01-22T05:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.068550 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.068624 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.068643 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.068671 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.068689 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:08Z","lastTransitionTime":"2026-01-22T05:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.172873 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.172908 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.172917 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.172933 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.172944 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:08Z","lastTransitionTime":"2026-01-22T05:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.276695 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.276753 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.276773 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.276797 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.276816 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:08Z","lastTransitionTime":"2026-01-22T05:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.380614 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.380703 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.380727 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.380758 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.380777 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:08Z","lastTransitionTime":"2026-01-22T05:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.483556 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.483610 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.483625 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.483645 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.483662 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:08Z","lastTransitionTime":"2026-01-22T05:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.585785 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.585895 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.585915 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.585938 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.585958 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:08Z","lastTransitionTime":"2026-01-22T05:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.674755 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 06:03:04.309276643 +0000 UTC
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.688227 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.688288 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.688312 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.688344 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.688370 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:08Z","lastTransitionTime":"2026-01-22T05:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.719120 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.719144 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.719145 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 05:47:08 crc kubenswrapper[4982]: E0122 05:47:08.719489 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 22 05:47:08 crc kubenswrapper[4982]: E0122 05:47:08.719640 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 22 05:47:08 crc kubenswrapper[4982]: E0122 05:47:08.719897 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
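Note that each certificate_manager line above reports a rotation deadline (2026-01-07, 2026-01-08, 2025-12-20) that is already in the past relative to the node clock, which is why rotation is re-attempted on every pass. A hedged sketch of how such a deadline can be derived follows; the jitter band (roughly 70-90% of the certificate's lifetime) is an assumption modeled on client-go's certificate manager, and the notBefore value is hypothetical since the log only shows the expiration.

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline picks a random point past ~70% of the certificate's
// validity window, so rotation starts well before expiry. The
// 0.7 + 0.2*rand split is an assumption, not taken from this log;
// re-randomizing it each attempt would explain the differing deadlines.
func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	notBefore := time.Date(2025, 11, 26, 5, 53, 3, 0, time.UTC) // hypothetical issue time
	notAfter := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC)   // expiration from the log
	fmt.Println("rotation deadline:", rotationDeadline(notBefore, notAfter).UTC())
}

With a deadline before "now", the manager considers rotation overdue and keeps logging a fresh deadline on each evaluation, as seen at 05:47:06, 05:47:07, and 05:47:08.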
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.720011 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:47:08 crc kubenswrapper[4982]: E0122 05:47:08.720322 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.791875 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.791925 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.791937 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.791956 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.791973 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:08Z","lastTransitionTime":"2026-01-22T05:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.894931 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.894988 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.895008 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.895029 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.895045 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:08Z","lastTransitionTime":"2026-01-22T05:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.997948 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.997993 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.998005 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.998024 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:08 crc kubenswrapper[4982]: I0122 05:47:08.998035 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:08Z","lastTransitionTime":"2026-01-22T05:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.100634 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.100672 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.100680 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.100694 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.100706 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:09Z","lastTransitionTime":"2026-01-22T05:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.204045 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.204087 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.204099 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.204117 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.204126 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:09Z","lastTransitionTime":"2026-01-22T05:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.311150 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.311287 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.311304 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.311340 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.311357 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:09Z","lastTransitionTime":"2026-01-22T05:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.414321 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.414386 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.414401 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.414424 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.414438 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:09Z","lastTransitionTime":"2026-01-22T05:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.517812 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.517895 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.517909 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.517929 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.517943 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:09Z","lastTransitionTime":"2026-01-22T05:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.620130 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.620161 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.620170 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.620184 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.620196 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:09Z","lastTransitionTime":"2026-01-22T05:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.675944 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 12:22:19.156863385 +0000 UTC Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.723915 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.723986 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.724004 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.724034 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.724052 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:09Z","lastTransitionTime":"2026-01-22T05:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.739137 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ed4c6de287b03090ee1c6132bb3587996906a4db3cde4ee84694f01003eaf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.761667 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.780010 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rnqxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73c68787-8e13-4ad7-bef9-00a12d537c34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56aa75ca3093d3cbe9cafd655f85333ce631d29f802a3cff49d35e3759baafb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vnclm\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rnqxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.804948 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f98dk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64ec5ffc-3edf-4820-bc21-58b1ace5615c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d89d2c542320e62250c1cd2e647a48a26be58b60f268f696031d49a0292938c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56b348bc873c095ac99a10152689392033b51ece472ad590aae99de0cb50dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":
\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cdcfe75481022bb2bad4a5d2199b5d32cf40253c4d2489ebcd26525fba57055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc0c4fa50d1b602812bb72b90455932eccef497d9e164819f0264d101f4e8e57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe12d5a3bf6d28cbce6fcc755c50a2218541b141085dacc4f4736fba682c67db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a589f49c75d1d8a4df631752d6a64eba720ce2409c9faecd5dc9b6c2280281b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc8dd9335de18367955b666d33e9eb947a9e07b1a34861faf3231e346a12ba57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s4zgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11
\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f98dk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.821757 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb16d24-5f0f-4f27-9c9d-0391d24cdf92\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7243427e708ca315efe99d8d87dc6078502a4ac8bbab9d3c1a14a3be8e479970\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://988795d75122feff13d8661e876ec6e4eadd09358efc3cd8f1adb05c5a7ab1a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://988795d75122feff13d8661e876ec6e4eadd09358efc3cd8f1adb05c5a7ab1a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.826070 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.826099 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.826111 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.826129 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.826140 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:09Z","lastTransitionTime":"2026-01-22T05:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.845138 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc16028f4d00a4e029f1b80cf3d4cfdf07b4602ae300762ed2181b0766c8a2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.867754 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4xrw6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b107116500053caa4af8918d8569196067dea04df195595dd9876338a9c1fcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:46:50Z\\\",\\\"message\\\":\\\"2026-01-22T05:46:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_e563e083-93cb-4d64-acfe-8dc4862aa7e0\\\\n2026-01-22T05:46:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_e563e083-93cb-4d64-acfe-8dc4862aa7e0 to /host/opt/cni/bin/\\\\n2026-01-22T05:46:05Z [verbose] multus-daemon started\\\\n2026-01-22T05:46:05Z [verbose] Readiness Indicator file check\\\\n2026-01-22T05:46:50Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vj8lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4xrw6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.905734 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34cb6c04bceeb41b90c18f2025140c1131ca41c0ade06c2447fefe3524744fa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34cb6c04bceeb41b90c18f2025140c1131ca41c0ade06c2447fefe3524744fa4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T05:47:03Z\\\",\\\"message\\\":\\\"or occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:03Z is after 2025-08-24T17:21:41Z]\\\\nI0122 05:47:03.684123 7023 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-rl6kj after 0 failed attempt(s)\\\\nI0122 05:47:03.684132 7023 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-rl6kj\\\\nI0122 05:47:03.684030 7023 services_controller.go:356] Processing sync for service openshift-ingress-canary/ingress-canary for network=default\\\\nI0122 05:47:03.684104 7023 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/community-operators]} name:Service_openshift-marketplace/community-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.189:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d389393c-7ba9-422c-b3f5-06e391d537d2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T05:47:02Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mb2qs_openshift-ovn-kubernetes(45295ff5-bb7d-450f-9ff1-eeb4edb0d705)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fr8wb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mb2qs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.927049 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d968804b-0683-488d-bc01-a0a16b689d6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9acc1a82033c3ce19a5331828cf1e54919f3eb43181d8ef15049d67b6c56dd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7df0c7cfcbf807a71f0b022f48858333e3de0be3f3de087efe5a231d41a9db90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68mz9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9d4vg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.928479 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.928582 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.928658 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.928721 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.928743 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:09Z","lastTransitionTime":"2026-01-22T05:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.945094 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gxwx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"377cd579-2ade-48ea-ad2a-44d1546fd5fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z67hv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gxwx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.967165 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"226bd9e0-6c4e-4557-8b83-37ea8a6ee58a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2fde4606e3dbcbd15936e2589b9171ee7c8a0e3f304a106dcbc191202ed5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8a3e4650a54b1dc58e5deeaf3d4145acc3ae373514d4610999ab2d44e5902d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c84f1037fdebe68f01491ebe51a9df7be475d98b250550b8d62265e8242470a9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:09 crc kubenswrapper[4982]: I0122 05:47:09.986797 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b37309fa-e826-4c04-96a2-611415609985\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22646904f96d12e0c71cbc01fb8c49f02245acaca106b7090e22da3d3e22028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b85b4f1d0cefc55e522ca2999285f71b01ee3a5af68db920737dc01ac21d2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1004d804d31b1e415e24a30aa33e89a83d8a4cbc4db54be6001f3306461f097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efeef92a1a0c1b87ccce6c59c96e0fe0ec322fd7f828993c028dd9f8217fc33e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efeef92a1a0c1b87ccce6c59c96e0fe0ec322fd7f828993c028dd9f8217fc33e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:09Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.008320 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:10Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.029089 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:10Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.033274 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.033336 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.033356 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.033383 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.033404 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:10Z","lastTransitionTime":"2026-01-22T05:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.050311 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39727ce35bdde391057ea8ec163bdc34417b7b3305870480df9c02ae4fd05199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3811f5d50020b1e867109d3b52f47985aa92d1039e21c85516033181f33481c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:10Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.063500 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-rl6kj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fef7fafd-1792-4aba-b5f6-67faaa11bdb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f3472397a3fcbe87fdde4aa0970b2c5c1e38402a681b33726e03af001f334\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jf76n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-rl6kj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:10Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.080455 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2829369e-72ba-4637-853b-88f5cf242a0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a5a9637b9fff4f4ab8c22ab18e8793afbbdc227383d107cf18d4049aeb29a74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nm8ld\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:46:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gdpxx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:10Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.101776 4982 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d271a46c-ce9f-453b-93fe-a489ea56d1e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T05:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4
f6a1608224ea2a7e81fb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T05:45:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T05:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T05:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T05:45:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T05:47:10Z is after 2025-08-24T17:21:41Z" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.136183 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.136238 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.136252 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.136274 4982 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.136288 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:10Z","lastTransitionTime":"2026-01-22T05:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.239434 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.239483 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.239501 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.239525 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.239545 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:10Z","lastTransitionTime":"2026-01-22T05:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.342846 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.344074 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.344254 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.344432 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.344584 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:10Z","lastTransitionTime":"2026-01-22T05:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.448550 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.448956 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.449150 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.449324 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.449473 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:10Z","lastTransitionTime":"2026-01-22T05:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.552584 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.552684 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.552704 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.552729 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.552747 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:10Z","lastTransitionTime":"2026-01-22T05:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.656338 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.656412 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.656435 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.656469 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.656491 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:10Z","lastTransitionTime":"2026-01-22T05:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.677134 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 21:15:31.073625264 +0000 UTC Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.719257 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.719430 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.719637 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:47:10 crc kubenswrapper[4982]: E0122 05:47:10.719634 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.719762 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:47:10 crc kubenswrapper[4982]: E0122 05:47:10.719891 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:47:10 crc kubenswrapper[4982]: E0122 05:47:10.720543 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:47:10 crc kubenswrapper[4982]: E0122 05:47:10.720683 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.759498 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.759546 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.759558 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.759579 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.759594 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:10Z","lastTransitionTime":"2026-01-22T05:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.862758 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.863266 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.863458 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.863650 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.864044 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:10Z","lastTransitionTime":"2026-01-22T05:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.967489 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.967566 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.967589 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.967621 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:10 crc kubenswrapper[4982]: I0122 05:47:10.967642 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:10Z","lastTransitionTime":"2026-01-22T05:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.071270 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.071309 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.071318 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.071334 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.071345 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:11Z","lastTransitionTime":"2026-01-22T05:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.174558 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.174654 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.174682 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.174719 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.174745 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:11Z","lastTransitionTime":"2026-01-22T05:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.277775 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.277823 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.277887 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.277913 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.277929 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:11Z","lastTransitionTime":"2026-01-22T05:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.380703 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.380770 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.380789 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.380817 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.380838 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:11Z","lastTransitionTime":"2026-01-22T05:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.484305 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.484369 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.484386 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.484416 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.484433 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:11Z","lastTransitionTime":"2026-01-22T05:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.588728 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.588784 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.588793 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.588810 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.588822 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:11Z","lastTransitionTime":"2026-01-22T05:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.677496 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-10 09:08:30.732701199 +0000 UTC
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.691565 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.691597 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.691607 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.691626 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
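Editor's note: the certificate_manager lines above report the same kubelet-serving certificate (expiring 2026-02-24) with a different rotation deadline on each pass (2025-11-10 here, later 2025-12-08 and 2026-01-11). That is the jittered-deadline pattern: each recomputation picks a random point late in the validity window so a fleet does not rotate at once. A minimal sketch of the idea; the 70%–90% window and the assumed issuance date are illustrative assumptions, not values from this log:

```go
package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline picks a random point in the tail of the certificate's
// validity window, so kubelets do not all rotate simultaneously. The
// 0.7 + 0.2*rand shape mirrors client-go's behavior, but the exact
// constants here are assumptions for illustration.
func rotationDeadline(notBefore, notAfter time.Time, r *rand.Rand) time.Time {
	total := notAfter.Sub(notBefore)
	jittered := time.Duration(float64(total) * (0.7 + 0.2*r.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	notAfter, _ := time.Parse(time.RFC3339, "2026-02-24T05:53:03Z") // from the log
	notBefore := notAfter.Add(-120 * 24 * time.Hour)                // assumed issuance, not in the log
	r := rand.New(rand.NewSource(time.Now().UnixNano()))
	for i := 0; i < 3; i++ {
		// Each recomputation lands at a different point, as the log shows.
		fmt.Println("rotation deadline is", rotationDeadline(notBefore, notAfter, r))
	}
}
```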
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.691638 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:11Z","lastTransitionTime":"2026-01-22T05:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.795525 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.795604 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.795623 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.795653 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.795684 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:11Z","lastTransitionTime":"2026-01-22T05:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.899357 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.899438 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.899457 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.899485 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:11 crc kubenswrapper[4982]: I0122 05:47:11.899507 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:11Z","lastTransitionTime":"2026-01-22T05:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.003175 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.003251 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.003270 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.003298 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.003317 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:12Z","lastTransitionTime":"2026-01-22T05:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.108092 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.108177 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.108207 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.108236 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.108260 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:12Z","lastTransitionTime":"2026-01-22T05:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.215686 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.215782 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.215803 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.215833 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.215887 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:12Z","lastTransitionTime":"2026-01-22T05:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.321212 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.321340 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.321402 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.321448 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.321521 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:12Z","lastTransitionTime":"2026-01-22T05:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.426309 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.426755 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.426813 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.426845 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.426931 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:12Z","lastTransitionTime":"2026-01-22T05:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.536987 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.537077 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.537090 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.537112 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.537124 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:12Z","lastTransitionTime":"2026-01-22T05:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.641009 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.641076 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.641094 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.641122 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
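Editor's note: each setters.go "Node became not ready" entry above is the kubelet status setter translating the container runtime's network state into the node's Ready condition. A rough sketch of that mapping, with a simplified struct standing in for the k8s.io/api NodeCondition type:

```go
package main

import (
	"fmt"
	"time"
)

// NodeCondition is a simplified stand-in for v1.NodeCondition.
type NodeCondition struct {
	Type, Status, Reason, Message string
	LastHeartbeatTime             time.Time
	LastTransitionTime            time.Time
}

// readyCondition mirrors the pattern in the log: when the runtime reports the
// pod network as not ready, the node's Ready condition is set to False with
// reason KubeletNotReady and the runtime's error as the message.
func readyCondition(networkReady bool, runtimeErr string, now time.Time) NodeCondition {
	c := NodeCondition{Type: "Ready", LastHeartbeatTime: now, LastTransitionTime: now}
	if !networkReady {
		c.Status = "False"
		c.Reason = "KubeletNotReady"
		c.Message = "container runtime network not ready: " + runtimeErr
		return c
	}
	c.Status = "True"
	c.Reason = "KubeletReady"
	return c
}

func main() {
	// The runtime error string is copied from the entries above.
	cond := readyCondition(false,
		"NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?",
		time.Now())
	fmt.Printf("%+v\n", cond)
}
```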
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.641141 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:12Z","lastTransitionTime":"2026-01-22T05:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.677653 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-08 12:34:19.357638619 +0000 UTC
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.719229 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.719321 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.719374 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.719448 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2"
Jan 22 05:47:12 crc kubenswrapper[4982]: E0122 05:47:12.719449 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 22 05:47:12 crc kubenswrapper[4982]: E0122 05:47:12.719561 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 22 05:47:12 crc kubenswrapper[4982]: E0122 05:47:12.719673 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb"
Jan 22 05:47:12 crc kubenswrapper[4982]: E0122 05:47:12.719740 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.745452 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.745516 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.745538 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.745564 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.745582 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:12Z","lastTransitionTime":"2026-01-22T05:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.849725 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.849789 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.849806 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.849830 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.849847 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:12Z","lastTransitionTime":"2026-01-22T05:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.953040 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.953125 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.953145 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.953177 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:12 crc kubenswrapper[4982]: I0122 05:47:12.953199 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:12Z","lastTransitionTime":"2026-01-22T05:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.058534 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.059176 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.059254 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.059283 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.059302 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:13Z","lastTransitionTime":"2026-01-22T05:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.162795 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.162968 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.162993 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.163032 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.163058 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:13Z","lastTransitionTime":"2026-01-22T05:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.266256 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.266309 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.266321 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.266345 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.266359 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:13Z","lastTransitionTime":"2026-01-22T05:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.304942 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.305025 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.305048 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.305087 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.305116 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T05:47:13Z","lastTransitionTime":"2026-01-22T05:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.377991 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-cw7zp"]
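Editor's note: the recurring "no CNI configuration file in /etc/kubernetes/cni/net.d/" error above originates in the container runtime's CNI plugin manager finding an empty config directory (here, because ovnkube-node is still crash-looping and has not written its config yet). A self-contained sketch of that readiness probe; the real check lives in the runtime's ocicni layer, so treat this as an illustration, not the actual implementation:

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// cniConfigPresent reports whether any CNI network definition exists in dir.
// Runtimes typically accept .conf, .conflist and .json files.
func cniConfigPresent(dir string) (bool, error) {
	for _, pat := range []string{"*.conf", "*.conflist", "*.json"} {
		matches, err := filepath.Glob(filepath.Join(dir, pat))
		if err != nil {
			return false, err
		}
		if len(matches) > 0 {
			return true, nil
		}
	}
	return false, nil
}

func main() {
	dir := "/etc/kubernetes/cni/net.d"
	ok, err := cniConfigPresent(dir)
	if err != nil {
		fmt.Fprintln(os.Stderr, "checking CNI config:", err)
		os.Exit(1)
	}
	if !ok {
		// Mirrors the message the kubelet keeps relaying from the runtime.
		fmt.Printf("no CNI configuration file in %s. Has your network provider started?\n", dir)
		return
	}
	fmt.Println("CNI configuration found; network plugin should report ready")
}
```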
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.378639 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cw7zp"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.380755 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.382247 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.382841 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.382964 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.416475 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-rnqxd" podStartSLOduration=72.416434627 podStartE2EDuration="1m12.416434627s" podCreationTimestamp="2026-01-22 05:46:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:47:13.416106159 +0000 UTC m=+94.254744172" watchObservedRunningTime="2026-01-22 05:47:13.416434627 +0000 UTC m=+94.255072680"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.439674 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/4b4875a9-2957-46fc-a207-b49c05ffdfea-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-cw7zp\" (UID: \"4b4875a9-2957-46fc-a207-b49c05ffdfea\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cw7zp"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.439807 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4b4875a9-2957-46fc-a207-b49c05ffdfea-service-ca\") pod \"cluster-version-operator-5c965bbfc6-cw7zp\" (UID: \"4b4875a9-2957-46fc-a207-b49c05ffdfea\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cw7zp"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.439900 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4b4875a9-2957-46fc-a207-b49c05ffdfea-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-cw7zp\" (UID: \"4b4875a9-2957-46fc-a207-b49c05ffdfea\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cw7zp"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.440098 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/4b4875a9-2957-46fc-a207-b49c05ffdfea-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-cw7zp\" (UID: \"4b4875a9-2957-46fc-a207-b49c05ffdfea\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cw7zp"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.440171 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4b4875a9-2957-46fc-a207-b49c05ffdfea-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-cw7zp\" (UID: \"4b4875a9-2957-46fc-a207-b49c05ffdfea\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cw7zp"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.444703 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-f98dk" podStartSLOduration=71.444676139 podStartE2EDuration="1m11.444676139s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:47:13.443175703 +0000 UTC m=+94.281813716" watchObservedRunningTime="2026-01-22 05:47:13.444676139 +0000 UTC m=+94.283314182"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.475964 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=14.475914565 podStartE2EDuration="14.475914565s" podCreationTimestamp="2026-01-22 05:46:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:47:13.475042054 +0000 UTC m=+94.313680077" watchObservedRunningTime="2026-01-22 05:47:13.475914565 +0000 UTC m=+94.314552578"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.541913 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/4b4875a9-2957-46fc-a207-b49c05ffdfea-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-cw7zp\" (UID: \"4b4875a9-2957-46fc-a207-b49c05ffdfea\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cw7zp"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.541979 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4b4875a9-2957-46fc-a207-b49c05ffdfea-service-ca\") pod \"cluster-version-operator-5c965bbfc6-cw7zp\" (UID: \"4b4875a9-2957-46fc-a207-b49c05ffdfea\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cw7zp"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.542015 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4b4875a9-2957-46fc-a207-b49c05ffdfea-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-cw7zp\" (UID: \"4b4875a9-2957-46fc-a207-b49c05ffdfea\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cw7zp"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.542052 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4b4875a9-2957-46fc-a207-b49c05ffdfea-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-cw7zp\" (UID: \"4b4875a9-2957-46fc-a207-b49c05ffdfea\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cw7zp"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.542080 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/4b4875a9-2957-46fc-a207-b49c05ffdfea-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-cw7zp\" (UID: \"4b4875a9-2957-46fc-a207-b49c05ffdfea\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cw7zp"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.542158 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/4b4875a9-2957-46fc-a207-b49c05ffdfea-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-cw7zp\" (UID: \"4b4875a9-2957-46fc-a207-b49c05ffdfea\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cw7zp"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.542305 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/4b4875a9-2957-46fc-a207-b49c05ffdfea-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-cw7zp\" (UID: \"4b4875a9-2957-46fc-a207-b49c05ffdfea\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cw7zp"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.549259 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9d4vg" podStartSLOduration=71.549219371 podStartE2EDuration="1m11.549219371s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:47:13.542383724 +0000 UTC m=+94.381021767" watchObservedRunningTime="2026-01-22 05:47:13.549219371 +0000 UTC m=+94.387857414"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.549765 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4b4875a9-2957-46fc-a207-b49c05ffdfea-service-ca\") pod \"cluster-version-operator-5c965bbfc6-cw7zp\" (UID: \"4b4875a9-2957-46fc-a207-b49c05ffdfea\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cw7zp"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.567307 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4b4875a9-2957-46fc-a207-b49c05ffdfea-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-cw7zp\" (UID: \"4b4875a9-2957-46fc-a207-b49c05ffdfea\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cw7zp"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.570401 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4b4875a9-2957-46fc-a207-b49c05ffdfea-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-cw7zp\" (UID: \"4b4875a9-2957-46fc-a207-b49c05ffdfea\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cw7zp"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.597818 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=74.597794462 podStartE2EDuration="1m14.597794462s" podCreationTimestamp="2026-01-22 05:45:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:47:13.597105186 +0000 UTC m=+94.435743199" watchObservedRunningTime="2026-01-22 05:47:13.597794462 +0000 UTC m=+94.436432475"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.642152 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-4xrw6" podStartSLOduration=71.642128469 podStartE2EDuration="1m11.642128469s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:47:13.641897313 +0000 UTC m=+94.480535336" watchObservedRunningTime="2026-01-22 05:47:13.642128469 +0000 UTC m=+94.480766482"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.678054 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 16:44:38.478913562 +0000 UTC
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.678128 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Rotating certificates
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.687662 4982 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.697822 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cw7zp"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.699443 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-rl6kj" podStartSLOduration=72.699420653 podStartE2EDuration="1m12.699420653s" podCreationTimestamp="2026-01-22 05:46:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:47:13.698809648 +0000 UTC m=+94.537447651" watchObservedRunningTime="2026-01-22 05:47:13.699420653 +0000 UTC m=+94.538058666"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.722355 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podStartSLOduration=72.722334104 podStartE2EDuration="1m12.722334104s" podCreationTimestamp="2026-01-22 05:46:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:47:13.720685074 +0000 UTC m=+94.559323087" watchObservedRunningTime="2026-01-22 05:47:13.722334104 +0000 UTC m=+94.560972117"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.749900 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=76.749843188 podStartE2EDuration="1m16.749843188s" podCreationTimestamp="2026-01-22 05:45:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:47:13.748317952 +0000 UTC m=+94.586955985" watchObservedRunningTime="2026-01-22 05:47:13.749843188 +0000 UTC m=+94.588481201"
Jan 22 05:47:13 crc kubenswrapper[4982]: I0122 05:47:13.786120 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=48.786097366999996 podStartE2EDuration="48.786097367s" podCreationTimestamp="2026-01-22 05:46:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:47:13.766581979 +0000 UTC m=+94.605219982" watchObservedRunningTime="2026-01-22 05:47:13.786097367 +0000 UTC m=+94.624735370"
event={"ID":"4b4875a9-2957-46fc-a207-b49c05ffdfea","Type":"ContainerStarted","Data":"967d75370683ee6c89354887a9cb57fb1c9c71236734fc70ab634d06de35f55f"} Jan 22 05:47:14 crc kubenswrapper[4982]: I0122 05:47:14.623920 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cw7zp" event={"ID":"4b4875a9-2957-46fc-a207-b49c05ffdfea","Type":"ContainerStarted","Data":"fe324f775e52550e0e2e54a7d41dfbdd972401510674ceb2fc2595e676c3b540"} Jan 22 05:47:14 crc kubenswrapper[4982]: I0122 05:47:14.647588 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cw7zp" podStartSLOduration=72.647541879 podStartE2EDuration="1m12.647541879s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:47:14.644765751 +0000 UTC m=+95.483403794" watchObservedRunningTime="2026-01-22 05:47:14.647541879 +0000 UTC m=+95.486179932" Jan 22 05:47:14 crc kubenswrapper[4982]: I0122 05:47:14.718267 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:47:14 crc kubenswrapper[4982]: I0122 05:47:14.718319 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:47:14 crc kubenswrapper[4982]: I0122 05:47:14.718274 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:47:14 crc kubenswrapper[4982]: I0122 05:47:14.718380 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:47:14 crc kubenswrapper[4982]: E0122 05:47:14.718515 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:47:14 crc kubenswrapper[4982]: E0122 05:47:14.718793 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:47:14 crc kubenswrapper[4982]: E0122 05:47:14.718904 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:47:14 crc kubenswrapper[4982]: E0122 05:47:14.718991 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:47:16 crc kubenswrapper[4982]: I0122 05:47:16.718141 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:47:16 crc kubenswrapper[4982]: I0122 05:47:16.718212 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:47:16 crc kubenswrapper[4982]: E0122 05:47:16.718334 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:47:16 crc kubenswrapper[4982]: I0122 05:47:16.718358 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:47:16 crc kubenswrapper[4982]: I0122 05:47:16.718263 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:47:16 crc kubenswrapper[4982]: E0122 05:47:16.718496 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:47:16 crc kubenswrapper[4982]: E0122 05:47:16.718695 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:47:16 crc kubenswrapper[4982]: E0122 05:47:16.718918 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:47:17 crc kubenswrapper[4982]: I0122 05:47:17.719256 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:47:17 crc kubenswrapper[4982]: E0122 05:47:17.719526 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:47:18 crc kubenswrapper[4982]: I0122 05:47:18.718447 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:47:18 crc kubenswrapper[4982]: I0122 05:47:18.718936 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:47:18 crc kubenswrapper[4982]: I0122 05:47:18.719100 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:47:18 crc kubenswrapper[4982]: E0122 05:47:18.719451 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:47:18 crc kubenswrapper[4982]: E0122 05:47:18.719097 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:47:18 crc kubenswrapper[4982]: E0122 05:47:18.720031 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:47:19 crc kubenswrapper[4982]: I0122 05:47:19.718611 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:47:19 crc kubenswrapper[4982]: E0122 05:47:19.720887 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:47:19 crc kubenswrapper[4982]: I0122 05:47:19.722574 4982 scope.go:117] "RemoveContainer" containerID="34cb6c04bceeb41b90c18f2025140c1131ca41c0ade06c2447fefe3524744fa4" Jan 22 05:47:19 crc kubenswrapper[4982]: E0122 05:47:19.722911 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-mb2qs_openshift-ovn-kubernetes(45295ff5-bb7d-450f-9ff1-eeb4edb0d705)\"" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" Jan 22 05:47:19 crc kubenswrapper[4982]: I0122 05:47:19.745078 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Jan 22 05:47:20 crc kubenswrapper[4982]: I0122 05:47:20.718432 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:47:20 crc kubenswrapper[4982]: E0122 05:47:20.718565 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:47:20 crc kubenswrapper[4982]: I0122 05:47:20.718431 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:47:20 crc kubenswrapper[4982]: E0122 05:47:20.718654 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:47:20 crc kubenswrapper[4982]: I0122 05:47:20.718833 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:47:20 crc kubenswrapper[4982]: E0122 05:47:20.718985 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:47:20 crc kubenswrapper[4982]: I0122 05:47:20.729238 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/377cd579-2ade-48ea-ad2a-44d1546fd5fb-metrics-certs\") pod \"network-metrics-daemon-gxwx2\" (UID: \"377cd579-2ade-48ea-ad2a-44d1546fd5fb\") " pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:47:20 crc kubenswrapper[4982]: E0122 05:47:20.729348 4982 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 05:47:20 crc kubenswrapper[4982]: E0122 05:47:20.729741 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/377cd579-2ade-48ea-ad2a-44d1546fd5fb-metrics-certs podName:377cd579-2ade-48ea-ad2a-44d1546fd5fb nodeName:}" failed. No retries permitted until 2026-01-22 05:48:24.729727538 +0000 UTC m=+165.568365541 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/377cd579-2ade-48ea-ad2a-44d1546fd5fb-metrics-certs") pod "network-metrics-daemon-gxwx2" (UID: "377cd579-2ade-48ea-ad2a-44d1546fd5fb") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 05:47:21 crc kubenswrapper[4982]: I0122 05:47:21.718890 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:47:21 crc kubenswrapper[4982]: E0122 05:47:21.719469 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:47:22 crc kubenswrapper[4982]: I0122 05:47:22.719133 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:47:22 crc kubenswrapper[4982]: I0122 05:47:22.719342 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:47:22 crc kubenswrapper[4982]: E0122 05:47:22.719715 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:47:22 crc kubenswrapper[4982]: I0122 05:47:22.720050 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:47:22 crc kubenswrapper[4982]: E0122 05:47:22.720041 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:47:22 crc kubenswrapper[4982]: E0122 05:47:22.720330 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:47:23 crc kubenswrapper[4982]: I0122 05:47:23.718785 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:47:23 crc kubenswrapper[4982]: E0122 05:47:23.719307 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:47:24 crc kubenswrapper[4982]: I0122 05:47:24.718921 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:47:24 crc kubenswrapper[4982]: I0122 05:47:24.718950 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:47:24 crc kubenswrapper[4982]: I0122 05:47:24.718957 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:47:24 crc kubenswrapper[4982]: E0122 05:47:24.719117 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:47:24 crc kubenswrapper[4982]: E0122 05:47:24.719320 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:47:24 crc kubenswrapper[4982]: E0122 05:47:24.719602 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:47:25 crc kubenswrapper[4982]: I0122 05:47:25.718726 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:47:25 crc kubenswrapper[4982]: E0122 05:47:25.718907 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:47:26 crc kubenswrapper[4982]: I0122 05:47:26.718567 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:47:26 crc kubenswrapper[4982]: I0122 05:47:26.718609 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:47:26 crc kubenswrapper[4982]: I0122 05:47:26.718747 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:47:26 crc kubenswrapper[4982]: E0122 05:47:26.718826 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:47:26 crc kubenswrapper[4982]: E0122 05:47:26.719091 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:47:26 crc kubenswrapper[4982]: E0122 05:47:26.719222 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:47:27 crc kubenswrapper[4982]: I0122 05:47:27.719096 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:47:27 crc kubenswrapper[4982]: E0122 05:47:27.719352 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:47:28 crc kubenswrapper[4982]: I0122 05:47:28.718440 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:47:28 crc kubenswrapper[4982]: I0122 05:47:28.718477 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:47:28 crc kubenswrapper[4982]: E0122 05:47:28.718683 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:47:28 crc kubenswrapper[4982]: I0122 05:47:28.719230 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:47:28 crc kubenswrapper[4982]: E0122 05:47:28.719378 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:47:28 crc kubenswrapper[4982]: E0122 05:47:28.719999 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:47:29 crc kubenswrapper[4982]: I0122 05:47:29.719017 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:47:29 crc kubenswrapper[4982]: E0122 05:47:29.720945 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:47:29 crc kubenswrapper[4982]: I0122 05:47:29.760007 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=10.759990125 podStartE2EDuration="10.759990125s" podCreationTimestamp="2026-01-22 05:47:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:47:29.75893805 +0000 UTC m=+110.597576063" watchObservedRunningTime="2026-01-22 05:47:29.759990125 +0000 UTC m=+110.598628128" Jan 22 05:47:30 crc kubenswrapper[4982]: I0122 05:47:30.718389 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:47:30 crc kubenswrapper[4982]: I0122 05:47:30.718541 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:47:30 crc kubenswrapper[4982]: E0122 05:47:30.718590 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:47:30 crc kubenswrapper[4982]: I0122 05:47:30.718646 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:47:30 crc kubenswrapper[4982]: E0122 05:47:30.718799 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:47:30 crc kubenswrapper[4982]: E0122 05:47:30.719038 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:47:30 crc kubenswrapper[4982]: I0122 05:47:30.720283 4982 scope.go:117] "RemoveContainer" containerID="34cb6c04bceeb41b90c18f2025140c1131ca41c0ade06c2447fefe3524744fa4" Jan 22 05:47:30 crc kubenswrapper[4982]: E0122 05:47:30.720638 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-mb2qs_openshift-ovn-kubernetes(45295ff5-bb7d-450f-9ff1-eeb4edb0d705)\"" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" Jan 22 05:47:31 crc kubenswrapper[4982]: I0122 05:47:31.718780 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:47:31 crc kubenswrapper[4982]: E0122 05:47:31.718935 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:47:32 crc kubenswrapper[4982]: I0122 05:47:32.718722 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:47:32 crc kubenswrapper[4982]: I0122 05:47:32.719043 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:47:32 crc kubenswrapper[4982]: E0122 05:47:32.719216 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:47:32 crc kubenswrapper[4982]: E0122 05:47:32.719483 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:47:32 crc kubenswrapper[4982]: I0122 05:47:32.719518 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:47:32 crc kubenswrapper[4982]: E0122 05:47:32.719733 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:47:33 crc kubenswrapper[4982]: I0122 05:47:33.718701 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:47:33 crc kubenswrapper[4982]: E0122 05:47:33.719053 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:47:34 crc kubenswrapper[4982]: I0122 05:47:34.718336 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:47:34 crc kubenswrapper[4982]: I0122 05:47:34.718560 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:47:34 crc kubenswrapper[4982]: I0122 05:47:34.718625 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:47:34 crc kubenswrapper[4982]: E0122 05:47:34.718804 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:47:34 crc kubenswrapper[4982]: E0122 05:47:34.718955 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:47:34 crc kubenswrapper[4982]: E0122 05:47:34.719391 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:47:35 crc kubenswrapper[4982]: I0122 05:47:35.718606 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:47:35 crc kubenswrapper[4982]: E0122 05:47:35.718804 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:47:36 crc kubenswrapper[4982]: I0122 05:47:36.713460 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4xrw6_f70d53ec-9c73-45bf-b6b4-ec45565ef1e6/kube-multus/1.log" Jan 22 05:47:36 crc kubenswrapper[4982]: I0122 05:47:36.715163 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4xrw6_f70d53ec-9c73-45bf-b6b4-ec45565ef1e6/kube-multus/0.log" Jan 22 05:47:36 crc kubenswrapper[4982]: I0122 05:47:36.715248 4982 generic.go:334] "Generic (PLEG): container finished" podID="f70d53ec-9c73-45bf-b6b4-ec45565ef1e6" containerID="8b107116500053caa4af8918d8569196067dea04df195595dd9876338a9c1fcc" exitCode=1 Jan 22 05:47:36 crc kubenswrapper[4982]: I0122 05:47:36.715404 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4xrw6" event={"ID":"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6","Type":"ContainerDied","Data":"8b107116500053caa4af8918d8569196067dea04df195595dd9876338a9c1fcc"} Jan 22 05:47:36 crc kubenswrapper[4982]: I0122 05:47:36.715525 4982 scope.go:117] "RemoveContainer" containerID="081f2a58469591985ae08e9dad34dc629c48c9736e0605460111893d189f76cf" Jan 22 05:47:36 crc kubenswrapper[4982]: I0122 05:47:36.716416 4982 scope.go:117] "RemoveContainer" containerID="8b107116500053caa4af8918d8569196067dea04df195595dd9876338a9c1fcc" Jan 22 05:47:36 crc kubenswrapper[4982]: E0122 05:47:36.716674 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-4xrw6_openshift-multus(f70d53ec-9c73-45bf-b6b4-ec45565ef1e6)\"" pod="openshift-multus/multus-4xrw6" podUID="f70d53ec-9c73-45bf-b6b4-ec45565ef1e6" Jan 22 05:47:36 crc kubenswrapper[4982]: I0122 05:47:36.721314 4982 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:47:36 crc kubenswrapper[4982]: I0122 05:47:36.721435 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:47:36 crc kubenswrapper[4982]: I0122 05:47:36.721464 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:47:36 crc kubenswrapper[4982]: E0122 05:47:36.721612 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:47:36 crc kubenswrapper[4982]: E0122 05:47:36.723030 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:47:36 crc kubenswrapper[4982]: E0122 05:47:36.723601 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:47:37 crc kubenswrapper[4982]: I0122 05:47:37.718528 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:47:37 crc kubenswrapper[4982]: E0122 05:47:37.718724 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:47:37 crc kubenswrapper[4982]: I0122 05:47:37.721369 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4xrw6_f70d53ec-9c73-45bf-b6b4-ec45565ef1e6/kube-multus/1.log" Jan 22 05:47:38 crc kubenswrapper[4982]: I0122 05:47:38.719064 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:47:38 crc kubenswrapper[4982]: I0122 05:47:38.719216 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:47:38 crc kubenswrapper[4982]: E0122 05:47:38.719265 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:47:38 crc kubenswrapper[4982]: I0122 05:47:38.719435 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:47:38 crc kubenswrapper[4982]: E0122 05:47:38.719479 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:47:38 crc kubenswrapper[4982]: E0122 05:47:38.719785 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:47:39 crc kubenswrapper[4982]: E0122 05:47:39.612241 4982 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Jan 22 05:47:39 crc kubenswrapper[4982]: I0122 05:47:39.719002 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:47:39 crc kubenswrapper[4982]: E0122 05:47:39.721239 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:47:39 crc kubenswrapper[4982]: E0122 05:47:39.809469 4982 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 22 05:47:40 crc kubenswrapper[4982]: I0122 05:47:40.719057 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:47:40 crc kubenswrapper[4982]: I0122 05:47:40.719069 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:47:40 crc kubenswrapper[4982]: I0122 05:47:40.719091 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:47:40 crc kubenswrapper[4982]: E0122 05:47:40.719784 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:47:40 crc kubenswrapper[4982]: E0122 05:47:40.720104 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:47:40 crc kubenswrapper[4982]: E0122 05:47:40.720179 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:47:41 crc kubenswrapper[4982]: I0122 05:47:41.718290 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:47:41 crc kubenswrapper[4982]: E0122 05:47:41.719951 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:47:42 crc kubenswrapper[4982]: I0122 05:47:42.718946 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:47:42 crc kubenswrapper[4982]: I0122 05:47:42.719116 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:47:42 crc kubenswrapper[4982]: E0122 05:47:42.719248 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:47:42 crc kubenswrapper[4982]: I0122 05:47:42.718954 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:47:42 crc kubenswrapper[4982]: E0122 05:47:42.719518 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:47:42 crc kubenswrapper[4982]: E0122 05:47:42.719640 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:47:43 crc kubenswrapper[4982]: I0122 05:47:43.719216 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:47:43 crc kubenswrapper[4982]: E0122 05:47:43.719506 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:47:44 crc kubenswrapper[4982]: I0122 05:47:44.719233 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:47:44 crc kubenswrapper[4982]: I0122 05:47:44.719331 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:47:44 crc kubenswrapper[4982]: I0122 05:47:44.719331 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:47:44 crc kubenswrapper[4982]: E0122 05:47:44.719473 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:47:44 crc kubenswrapper[4982]: E0122 05:47:44.719691 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:47:44 crc kubenswrapper[4982]: E0122 05:47:44.719835 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:47:44 crc kubenswrapper[4982]: E0122 05:47:44.811090 4982 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 22 05:47:45 crc kubenswrapper[4982]: I0122 05:47:45.719072 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:47:45 crc kubenswrapper[4982]: E0122 05:47:45.719264 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:47:45 crc kubenswrapper[4982]: I0122 05:47:45.720074 4982 scope.go:117] "RemoveContainer" containerID="34cb6c04bceeb41b90c18f2025140c1131ca41c0ade06c2447fefe3524744fa4" Jan 22 05:47:46 crc kubenswrapper[4982]: I0122 05:47:46.719056 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:47:46 crc kubenswrapper[4982]: E0122 05:47:46.719183 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:47:46 crc kubenswrapper[4982]: I0122 05:47:46.719394 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:47:46 crc kubenswrapper[4982]: E0122 05:47:46.719441 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:47:46 crc kubenswrapper[4982]: I0122 05:47:46.719530 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:47:46 crc kubenswrapper[4982]: E0122 05:47:46.719574 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:47:46 crc kubenswrapper[4982]: I0122 05:47:46.728053 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-gxwx2"] Jan 22 05:47:46 crc kubenswrapper[4982]: I0122 05:47:46.728145 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:47:46 crc kubenswrapper[4982]: E0122 05:47:46.728237 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:47:46 crc kubenswrapper[4982]: I0122 05:47:46.766334 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mb2qs_45295ff5-bb7d-450f-9ff1-eeb4edb0d705/ovnkube-controller/3.log" Jan 22 05:47:46 crc kubenswrapper[4982]: I0122 05:47:46.770372 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" event={"ID":"45295ff5-bb7d-450f-9ff1-eeb4edb0d705","Type":"ContainerStarted","Data":"ff8445b70de4abf6583e4942c976355794ca8dc1d7de89301c6fff3e50b7efb6"} Jan 22 05:47:46 crc kubenswrapper[4982]: I0122 05:47:46.770950 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:47:46 crc kubenswrapper[4982]: I0122 05:47:46.807655 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" podStartSLOduration=104.807625083 podStartE2EDuration="1m44.807625083s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:47:46.805303256 +0000 UTC m=+127.643941269" watchObservedRunningTime="2026-01-22 05:47:46.807625083 +0000 UTC m=+127.646263096" Jan 22 05:47:48 crc kubenswrapper[4982]: I0122 05:47:48.718574 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:47:48 crc kubenswrapper[4982]: I0122 05:47:48.718574 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:47:48 crc kubenswrapper[4982]: I0122 05:47:48.718620 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:47:48 crc kubenswrapper[4982]: I0122 05:47:48.718702 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:47:48 crc kubenswrapper[4982]: E0122 05:47:48.719190 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:47:48 crc kubenswrapper[4982]: E0122 05:47:48.719496 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:47:48 crc kubenswrapper[4982]: E0122 05:47:48.719766 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:47:48 crc kubenswrapper[4982]: E0122 05:47:48.720050 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:47:49 crc kubenswrapper[4982]: I0122 05:47:49.721823 4982 scope.go:117] "RemoveContainer" containerID="8b107116500053caa4af8918d8569196067dea04df195595dd9876338a9c1fcc" Jan 22 05:47:49 crc kubenswrapper[4982]: E0122 05:47:49.812194 4982 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 22 05:47:50 crc kubenswrapper[4982]: I0122 05:47:50.514360 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:47:50 crc kubenswrapper[4982]: I0122 05:47:50.718533 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:47:50 crc kubenswrapper[4982]: I0122 05:47:50.718702 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:47:50 crc kubenswrapper[4982]: E0122 05:47:50.718765 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:47:50 crc kubenswrapper[4982]: E0122 05:47:50.719112 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:47:50 crc kubenswrapper[4982]: I0122 05:47:50.719229 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:47:50 crc kubenswrapper[4982]: E0122 05:47:50.719336 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:47:50 crc kubenswrapper[4982]: I0122 05:47:50.719419 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:47:50 crc kubenswrapper[4982]: E0122 05:47:50.719499 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:47:50 crc kubenswrapper[4982]: I0122 05:47:50.798982 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4xrw6_f70d53ec-9c73-45bf-b6b4-ec45565ef1e6/kube-multus/1.log" Jan 22 05:47:50 crc kubenswrapper[4982]: I0122 05:47:50.799050 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4xrw6" event={"ID":"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6","Type":"ContainerStarted","Data":"fc395d6b6126f9c3f97817e0faf0cff772c0dd4a55c3471a807153cc002852ae"} Jan 22 05:47:52 crc kubenswrapper[4982]: I0122 05:47:52.718685 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:47:52 crc kubenswrapper[4982]: I0122 05:47:52.718754 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:47:52 crc kubenswrapper[4982]: I0122 05:47:52.718907 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:47:52 crc kubenswrapper[4982]: E0122 05:47:52.718956 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:47:52 crc kubenswrapper[4982]: I0122 05:47:52.719085 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:47:52 crc kubenswrapper[4982]: E0122 05:47:52.719258 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:47:52 crc kubenswrapper[4982]: E0122 05:47:52.719417 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:47:52 crc kubenswrapper[4982]: E0122 05:47:52.719521 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:47:54 crc kubenswrapper[4982]: I0122 05:47:54.719084 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:47:54 crc kubenswrapper[4982]: I0122 05:47:54.719198 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:47:54 crc kubenswrapper[4982]: I0122 05:47:54.719218 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:47:54 crc kubenswrapper[4982]: I0122 05:47:54.719131 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:47:54 crc kubenswrapper[4982]: E0122 05:47:54.719427 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gxwx2" podUID="377cd579-2ade-48ea-ad2a-44d1546fd5fb" Jan 22 05:47:54 crc kubenswrapper[4982]: E0122 05:47:54.719585 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 05:47:54 crc kubenswrapper[4982]: E0122 05:47:54.719674 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 05:47:54 crc kubenswrapper[4982]: E0122 05:47:54.719789 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 05:47:56 crc kubenswrapper[4982]: I0122 05:47:56.718956 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:47:56 crc kubenswrapper[4982]: I0122 05:47:56.719018 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:47:56 crc kubenswrapper[4982]: I0122 05:47:56.719074 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:47:56 crc kubenswrapper[4982]: I0122 05:47:56.719091 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:47:56 crc kubenswrapper[4982]: I0122 05:47:56.723602 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 22 05:47:56 crc kubenswrapper[4982]: I0122 05:47:56.723677 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 22 05:47:56 crc kubenswrapper[4982]: I0122 05:47:56.724029 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 22 05:47:56 crc kubenswrapper[4982]: I0122 05:47:56.723602 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 22 05:47:56 crc kubenswrapper[4982]: I0122 05:47:56.725226 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 22 05:47:56 crc kubenswrapper[4982]: I0122 05:47:56.726527 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.388752 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.438183 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-t8h5w"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.438681 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-t8h5w"
Jan 22 05:48:04 crc kubenswrapper[4982]: W0122 05:48:04.441199 4982 reflector.go:561] object-"openshift-apiserver"/"encryption-config-1": failed to list *v1.Secret: secrets "encryption-config-1" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-apiserver": no relationship found between node 'crc' and this object
Jan 22 05:48:04 crc kubenswrapper[4982]: E0122 05:48:04.441261 4982 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver\"/\"encryption-config-1\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"encryption-config-1\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Jan 22 05:48:04 crc kubenswrapper[4982]: W0122 05:48:04.442614 4982 reflector.go:561] object-"openshift-apiserver"/"etcd-client": failed to list *v1.Secret: secrets "etcd-client" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-apiserver": no relationship found between node 'crc' and this object
Jan 22 05:48:04 crc kubenswrapper[4982]: E0122 05:48:04.442643 4982 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver\"/\"etcd-client\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"etcd-client\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.442747 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1"
Jan 22 05:48:04 crc kubenswrapper[4982]: W0122 05:48:04.442942 4982 reflector.go:561] object-"openshift-apiserver"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-apiserver": no relationship found between node 'crc' and this object
Jan 22 05:48:04 crc kubenswrapper[4982]: E0122 05:48:04.442957 4982 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.443090 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca"
Jan 22 05:48:04 crc kubenswrapper[4982]: W0122 05:48:04.443546 4982 reflector.go:561] object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff": failed to list *v1.Secret: secrets "openshift-apiserver-sa-dockercfg-djjff" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-apiserver": no relationship found between node 'crc' and this object
Jan 22 05:48:04 crc kubenswrapper[4982]: E0122 05:48:04.443573 4982 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver\"/\"openshift-apiserver-sa-dockercfg-djjff\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"openshift-apiserver-sa-dockercfg-djjff\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Jan 22 05:48:04 crc kubenswrapper[4982]: W0122 05:48:04.443629 4982 reflector.go:561] object-"openshift-apiserver"/"config": failed to list *v1.ConfigMap: configmaps "config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-apiserver": no relationship found between node 'crc' and this object
Jan 22 05:48:04 crc kubenswrapper[4982]: E0122 05:48:04.443642 4982 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver\"/\"config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.444682 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.444946 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.445276 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.453646 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-wb9ck"]
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.454872 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-wb9ck"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.456195 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fqnps"]
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.456813 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fqnps"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.462176 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-2rfct"]
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.463374 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.463690 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.477306 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-2rfct"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.477904 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.482139 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.482169 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-bv2fn"]
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.482727 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-5wl4v"]
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.482972 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.483067 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-5wl4v"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.483316 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.483449 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bv2fn"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.483499 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.483542 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.483691 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.484054 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.484303 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.485059 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-dwszh"]
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.485118 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.485977 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-j82cn"]
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.486435 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-j82cn"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.486831 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-dwszh"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.487893 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-jbn6z"]
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.492449 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.492713 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.492870 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.493010 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.493157 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.493207 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.493338 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.493450 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.493622 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.494427 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-cw47t"]
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.495128 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-jbn6z"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.495188 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4sqvk"]
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.495656 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-frnjg"]
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.495865 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cw47t"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.496076 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j"]
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.496125 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4sqvk"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.496435 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-frnjg"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.496599 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gsktj"]
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.496943 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-t8h5w"]
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.497040 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gsktj"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.497393 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.498144 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.498329 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.498456 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.498537 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.499885 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.504266 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.505814 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.506072 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.507781 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.508007 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.509055 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.509766 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.509887 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.510187 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.510733 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.510825 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.510871 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.510963 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.511003 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.511042 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.511131 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.511202 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.511249 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.511327 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.511357 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.511479 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.511548 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.511622 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.511660 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.511759 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.511792 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.511871 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.511916 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.511948 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.510744 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.511493 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.512028 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.511211 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.513564 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.513716 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.523059 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.525586 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.526947 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.527231 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.536058 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.539257 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lbczg"]
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.539897 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.544003 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-dwszh"]
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.544240 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-lbczg"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.544557 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.545436 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.545481 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-5wl4v"]
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.561943 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fqnps"]
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.562156 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.565409 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-jbn6z"]
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.565457 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gsktj"]
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.565469 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-wb9ck"]
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.565673 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.566455 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.567370 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.567963 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.568179 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.568354 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.568599 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.568812 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.569692 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.569966 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.571195 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.571581 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.577981 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-j82cn"]
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.578032 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-cw47t"]
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.578043 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-frnjg"]
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.578516 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.578774 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.579225 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.582021 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.584131 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-2rfct"]
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.589437 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.590214 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.594225 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.595552 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j"]
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.596964 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4sqvk"]
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.598139 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/3a5a9a72-e46b-4794-8141-04583a99a97a-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-wb9ck\" (UID: \"3a5a9a72-e46b-4794-8141-04583a99a97a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wb9ck"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.598180 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b1b54ad9-d7a6-4f85-922f-a1de9574eefb-trusted-ca\") pod \"console-operator-58897d9998-5wl4v\" (UID: \"b1b54ad9-d7a6-4f85-922f-a1de9574eefb\") " pod="openshift-console-operator/console-operator-58897d9998-5wl4v"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.598204 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-srnjw\" (UniqueName: \"kubernetes.io/projected/3a5a9a72-e46b-4794-8141-04583a99a97a-kube-api-access-srnjw\") pod \"machine-api-operator-5694c8668f-wb9ck\" (UID: \"3a5a9a72-e46b-4794-8141-04583a99a97a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wb9ck"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.598224 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-trusted-ca-bundle\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.598240 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-audit-dir\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.598257 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4b05331c-0a14-4806-8dcc-6ccd6dece1a9-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-frnjg\" (UID: \"4b05331c-0a14-4806-8dcc-6ccd6dece1a9\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-frnjg"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.598278 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4b05331c-0a14-4806-8dcc-6ccd6dece1a9-serving-cert\") pod \"authentication-operator-69f744f599-frnjg\" (UID: \"4b05331c-0a14-4806-8dcc-6ccd6dece1a9\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-frnjg"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.598298 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/f9321c52-4339-4975-9345-2b4c0f3cb80d-machine-approver-tls\") pod \"machine-approver-56656f9798-bv2fn\" (UID: \"f9321c52-4339-4975-9345-2b4c0f3cb80d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bv2fn"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.598337 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-config\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.598364 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-audit\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.598384 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dvwgk\" (UniqueName: \"kubernetes.io/projected/d07c1ac4-7087-47d2-b0d1-c3b840d4678a-kube-api-access-dvwgk\") pod \"downloads-7954f5f757-j82cn\" (UID: \"d07c1ac4-7087-47d2-b0d1-c3b840d4678a\") " pod="openshift-console/downloads-7954f5f757-j82cn"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.598416 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/ff102720-f36a-467b-8ae8-b9a637d6f34a-etcd-client\") pod \"apiserver-7bbb656c7d-sjh6j\" (UID: \"ff102720-f36a-467b-8ae8-b9a637d6f34a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.598435 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/90139865-0d47-4b07-9398-2ee5346f819c-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-4sqvk\" (UID: \"90139865-0d47-4b07-9398-2ee5346f819c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4sqvk"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.598456 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a9dca4eb-9076-4a32-a851-55d4649cdbf2-console-serving-cert\") pod \"console-f9d7485db-2rfct\" (UID: \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\") " pod="openshift-console/console-f9d7485db-2rfct"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.598473 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/3b944f9c-2286-488a-b318-7f1e9feb2943-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-dwszh\" (UID: \"3b944f9c-2286-488a-b318-7f1e9feb2943\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-dwszh"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.598493 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1b54ad9-d7a6-4f85-922f-a1de9574eefb-config\") pod \"console-operator-58897d9998-5wl4v\" (UID: \"b1b54ad9-d7a6-4f85-922f-a1de9574eefb\") " pod="openshift-console-operator/console-operator-58897d9998-5wl4v"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.598513 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-serving-cert\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.598534 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/90139865-0d47-4b07-9398-2ee5346f819c-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-4sqvk\" (UID: \"90139865-0d47-4b07-9398-2ee5346f819c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4sqvk"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.598563 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kplwh\" (UniqueName: \"kubernetes.io/projected/1b5ce2a2-8a71-4fbe-a860-c89c1be82f08-kube-api-access-kplwh\") pod \"route-controller-manager-6576b87f9c-gsktj\" (UID: \"1b5ce2a2-8a71-4fbe-a860-c89c1be82f08\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gsktj"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.598608 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xxqb\" (UniqueName: \"kubernetes.io/projected/a9dca4eb-9076-4a32-a851-55d4649cdbf2-kube-api-access-2xxqb\") pod \"console-f9d7485db-2rfct\" (UID: \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\") " pod="openshift-console/console-f9d7485db-2rfct"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.598639 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49f30341-f779-4860-b675-08eca29196b0-config\") pod \"openshift-apiserver-operator-796bbdcf4f-fqnps\" (UID: \"49f30341-f779-4860-b675-08eca29196b0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fqnps"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.598684 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/90139865-0d47-4b07-9398-2ee5346f819c-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-4sqvk\" (UID: \"90139865-0d47-4b07-9398-2ee5346f819c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4sqvk"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.598712 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b05331c-0a14-4806-8dcc-6ccd6dece1a9-config\") pod \"authentication-operator-69f744f599-frnjg\" (UID: \"4b05331c-0a14-4806-8dcc-6ccd6dece1a9\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-frnjg"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.598741 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/41db98f4-c026-42e8-8486-412643968146-serving-cert\") pod \"controller-manager-879f6c89f-jbn6z\" (UID: \"41db98f4-c026-42e8-8486-412643968146\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jbn6z"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.598758 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a9dca4eb-9076-4a32-a851-55d4649cdbf2-trusted-ca-bundle\") pod \"console-f9d7485db-2rfct\" (UID: \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\") " pod="openshift-console/console-f9d7485db-2rfct"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.598774 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ff102720-f36a-467b-8ae8-b9a637d6f34a-audit-dir\") pod \"apiserver-7bbb656c7d-sjh6j\" (UID: \"ff102720-f36a-467b-8ae8-b9a637d6f34a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.598814 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-image-import-ca\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.598838 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1b5ce2a2-8a71-4fbe-a860-c89c1be82f08-client-ca\") pod \"route-controller-manager-6576b87f9c-gsktj\" (UID: \"1b5ce2a2-8a71-4fbe-a860-c89c1be82f08\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gsktj"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.598993 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hwkk\" (UniqueName: \"kubernetes.io/projected/4b05331c-0a14-4806-8dcc-6ccd6dece1a9-kube-api-access-8hwkk\") pod \"authentication-operator-69f744f599-frnjg\" (UID: \"4b05331c-0a14-4806-8dcc-6ccd6dece1a9\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-frnjg"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599012 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ff102720-f36a-467b-8ae8-b9a637d6f34a-serving-cert\") pod \"apiserver-7bbb656c7d-sjh6j\" (UID: \"ff102720-f36a-467b-8ae8-b9a637d6f34a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599046 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8x47\" (UniqueName: \"kubernetes.io/projected/706d9ba1-21f0-4e5d-b996-04257e5a2441-kube-api-access-t8x47\") pod \"openshift-config-operator-7777fb866f-cw47t\" (UID: \"706d9ba1-21f0-4e5d-b996-04257e5a2441\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cw47t"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599066 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ff102720-f36a-467b-8ae8-b9a637d6f34a-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-sjh6j\" (UID: \"ff102720-f36a-467b-8ae8-b9a637d6f34a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599082 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1b5ce2a2-8a71-4fbe-a860-c89c1be82f08-serving-cert\") pod \"route-controller-manager-6576b87f9c-gsktj\" (UID: \"1b5ce2a2-8a71-4fbe-a860-c89c1be82f08\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gsktj"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599098 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/706d9ba1-21f0-4e5d-b996-04257e5a2441-serving-cert\") pod \"openshift-config-operator-7777fb866f-cw47t\" (UID: \"706d9ba1-21f0-4e5d-b996-04257e5a2441\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cw47t"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599122 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-etcd-serving-ca\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599151 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/706d9ba1-21f0-4e5d-b996-04257e5a2441-available-featuregates\") pod \"openshift-config-operator-7777fb866f-cw47t\" (UID: \"706d9ba1-21f0-4e5d-b996-04257e5a2441\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cw47t"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599168 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/3a5a9a72-e46b-4794-8141-04583a99a97a-images\") pod \"machine-api-operator-5694c8668f-wb9ck\" (UID: \"3a5a9a72-e46b-4794-8141-04583a99a97a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wb9ck"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599212 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/ff102720-f36a-467b-8ae8-b9a637d6f34a-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-sjh6j\" (UID: \"ff102720-f36a-467b-8ae8-b9a637d6f34a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599246 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b5ce2a2-8a71-4fbe-a860-c89c1be82f08-config\") pod \"route-controller-manager-6576b87f9c-gsktj\" (UID: \"1b5ce2a2-8a71-4fbe-a860-c89c1be82f08\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gsktj"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599269 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b1b54ad9-d7a6-4f85-922f-a1de9574eefb-serving-cert\") pod \"console-operator-58897d9998-5wl4v\" (UID: \"b1b54ad9-d7a6-4f85-922f-a1de9574eefb\") " pod="openshift-console-operator/console-operator-58897d9998-5wl4v"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599287 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41db98f4-c026-42e8-8486-412643968146-config\") pod \"controller-manager-879f6c89f-jbn6z\" (UID: \"41db98f4-c026-42e8-8486-412643968146\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jbn6z"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599302 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a9dca4eb-9076-4a32-a851-55d4649cdbf2-service-ca\") pod \"console-f9d7485db-2rfct\" (UID: \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\") " pod="openshift-console/console-f9d7485db-2rfct"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599320 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9cwq6\" (UniqueName: \"kubernetes.io/projected/3b944f9c-2286-488a-b318-7f1e9feb2943-kube-api-access-9cwq6\") pod \"cluster-samples-operator-665b6dd947-dwszh\" (UID: \"3b944f9c-2286-488a-b318-7f1e9feb2943\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-dwszh"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599370 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9321c52-4339-4975-9345-2b4c0f3cb80d-config\") pod \"machine-approver-56656f9798-bv2fn\" (UID: \"f9321c52-4339-4975-9345-2b4c0f3cb80d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bv2fn"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599395 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/41db98f4-c026-42e8-8486-412643968146-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-jbn6z\" (UID: \"41db98f4-c026-42e8-8486-412643968146\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jbn6z"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599420 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8cll5\" (UniqueName: \"kubernetes.io/projected/b1b54ad9-d7a6-4f85-922f-a1de9574eefb-kube-api-access-8cll5\") pod \"console-operator-58897d9998-5wl4v\" (UID: \"b1b54ad9-d7a6-4f85-922f-a1de9574eefb\") " pod="openshift-console-operator/console-operator-58897d9998-5wl4v"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599437 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f9321c52-4339-4975-9345-2b4c0f3cb80d-auth-proxy-config\") pod \"machine-approver-56656f9798-bv2fn\" (UID: \"f9321c52-4339-4975-9345-2b4c0f3cb80d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bv2fn"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599451 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a5a9a72-e46b-4794-8141-04583a99a97a-config\") pod \"machine-api-operator-5694c8668f-wb9ck\" (UID: \"3a5a9a72-e46b-4794-8141-04583a99a97a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wb9ck"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599577 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/ff102720-f36a-467b-8ae8-b9a637d6f34a-encryption-config\") pod \"apiserver-7bbb656c7d-sjh6j\" (UID: \"ff102720-f36a-467b-8ae8-b9a637d6f34a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599606 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2l5f\" (UniqueName: \"kubernetes.io/projected/90139865-0d47-4b07-9398-2ee5346f819c-kube-api-access-d2l5f\") pod \"cluster-image-registry-operator-dc59b4c8b-4sqvk\" (UID: \"90139865-0d47-4b07-9398-2ee5346f819c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4sqvk"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599639 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/49f30341-f779-4860-b675-08eca29196b0-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-fqnps\" (UID: \"49f30341-f779-4860-b675-08eca29196b0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fqnps"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599662 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzj5h\" (UniqueName: \"kubernetes.io/projected/ff102720-f36a-467b-8ae8-b9a637d6f34a-kube-api-access-jzj5h\") pod \"apiserver-7bbb656c7d-sjh6j\" (UID: \"ff102720-f36a-467b-8ae8-b9a637d6f34a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599689 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v45w4\" (UniqueName: \"kubernetes.io/projected/41db98f4-c026-42e8-8486-412643968146-kube-api-access-v45w4\") pod \"controller-manager-879f6c89f-jbn6z\" (UID: \"41db98f4-c026-42e8-8486-412643968146\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jbn6z"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599718 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a9dca4eb-9076-4a32-a851-55d4649cdbf2-console-config\") pod \"console-f9d7485db-2rfct\" (UID: \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\") " pod="openshift-console/console-f9d7485db-2rfct"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599735 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5rpw\" (UniqueName: \"kubernetes.io/projected/f9321c52-4339-4975-9345-2b4c0f3cb80d-kube-api-access-k5rpw\") pod \"machine-approver-56656f9798-bv2fn\" (UID: \"f9321c52-4339-4975-9345-2b4c0f3cb80d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bv2fn"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599778 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4b05331c-0a14-4806-8dcc-6ccd6dece1a9-service-ca-bundle\") pod \"authentication-operator-69f744f599-frnjg\" (UID: \"4b05331c-0a14-4806-8dcc-6ccd6dece1a9\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-frnjg"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599809 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4nd6\" (UniqueName: \"kubernetes.io/projected/49f30341-f779-4860-b675-08eca29196b0-kube-api-access-d4nd6\") pod \"openshift-apiserver-operator-796bbdcf4f-fqnps\" (UID: \"49f30341-f779-4860-b675-08eca29196b0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fqnps"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599824 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/41db98f4-c026-42e8-8486-412643968146-client-ca\") pod \"controller-manager-879f6c89f-jbn6z\" (UID: \"41db98f4-c026-42e8-8486-412643968146\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jbn6z"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599862 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a9dca4eb-9076-4a32-a851-55d4649cdbf2-console-oauth-config\") pod \"console-f9d7485db-2rfct\" (UID: \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\") " pod="openshift-console/console-f9d7485db-2rfct"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599901 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ff102720-f36a-467b-8ae8-b9a637d6f34a-audit-policies\") pod \"apiserver-7bbb656c7d-sjh6j\" (UID: \"ff102720-f36a-467b-8ae8-b9a637d6f34a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599927 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-etcd-client\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599942 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-node-pullsecrets\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.599979 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a9dca4eb-9076-4a32-a851-55d4649cdbf2-oauth-serving-cert\") pod \"console-f9d7485db-2rfct\" (UID: \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\") " pod="openshift-console/console-f9d7485db-2rfct"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.600014 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lbczg"]
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.600025 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mms7\" (UniqueName: \"kubernetes.io/projected/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-kube-api-access-4mms7\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.600114 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-encryption-config\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.701796 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a5a9a72-e46b-4794-8141-04583a99a97a-config\") pod \"machine-api-operator-5694c8668f-wb9ck\" (UID: \"3a5a9a72-e46b-4794-8141-04583a99a97a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wb9ck"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.701876 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/ff102720-f36a-467b-8ae8-b9a637d6f34a-encryption-config\") pod \"apiserver-7bbb656c7d-sjh6j\" (UID: \"ff102720-f36a-467b-8ae8-b9a637d6f34a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.701910 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.701950 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2l5f\" (UniqueName: \"kubernetes.io/projected/90139865-0d47-4b07-9398-2ee5346f819c-kube-api-access-d2l5f\") pod \"cluster-image-registry-operator-dc59b4c8b-4sqvk\" (UID: \"90139865-0d47-4b07-9398-2ee5346f819c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4sqvk"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.701975 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.701999 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/49f30341-f779-4860-b675-08eca29196b0-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-fqnps\" (UID: \"49f30341-f779-4860-b675-08eca29196b0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fqnps"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702023 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzj5h\" (UniqueName: \"kubernetes.io/projected/ff102720-f36a-467b-8ae8-b9a637d6f34a-kube-api-access-jzj5h\") pod \"apiserver-7bbb656c7d-sjh6j\" (UID: \"ff102720-f36a-467b-8ae8-b9a637d6f34a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702045 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702070 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4b05331c-0a14-4806-8dcc-6ccd6dece1a9-service-ca-bundle\") pod \"authentication-operator-69f744f599-frnjg\" (UID: \"4b05331c-0a14-4806-8dcc-6ccd6dece1a9\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-frnjg"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702093 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v45w4\" (UniqueName: \"kubernetes.io/projected/41db98f4-c026-42e8-8486-412643968146-kube-api-access-v45w4\") pod \"controller-manager-879f6c89f-jbn6z\" (UID: \"41db98f4-c026-42e8-8486-412643968146\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jbn6z"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702123 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a9dca4eb-9076-4a32-a851-55d4649cdbf2-console-config\") pod \"console-f9d7485db-2rfct\" (UID: \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\") " pod="openshift-console/console-f9d7485db-2rfct"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702142 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5rpw\" (UniqueName: \"kubernetes.io/projected/f9321c52-4339-4975-9345-2b4c0f3cb80d-kube-api-access-k5rpw\") pod \"machine-approver-56656f9798-bv2fn\" (UID: \"f9321c52-4339-4975-9345-2b4c0f3cb80d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bv2fn"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702165 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4nd6\" (UniqueName: \"kubernetes.io/projected/49f30341-f779-4860-b675-08eca29196b0-kube-api-access-d4nd6\") pod \"openshift-apiserver-operator-796bbdcf4f-fqnps\" (UID: \"49f30341-f779-4860-b675-08eca29196b0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fqnps"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702187 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702213 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-etcd-client\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702233 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/41db98f4-c026-42e8-8486-412643968146-client-ca\") pod \"controller-manager-879f6c89f-jbn6z\" (UID: \"41db98f4-c026-42e8-8486-412643968146\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jbn6z"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702263 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a9dca4eb-9076-4a32-a851-55d4649cdbf2-console-oauth-config\") pod \"console-f9d7485db-2rfct\" (UID: \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\") " pod="openshift-console/console-f9d7485db-2rfct"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702285 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ff102720-f36a-467b-8ae8-b9a637d6f34a-audit-policies\") pod \"apiserver-7bbb656c7d-sjh6j\" (UID: \"ff102720-f36a-467b-8ae8-b9a637d6f34a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702303 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-node-pullsecrets\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702320 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mms7\" (UniqueName: \"kubernetes.io/projected/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-kube-api-access-4mms7\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702338 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a9dca4eb-9076-4a32-a851-55d4649cdbf2-oauth-serving-cert\") pod \"console-f9d7485db-2rfct\" (UID: \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\") " pod="openshift-console/console-f9d7485db-2rfct"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702355 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-encryption-config\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702374 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/3a5a9a72-e46b-4794-8141-04583a99a97a-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-wb9ck\" (UID: \"3a5a9a72-e46b-4794-8141-04583a99a97a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wb9ck"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702397 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702418 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b1b54ad9-d7a6-4f85-922f-a1de9574eefb-trusted-ca\") pod \"console-operator-58897d9998-5wl4v\" (UID: \"b1b54ad9-d7a6-4f85-922f-a1de9574eefb\") " pod="openshift-console-operator/console-operator-58897d9998-5wl4v"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702437 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-srnjw\" (UniqueName: \"kubernetes.io/projected/3a5a9a72-e46b-4794-8141-04583a99a97a-kube-api-access-srnjw\") pod \"machine-api-operator-5694c8668f-wb9ck\" (UID: \"3a5a9a72-e46b-4794-8141-04583a99a97a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wb9ck"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702456 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-trusted-ca-bundle\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702476 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-audit-dir\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702509 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-audit-dir\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702557 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-node-pullsecrets\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702592 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4b05331c-0a14-4806-8dcc-6ccd6dece1a9-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-frnjg\" (UID: \"4b05331c-0a14-4806-8dcc-6ccd6dece1a9\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-frnjg"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702667 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4b05331c-0a14-4806-8dcc-6ccd6dece1a9-serving-cert\") pod \"authentication-operator-69f744f599-frnjg\" (UID: \"4b05331c-0a14-4806-8dcc-6ccd6dece1a9\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-frnjg"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702702 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/f9321c52-4339-4975-9345-2b4c0f3cb80d-machine-approver-tls\") pod \"machine-approver-56656f9798-bv2fn\" (UID: \"f9321c52-4339-4975-9345-2b4c0f3cb80d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bv2fn"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702726 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-config\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w"
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702759 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") "
pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702785 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/ff102720-f36a-467b-8ae8-b9a637d6f34a-etcd-client\") pod \"apiserver-7bbb656c7d-sjh6j\" (UID: \"ff102720-f36a-467b-8ae8-b9a637d6f34a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702795 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a5a9a72-e46b-4794-8141-04583a99a97a-config\") pod \"machine-api-operator-5694c8668f-wb9ck\" (UID: \"3a5a9a72-e46b-4794-8141-04583a99a97a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wb9ck" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702807 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-audit\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702910 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dvwgk\" (UniqueName: \"kubernetes.io/projected/d07c1ac4-7087-47d2-b0d1-c3b840d4678a-kube-api-access-dvwgk\") pod \"downloads-7954f5f757-j82cn\" (UID: \"d07c1ac4-7087-47d2-b0d1-c3b840d4678a\") " pod="openshift-console/downloads-7954f5f757-j82cn" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702936 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/90139865-0d47-4b07-9398-2ee5346f819c-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-4sqvk\" (UID: \"90139865-0d47-4b07-9398-2ee5346f819c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4sqvk" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702968 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.702994 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703015 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a9dca4eb-9076-4a32-a851-55d4649cdbf2-console-serving-cert\") pod \"console-f9d7485db-2rfct\" (UID: \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\") " pod="openshift-console/console-f9d7485db-2rfct" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703035 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" 
(UniqueName: \"kubernetes.io/secret/3b944f9c-2286-488a-b318-7f1e9feb2943-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-dwszh\" (UID: \"3b944f9c-2286-488a-b318-7f1e9feb2943\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-dwszh" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703055 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1b54ad9-d7a6-4f85-922f-a1de9574eefb-config\") pod \"console-operator-58897d9998-5wl4v\" (UID: \"b1b54ad9-d7a6-4f85-922f-a1de9574eefb\") " pod="openshift-console-operator/console-operator-58897d9998-5wl4v" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703074 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703094 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-serving-cert\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703117 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/90139865-0d47-4b07-9398-2ee5346f819c-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-4sqvk\" (UID: \"90139865-0d47-4b07-9398-2ee5346f819c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4sqvk" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703138 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kplwh\" (UniqueName: \"kubernetes.io/projected/1b5ce2a2-8a71-4fbe-a860-c89c1be82f08-kube-api-access-kplwh\") pod \"route-controller-manager-6576b87f9c-gsktj\" (UID: \"1b5ce2a2-8a71-4fbe-a860-c89c1be82f08\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gsktj" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703155 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703178 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xxqb\" (UniqueName: \"kubernetes.io/projected/a9dca4eb-9076-4a32-a851-55d4649cdbf2-kube-api-access-2xxqb\") pod \"console-f9d7485db-2rfct\" (UID: \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\") " pod="openshift-console/console-f9d7485db-2rfct" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703198 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49f30341-f779-4860-b675-08eca29196b0-config\") pod 
\"openshift-apiserver-operator-796bbdcf4f-fqnps\" (UID: \"49f30341-f779-4860-b675-08eca29196b0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fqnps" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703214 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/41db98f4-c026-42e8-8486-412643968146-serving-cert\") pod \"controller-manager-879f6c89f-jbn6z\" (UID: \"41db98f4-c026-42e8-8486-412643968146\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jbn6z" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703233 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/90139865-0d47-4b07-9398-2ee5346f819c-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-4sqvk\" (UID: \"90139865-0d47-4b07-9398-2ee5346f819c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4sqvk" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703267 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b05331c-0a14-4806-8dcc-6ccd6dece1a9-config\") pod \"authentication-operator-69f744f599-frnjg\" (UID: \"4b05331c-0a14-4806-8dcc-6ccd6dece1a9\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-frnjg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703315 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a9dca4eb-9076-4a32-a851-55d4649cdbf2-trusted-ca-bundle\") pod \"console-f9d7485db-2rfct\" (UID: \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\") " pod="openshift-console/console-f9d7485db-2rfct" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703356 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ff102720-f36a-467b-8ae8-b9a637d6f34a-audit-dir\") pod \"apiserver-7bbb656c7d-sjh6j\" (UID: \"ff102720-f36a-467b-8ae8-b9a637d6f34a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703386 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-image-import-ca\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703413 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ff102720-f36a-467b-8ae8-b9a637d6f34a-serving-cert\") pod \"apiserver-7bbb656c7d-sjh6j\" (UID: \"ff102720-f36a-467b-8ae8-b9a637d6f34a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703438 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1b5ce2a2-8a71-4fbe-a860-c89c1be82f08-client-ca\") pod \"route-controller-manager-6576b87f9c-gsktj\" (UID: \"1b5ce2a2-8a71-4fbe-a860-c89c1be82f08\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gsktj" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703467 4982 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hwkk\" (UniqueName: \"kubernetes.io/projected/4b05331c-0a14-4806-8dcc-6ccd6dece1a9-kube-api-access-8hwkk\") pod \"authentication-operator-69f744f599-frnjg\" (UID: \"4b05331c-0a14-4806-8dcc-6ccd6dece1a9\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-frnjg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703497 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8x47\" (UniqueName: \"kubernetes.io/projected/706d9ba1-21f0-4e5d-b996-04257e5a2441-kube-api-access-t8x47\") pod \"openshift-config-operator-7777fb866f-cw47t\" (UID: \"706d9ba1-21f0-4e5d-b996-04257e5a2441\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cw47t" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703524 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ff102720-f36a-467b-8ae8-b9a637d6f34a-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-sjh6j\" (UID: \"ff102720-f36a-467b-8ae8-b9a637d6f34a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703548 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1b5ce2a2-8a71-4fbe-a860-c89c1be82f08-serving-cert\") pod \"route-controller-manager-6576b87f9c-gsktj\" (UID: \"1b5ce2a2-8a71-4fbe-a860-c89c1be82f08\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gsktj" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703559 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4b05331c-0a14-4806-8dcc-6ccd6dece1a9-service-ca-bundle\") pod \"authentication-operator-69f744f599-frnjg\" (UID: \"4b05331c-0a14-4806-8dcc-6ccd6dece1a9\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-frnjg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703576 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/706d9ba1-21f0-4e5d-b996-04257e5a2441-serving-cert\") pod \"openshift-config-operator-7777fb866f-cw47t\" (UID: \"706d9ba1-21f0-4e5d-b996-04257e5a2441\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cw47t" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703608 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/3a5a9a72-e46b-4794-8141-04583a99a97a-images\") pod \"machine-api-operator-5694c8668f-wb9ck\" (UID: \"3a5a9a72-e46b-4794-8141-04583a99a97a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wb9ck" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703563 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a9dca4eb-9076-4a32-a851-55d4649cdbf2-oauth-serving-cert\") pod \"console-f9d7485db-2rfct\" (UID: \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\") " pod="openshift-console/console-f9d7485db-2rfct" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703634 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: 
\"kubernetes.io/configmap/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-etcd-serving-ca\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703664 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3ab253fb-76c6-4f2c-b718-c37afd8540ff-audit-dir\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703701 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/706d9ba1-21f0-4e5d-b996-04257e5a2441-available-featuregates\") pod \"openshift-config-operator-7777fb866f-cw47t\" (UID: \"706d9ba1-21f0-4e5d-b996-04257e5a2441\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cw47t" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703728 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwv6m\" (UniqueName: \"kubernetes.io/projected/3ab253fb-76c6-4f2c-b718-c37afd8540ff-kube-api-access-pwv6m\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703753 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b1b54ad9-d7a6-4f85-922f-a1de9574eefb-serving-cert\") pod \"console-operator-58897d9998-5wl4v\" (UID: \"b1b54ad9-d7a6-4f85-922f-a1de9574eefb\") " pod="openshift-console-operator/console-operator-58897d9998-5wl4v" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703779 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/ff102720-f36a-467b-8ae8-b9a637d6f34a-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-sjh6j\" (UID: \"ff102720-f36a-467b-8ae8-b9a637d6f34a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703803 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b5ce2a2-8a71-4fbe-a860-c89c1be82f08-config\") pod \"route-controller-manager-6576b87f9c-gsktj\" (UID: \"1b5ce2a2-8a71-4fbe-a860-c89c1be82f08\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gsktj" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703830 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41db98f4-c026-42e8-8486-412643968146-config\") pod \"controller-manager-879f6c89f-jbn6z\" (UID: \"41db98f4-c026-42e8-8486-412643968146\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jbn6z" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703870 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b05331c-0a14-4806-8dcc-6ccd6dece1a9-config\") pod \"authentication-operator-69f744f599-frnjg\" (UID: \"4b05331c-0a14-4806-8dcc-6ccd6dece1a9\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-frnjg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.703904 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3ab253fb-76c6-4f2c-b718-c37afd8540ff-audit-policies\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.704566 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/41db98f4-c026-42e8-8486-412643968146-client-ca\") pod \"controller-manager-879f6c89f-jbn6z\" (UID: \"41db98f4-c026-42e8-8486-412643968146\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jbn6z" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.705015 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/3a5a9a72-e46b-4794-8141-04583a99a97a-images\") pod \"machine-api-operator-5694c8668f-wb9ck\" (UID: \"3a5a9a72-e46b-4794-8141-04583a99a97a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wb9ck" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.705230 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4b05331c-0a14-4806-8dcc-6ccd6dece1a9-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-frnjg\" (UID: \"4b05331c-0a14-4806-8dcc-6ccd6dece1a9\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-frnjg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.705353 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49f30341-f779-4860-b675-08eca29196b0-config\") pod \"openshift-apiserver-operator-796bbdcf4f-fqnps\" (UID: \"49f30341-f779-4860-b675-08eca29196b0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fqnps" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.705536 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41db98f4-c026-42e8-8486-412643968146-config\") pod \"controller-manager-879f6c89f-jbn6z\" (UID: \"41db98f4-c026-42e8-8486-412643968146\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jbn6z" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.706289 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1b54ad9-d7a6-4f85-922f-a1de9574eefb-config\") pod \"console-operator-58897d9998-5wl4v\" (UID: \"b1b54ad9-d7a6-4f85-922f-a1de9574eefb\") " pod="openshift-console-operator/console-operator-58897d9998-5wl4v" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.706550 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1b5ce2a2-8a71-4fbe-a860-c89c1be82f08-client-ca\") pod \"route-controller-manager-6576b87f9c-gsktj\" (UID: \"1b5ce2a2-8a71-4fbe-a860-c89c1be82f08\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gsktj" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.707296 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: 
\"kubernetes.io/configmap/ff102720-f36a-467b-8ae8-b9a637d6f34a-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-sjh6j\" (UID: \"ff102720-f36a-467b-8ae8-b9a637d6f34a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.707517 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a9dca4eb-9076-4a32-a851-55d4649cdbf2-console-config\") pod \"console-f9d7485db-2rfct\" (UID: \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\") " pod="openshift-console/console-f9d7485db-2rfct" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.707590 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b5ce2a2-8a71-4fbe-a860-c89c1be82f08-config\") pod \"route-controller-manager-6576b87f9c-gsktj\" (UID: \"1b5ce2a2-8a71-4fbe-a860-c89c1be82f08\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gsktj" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.708160 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/706d9ba1-21f0-4e5d-b996-04257e5a2441-available-featuregates\") pod \"openshift-config-operator-7777fb866f-cw47t\" (UID: \"706d9ba1-21f0-4e5d-b996-04257e5a2441\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cw47t" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.708291 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a9dca4eb-9076-4a32-a851-55d4649cdbf2-service-ca\") pod \"console-f9d7485db-2rfct\" (UID: \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\") " pod="openshift-console/console-f9d7485db-2rfct" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.708336 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9cwq6\" (UniqueName: \"kubernetes.io/projected/3b944f9c-2286-488a-b318-7f1e9feb2943-kube-api-access-9cwq6\") pod \"cluster-samples-operator-665b6dd947-dwszh\" (UID: \"3b944f9c-2286-488a-b318-7f1e9feb2943\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-dwszh" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.708360 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9321c52-4339-4975-9345-2b4c0f3cb80d-config\") pod \"machine-approver-56656f9798-bv2fn\" (UID: \"f9321c52-4339-4975-9345-2b4c0f3cb80d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bv2fn" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.708400 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f9321c52-4339-4975-9345-2b4c0f3cb80d-auth-proxy-config\") pod \"machine-approver-56656f9798-bv2fn\" (UID: \"f9321c52-4339-4975-9345-2b4c0f3cb80d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bv2fn" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.708494 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/41db98f4-c026-42e8-8486-412643968146-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-jbn6z\" (UID: \"41db98f4-c026-42e8-8486-412643968146\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-jbn6z" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.708520 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.708554 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8cll5\" (UniqueName: \"kubernetes.io/projected/b1b54ad9-d7a6-4f85-922f-a1de9574eefb-kube-api-access-8cll5\") pod \"console-operator-58897d9998-5wl4v\" (UID: \"b1b54ad9-d7a6-4f85-922f-a1de9574eefb\") " pod="openshift-console-operator/console-operator-58897d9998-5wl4v" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.708874 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ff102720-f36a-467b-8ae8-b9a637d6f34a-audit-dir\") pod \"apiserver-7bbb656c7d-sjh6j\" (UID: \"ff102720-f36a-467b-8ae8-b9a637d6f34a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.709239 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4b05331c-0a14-4806-8dcc-6ccd6dece1a9-serving-cert\") pod \"authentication-operator-69f744f599-frnjg\" (UID: \"4b05331c-0a14-4806-8dcc-6ccd6dece1a9\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-frnjg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.709490 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-etcd-serving-ca\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.710142 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ff102720-f36a-467b-8ae8-b9a637d6f34a-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-sjh6j\" (UID: \"ff102720-f36a-467b-8ae8-b9a637d6f34a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.710201 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f9321c52-4339-4975-9345-2b4c0f3cb80d-auth-proxy-config\") pod \"machine-approver-56656f9798-bv2fn\" (UID: \"f9321c52-4339-4975-9345-2b4c0f3cb80d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bv2fn" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.710367 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b1b54ad9-d7a6-4f85-922f-a1de9574eefb-trusted-ca\") pod \"console-operator-58897d9998-5wl4v\" (UID: \"b1b54ad9-d7a6-4f85-922f-a1de9574eefb\") " pod="openshift-console-operator/console-operator-58897d9998-5wl4v" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.710466 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/90139865-0d47-4b07-9398-2ee5346f819c-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-4sqvk\" (UID: \"90139865-0d47-4b07-9398-2ee5346f819c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4sqvk" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.710704 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/ff102720-f36a-467b-8ae8-b9a637d6f34a-encryption-config\") pod \"apiserver-7bbb656c7d-sjh6j\" (UID: \"ff102720-f36a-467b-8ae8-b9a637d6f34a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.710781 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a9dca4eb-9076-4a32-a851-55d4649cdbf2-service-ca\") pod \"console-f9d7485db-2rfct\" (UID: \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\") " pod="openshift-console/console-f9d7485db-2rfct" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.709642 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-audit\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.710986 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a9dca4eb-9076-4a32-a851-55d4649cdbf2-trusted-ca-bundle\") pod \"console-f9d7485db-2rfct\" (UID: \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\") " pod="openshift-console/console-f9d7485db-2rfct" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.711139 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9321c52-4339-4975-9345-2b4c0f3cb80d-config\") pod \"machine-approver-56656f9798-bv2fn\" (UID: \"f9321c52-4339-4975-9345-2b4c0f3cb80d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bv2fn" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.711420 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/41db98f4-c026-42e8-8486-412643968146-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-jbn6z\" (UID: \"41db98f4-c026-42e8-8486-412643968146\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jbn6z" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.712005 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-image-import-ca\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.712242 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ff102720-f36a-467b-8ae8-b9a637d6f34a-audit-policies\") pod \"apiserver-7bbb656c7d-sjh6j\" (UID: \"ff102720-f36a-467b-8ae8-b9a637d6f34a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.713334 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" 
(UniqueName: \"kubernetes.io/secret/3a5a9a72-e46b-4794-8141-04583a99a97a-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-wb9ck\" (UID: \"3a5a9a72-e46b-4794-8141-04583a99a97a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wb9ck" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.713499 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-trusted-ca-bundle\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.713743 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/49f30341-f779-4860-b675-08eca29196b0-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-fqnps\" (UID: \"49f30341-f779-4860-b675-08eca29196b0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fqnps" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.715442 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1b5ce2a2-8a71-4fbe-a860-c89c1be82f08-serving-cert\") pod \"route-controller-manager-6576b87f9c-gsktj\" (UID: \"1b5ce2a2-8a71-4fbe-a860-c89c1be82f08\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gsktj" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.716026 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/41db98f4-c026-42e8-8486-412643968146-serving-cert\") pod \"controller-manager-879f6c89f-jbn6z\" (UID: \"41db98f4-c026-42e8-8486-412643968146\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jbn6z" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.716215 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/90139865-0d47-4b07-9398-2ee5346f819c-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-4sqvk\" (UID: \"90139865-0d47-4b07-9398-2ee5346f819c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4sqvk" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.716843 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/706d9ba1-21f0-4e5d-b996-04257e5a2441-serving-cert\") pod \"openshift-config-operator-7777fb866f-cw47t\" (UID: \"706d9ba1-21f0-4e5d-b996-04257e5a2441\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cw47t" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.717071 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a9dca4eb-9076-4a32-a851-55d4649cdbf2-console-oauth-config\") pod \"console-f9d7485db-2rfct\" (UID: \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\") " pod="openshift-console/console-f9d7485db-2rfct" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.717116 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-serving-cert\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " 
pod="openshift-apiserver/apiserver-76f77b778f-t8h5w" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.717081 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ff102720-f36a-467b-8ae8-b9a637d6f34a-serving-cert\") pod \"apiserver-7bbb656c7d-sjh6j\" (UID: \"ff102720-f36a-467b-8ae8-b9a637d6f34a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.717535 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b1b54ad9-d7a6-4f85-922f-a1de9574eefb-serving-cert\") pod \"console-operator-58897d9998-5wl4v\" (UID: \"b1b54ad9-d7a6-4f85-922f-a1de9574eefb\") " pod="openshift-console-operator/console-operator-58897d9998-5wl4v" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.718112 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/3b944f9c-2286-488a-b318-7f1e9feb2943-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-dwszh\" (UID: \"3b944f9c-2286-488a-b318-7f1e9feb2943\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-dwszh" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.718226 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/ff102720-f36a-467b-8ae8-b9a637d6f34a-etcd-client\") pod \"apiserver-7bbb656c7d-sjh6j\" (UID: \"ff102720-f36a-467b-8ae8-b9a637d6f34a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.718241 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/f9321c52-4339-4975-9345-2b4c0f3cb80d-machine-approver-tls\") pod \"machine-approver-56656f9798-bv2fn\" (UID: \"f9321c52-4339-4975-9345-2b4c0f3cb80d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bv2fn" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.722883 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a9dca4eb-9076-4a32-a851-55d4649cdbf2-console-serving-cert\") pod \"console-f9d7485db-2rfct\" (UID: \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\") " pod="openshift-console/console-f9d7485db-2rfct" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.735748 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2l5f\" (UniqueName: \"kubernetes.io/projected/90139865-0d47-4b07-9398-2ee5346f819c-kube-api-access-d2l5f\") pod \"cluster-image-registry-operator-dc59b4c8b-4sqvk\" (UID: \"90139865-0d47-4b07-9398-2ee5346f819c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4sqvk" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.758113 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v45w4\" (UniqueName: \"kubernetes.io/projected/41db98f4-c026-42e8-8486-412643968146-kube-api-access-v45w4\") pod \"controller-manager-879f6c89f-jbn6z\" (UID: \"41db98f4-c026-42e8-8486-412643968146\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jbn6z" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.776741 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5rpw\" (UniqueName: 
\"kubernetes.io/projected/f9321c52-4339-4975-9345-2b4c0f3cb80d-kube-api-access-k5rpw\") pod \"machine-approver-56656f9798-bv2fn\" (UID: \"f9321c52-4339-4975-9345-2b4c0f3cb80d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bv2fn" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.802817 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-g89zh"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.803273 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-bchfc"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.803705 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-bchfc" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.805576 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.807267 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-vzjlc"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.808764 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-vzjlc" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.810405 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kn79s"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.810556 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.810660 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.810724 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kn79s" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.810814 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.810976 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.811135 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.811240 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.811343 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.811413 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.811489 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.811596 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-serving-cert\") pod 
\"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.811778 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3ab253fb-76c6-4f2c-b718-c37afd8540ff-audit-dir\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.811875 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwv6m\" (UniqueName: \"kubernetes.io/projected/3ab253fb-76c6-4f2c-b718-c37afd8540ff-kube-api-access-pwv6m\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.811977 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3ab253fb-76c6-4f2c-b718-c37afd8540ff-audit-policies\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.812073 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.816970 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.817482 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3ab253fb-76c6-4f2c-b718-c37afd8540ff-audit-dir\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.818525 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3ab253fb-76c6-4f2c-b718-c37afd8540ff-audit-policies\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.819406 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: 
I0122 05:48:04.823041 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.823469 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.824178 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.824916 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.824992 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.825015 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-lm8gn"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.826235 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.831957 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d4nd6\" (UniqueName: \"kubernetes.io/projected/49f30341-f779-4860-b675-08eca29196b0-kube-api-access-d4nd6\") pod \"openshift-apiserver-operator-796bbdcf4f-fqnps\" (UID: \"49f30341-f779-4860-b675-08eca29196b0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fqnps" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.832969 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-lm8gn" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.833240 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.833741 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-lmpt5"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.834533 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.835000 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-lmpt5" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.837692 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.839499 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-b7nkb"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.840277 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-b7nkb" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.845585 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6zct7"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.846634 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzj5h\" (UniqueName: \"kubernetes.io/projected/ff102720-f36a-467b-8ae8-b9a637d6f34a-kube-api-access-jzj5h\") pod \"apiserver-7bbb656c7d-sjh6j\" (UID: \"ff102720-f36a-467b-8ae8-b9a637d6f34a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.848722 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jtzgr"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.849792 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jtzgr" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.850148 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6zct7" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.854312 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/90139865-0d47-4b07-9398-2ee5346f819c-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-4sqvk\" (UID: \"90139865-0d47-4b07-9398-2ee5346f819c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4sqvk" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.855520 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-xrrnp"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.856984 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-xrrnp" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.872449 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-4bccq"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.877396 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4bccq" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.887152 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bv2fn" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.888980 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vh8zk"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.890395 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dvwgk\" (UniqueName: \"kubernetes.io/projected/d07c1ac4-7087-47d2-b0d1-c3b840d4678a-kube-api-access-dvwgk\") pod \"downloads-7954f5f757-j82cn\" (UID: \"d07c1ac4-7087-47d2-b0d1-c3b840d4678a\") " pod="openshift-console/downloads-7954f5f757-j82cn" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.895561 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vh8zk" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.899402 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6d42n"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.900001 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6d42n" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.900043 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-j82cn" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.903625 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-v2wz9"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.904627 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-v2wz9" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.905229 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xg47r"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.906194 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xg47r" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.908486 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xgblw"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.908810 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8x47\" (UniqueName: \"kubernetes.io/projected/706d9ba1-21f0-4e5d-b996-04257e5a2441-kube-api-access-t8x47\") pod \"openshift-config-operator-7777fb866f-cw47t\" (UID: \"706d9ba1-21f0-4e5d-b996-04257e5a2441\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cw47t" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.909168 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-xgblw" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.913032 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-ckc67"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.913618 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-szxtb"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.914134 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dq4hk"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.914718 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dq4hk" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.916339 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-szxtb" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.921374 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-ckc67" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.922163 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-knbtr"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.923301 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-jbn6z" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.923528 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cw47t" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.923600 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/343b42fa-997d-498f-ade5-4fefffd26aba-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-kn79s\" (UID: \"343b42fa-997d-498f-ade5-4fefffd26aba\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kn79s" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.923627 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4g2pk\" (UniqueName: \"kubernetes.io/projected/4e394150-d9e9-43f0-99ae-036e0e8de268-kube-api-access-4g2pk\") pod \"csi-hostpathplugin-lmpt5\" (UID: \"4e394150-d9e9-43f0-99ae-036e0e8de268\") " pod="hostpath-provisioner/csi-hostpathplugin-lmpt5" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.923670 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/4e394150-d9e9-43f0-99ae-036e0e8de268-registration-dir\") pod \"csi-hostpathplugin-lmpt5\" (UID: \"4e394150-d9e9-43f0-99ae-036e0e8de268\") " pod="hostpath-provisioner/csi-hostpathplugin-lmpt5" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.923695 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75351d44-1e40-47f5-b156-159b8ae57252-config\") pod \"service-ca-operator-777779d784-lm8gn\" (UID: \"75351d44-1e40-47f5-b156-159b8ae57252\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lm8gn" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.923716 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/4e394150-d9e9-43f0-99ae-036e0e8de268-socket-dir\") pod \"csi-hostpathplugin-lmpt5\" (UID: \"4e394150-d9e9-43f0-99ae-036e0e8de268\") " pod="hostpath-provisioner/csi-hostpathplugin-lmpt5" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.923742 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/75351d44-1e40-47f5-b156-159b8ae57252-serving-cert\") pod \"service-ca-operator-777779d784-lm8gn\" (UID: \"75351d44-1e40-47f5-b156-159b8ae57252\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lm8gn" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.923767 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/343b42fa-997d-498f-ade5-4fefffd26aba-config\") pod \"kube-controller-manager-operator-78b949d7b-kn79s\" (UID: \"343b42fa-997d-498f-ade5-4fefffd26aba\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kn79s" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.923789 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvjvk\" (UniqueName: \"kubernetes.io/projected/c703594d-da70-4260-a7a8-dd6b0ebf270a-kube-api-access-lvjvk\") pod \"migrator-59844c95c7-b7nkb\" (UID: \"c703594d-da70-4260-a7a8-dd6b0ebf270a\") " 
pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-b7nkb" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.923890 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1c37351-6524-4846-82a5-5174a5254291-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-jtzgr\" (UID: \"c1c37351-6524-4846-82a5-5174a5254291\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jtzgr" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.923982 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qtf8r\" (UniqueName: \"kubernetes.io/projected/83eac823-fad9-4de9-8f32-97bb096128c7-kube-api-access-qtf8r\") pod \"dns-default-bchfc\" (UID: \"83eac823-fad9-4de9-8f32-97bb096128c7\") " pod="openshift-dns/dns-default-bchfc" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.924015 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c1c37351-6524-4846-82a5-5174a5254291-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-jtzgr\" (UID: \"c1c37351-6524-4846-82a5-5174a5254291\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jtzgr" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.924081 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/83eac823-fad9-4de9-8f32-97bb096128c7-config-volume\") pod \"dns-default-bchfc\" (UID: \"83eac823-fad9-4de9-8f32-97bb096128c7\") " pod="openshift-dns/dns-default-bchfc" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.924107 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/1001b154-4839-4c44-a79b-2be8fcbfb706-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-6zct7\" (UID: \"1001b154-4839-4c44-a79b-2be8fcbfb706\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6zct7" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.924132 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmkm4\" (UniqueName: \"kubernetes.io/projected/c1c37351-6524-4846-82a5-5174a5254291-kube-api-access-rmkm4\") pod \"kube-storage-version-migrator-operator-b67b599dd-jtzgr\" (UID: \"c1c37351-6524-4846-82a5-5174a5254291\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jtzgr" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.924172 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwmjz\" (UniqueName: \"kubernetes.io/projected/1001b154-4839-4c44-a79b-2be8fcbfb706-kube-api-access-mwmjz\") pod \"control-plane-machine-set-operator-78cbb6b69f-6zct7\" (UID: \"1001b154-4839-4c44-a79b-2be8fcbfb706\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6zct7" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.924209 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/83eac823-fad9-4de9-8f32-97bb096128c7-metrics-tls\") pod \"dns-default-bchfc\" (UID: \"83eac823-fad9-4de9-8f32-97bb096128c7\") " pod="openshift-dns/dns-default-bchfc" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.924228 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/4e394150-d9e9-43f0-99ae-036e0e8de268-plugins-dir\") pod \"csi-hostpathplugin-lmpt5\" (UID: \"4e394150-d9e9-43f0-99ae-036e0e8de268\") " pod="hostpath-provisioner/csi-hostpathplugin-lmpt5" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.924293 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/4e394150-d9e9-43f0-99ae-036e0e8de268-csi-data-dir\") pod \"csi-hostpathplugin-lmpt5\" (UID: \"4e394150-d9e9-43f0-99ae-036e0e8de268\") " pod="hostpath-provisioner/csi-hostpathplugin-lmpt5" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.924325 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/4e394150-d9e9-43f0-99ae-036e0e8de268-mountpoint-dir\") pod \"csi-hostpathplugin-lmpt5\" (UID: \"4e394150-d9e9-43f0-99ae-036e0e8de268\") " pod="hostpath-provisioner/csi-hostpathplugin-lmpt5" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.924343 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vzz4\" (UniqueName: \"kubernetes.io/projected/75351d44-1e40-47f5-b156-159b8ae57252-kube-api-access-5vzz4\") pod \"service-ca-operator-777779d784-lm8gn\" (UID: \"75351d44-1e40-47f5-b156-159b8ae57252\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lm8gn" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.924376 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/343b42fa-997d-498f-ade5-4fefffd26aba-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-kn79s\" (UID: \"343b42fa-997d-498f-ade5-4fefffd26aba\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kn79s" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.928873 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hwkk\" (UniqueName: \"kubernetes.io/projected/4b05331c-0a14-4806-8dcc-6ccd6dece1a9-kube-api-access-8hwkk\") pod \"authentication-operator-69f744f599-frnjg\" (UID: \"4b05331c-0a14-4806-8dcc-6ccd6dece1a9\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-frnjg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.929202 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4sqvk" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.930519 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l69z9"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.930657 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-knbtr" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.931175 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-8mjxt"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.931231 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l69z9" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.931634 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-8mjxt" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.933043 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-d9fnw"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.933947 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d9fnw" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.934373 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484345-fvh4j"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.935049 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484345-fvh4j" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.939195 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-q5lsn"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.939793 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-q5lsn" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.940422 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wm77c"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.940883 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-frnjg" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.940945 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wm77c" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.941734 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj7hd"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.942904 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj7hd" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.943173 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kplwh\" (UniqueName: \"kubernetes.io/projected/1b5ce2a2-8a71-4fbe-a860-c89c1be82f08-kube-api-access-kplwh\") pod \"route-controller-manager-6576b87f9c-gsktj\" (UID: \"1b5ce2a2-8a71-4fbe-a860-c89c1be82f08\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gsktj" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.943267 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-vzjlc"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.948566 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kn79s"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.950693 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-bchfc"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.951651 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6zct7"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.951943 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gsktj" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.954208 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-lm8gn"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.955009 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-4bccq"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.955808 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8cll5\" (UniqueName: \"kubernetes.io/projected/b1b54ad9-d7a6-4f85-922f-a1de9574eefb-kube-api-access-8cll5\") pod \"console-operator-58897d9998-5wl4v\" (UID: \"b1b54ad9-d7a6-4f85-922f-a1de9574eefb\") " pod="openshift-console-operator/console-operator-58897d9998-5wl4v" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.956371 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xgblw"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.958733 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xg47r"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.959565 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dq4hk"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.960436 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jtzgr"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.961660 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-xrrnp"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.963335 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-szxtb"] 
Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.964512 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-knbtr"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.965830 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wm77c"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.971188 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-g89zh"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.972560 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.982497 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-lmpt5"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.983405 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-srnjw\" (UniqueName: \"kubernetes.io/projected/3a5a9a72-e46b-4794-8141-04583a99a97a-kube-api-access-srnjw\") pod \"machine-api-operator-5694c8668f-wb9ck\" (UID: \"3a5a9a72-e46b-4794-8141-04583a99a97a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wb9ck" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.985167 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-v2wz9"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.991396 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj7hd"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.993996 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6d42n"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.996468 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-b7nkb"] Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.997312 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xxqb\" (UniqueName: \"kubernetes.io/projected/a9dca4eb-9076-4a32-a851-55d4649cdbf2-kube-api-access-2xxqb\") pod \"console-f9d7485db-2rfct\" (UID: \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\") " pod="openshift-console/console-f9d7485db-2rfct" Jan 22 05:48:04 crc kubenswrapper[4982]: I0122 05:48:04.999265 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l69z9"] Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.008106 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484345-fvh4j"] Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.010718 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-d9fnw"] Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.012472 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-8mjxt"] Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.014980 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vh8zk"] Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.025515 4982 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/343b42fa-997d-498f-ade5-4fefffd26aba-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-kn79s\" (UID: \"343b42fa-997d-498f-ade5-4fefffd26aba\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kn79s" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.025568 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4g2pk\" (UniqueName: \"kubernetes.io/projected/4e394150-d9e9-43f0-99ae-036e0e8de268-kube-api-access-4g2pk\") pod \"csi-hostpathplugin-lmpt5\" (UID: \"4e394150-d9e9-43f0-99ae-036e0e8de268\") " pod="hostpath-provisioner/csi-hostpathplugin-lmpt5" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.025622 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/4e394150-d9e9-43f0-99ae-036e0e8de268-registration-dir\") pod \"csi-hostpathplugin-lmpt5\" (UID: \"4e394150-d9e9-43f0-99ae-036e0e8de268\") " pod="hostpath-provisioner/csi-hostpathplugin-lmpt5" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.025654 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75351d44-1e40-47f5-b156-159b8ae57252-config\") pod \"service-ca-operator-777779d784-lm8gn\" (UID: \"75351d44-1e40-47f5-b156-159b8ae57252\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lm8gn" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.025672 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/4e394150-d9e9-43f0-99ae-036e0e8de268-socket-dir\") pod \"csi-hostpathplugin-lmpt5\" (UID: \"4e394150-d9e9-43f0-99ae-036e0e8de268\") " pod="hostpath-provisioner/csi-hostpathplugin-lmpt5" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.025692 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/75351d44-1e40-47f5-b156-159b8ae57252-serving-cert\") pod \"service-ca-operator-777779d784-lm8gn\" (UID: \"75351d44-1e40-47f5-b156-159b8ae57252\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lm8gn" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.025723 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/343b42fa-997d-498f-ade5-4fefffd26aba-config\") pod \"kube-controller-manager-operator-78b949d7b-kn79s\" (UID: \"343b42fa-997d-498f-ade5-4fefffd26aba\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kn79s" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.025743 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvjvk\" (UniqueName: \"kubernetes.io/projected/c703594d-da70-4260-a7a8-dd6b0ebf270a-kube-api-access-lvjvk\") pod \"migrator-59844c95c7-b7nkb\" (UID: \"c703594d-da70-4260-a7a8-dd6b0ebf270a\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-b7nkb" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.025767 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1c37351-6524-4846-82a5-5174a5254291-config\") pod 
\"kube-storage-version-migrator-operator-b67b599dd-jtzgr\" (UID: \"c1c37351-6524-4846-82a5-5174a5254291\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jtzgr" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.025786 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qtf8r\" (UniqueName: \"kubernetes.io/projected/83eac823-fad9-4de9-8f32-97bb096128c7-kube-api-access-qtf8r\") pod \"dns-default-bchfc\" (UID: \"83eac823-fad9-4de9-8f32-97bb096128c7\") " pod="openshift-dns/dns-default-bchfc" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.025806 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c1c37351-6524-4846-82a5-5174a5254291-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-jtzgr\" (UID: \"c1c37351-6524-4846-82a5-5174a5254291\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jtzgr" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.025830 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/83eac823-fad9-4de9-8f32-97bb096128c7-config-volume\") pod \"dns-default-bchfc\" (UID: \"83eac823-fad9-4de9-8f32-97bb096128c7\") " pod="openshift-dns/dns-default-bchfc" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.025866 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/1001b154-4839-4c44-a79b-2be8fcbfb706-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-6zct7\" (UID: \"1001b154-4839-4c44-a79b-2be8fcbfb706\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6zct7" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.025888 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmkm4\" (UniqueName: \"kubernetes.io/projected/c1c37351-6524-4846-82a5-5174a5254291-kube-api-access-rmkm4\") pod \"kube-storage-version-migrator-operator-b67b599dd-jtzgr\" (UID: \"c1c37351-6524-4846-82a5-5174a5254291\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jtzgr" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.025916 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwmjz\" (UniqueName: \"kubernetes.io/projected/1001b154-4839-4c44-a79b-2be8fcbfb706-kube-api-access-mwmjz\") pod \"control-plane-machine-set-operator-78cbb6b69f-6zct7\" (UID: \"1001b154-4839-4c44-a79b-2be8fcbfb706\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6zct7" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.025938 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/4e394150-d9e9-43f0-99ae-036e0e8de268-plugins-dir\") pod \"csi-hostpathplugin-lmpt5\" (UID: \"4e394150-d9e9-43f0-99ae-036e0e8de268\") " pod="hostpath-provisioner/csi-hostpathplugin-lmpt5" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.025958 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/83eac823-fad9-4de9-8f32-97bb096128c7-metrics-tls\") pod \"dns-default-bchfc\" 
(UID: \"83eac823-fad9-4de9-8f32-97bb096128c7\") " pod="openshift-dns/dns-default-bchfc" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.026180 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/4e394150-d9e9-43f0-99ae-036e0e8de268-csi-data-dir\") pod \"csi-hostpathplugin-lmpt5\" (UID: \"4e394150-d9e9-43f0-99ae-036e0e8de268\") " pod="hostpath-provisioner/csi-hostpathplugin-lmpt5" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.026209 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/4e394150-d9e9-43f0-99ae-036e0e8de268-mountpoint-dir\") pod \"csi-hostpathplugin-lmpt5\" (UID: \"4e394150-d9e9-43f0-99ae-036e0e8de268\") " pod="hostpath-provisioner/csi-hostpathplugin-lmpt5" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.026227 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vzz4\" (UniqueName: \"kubernetes.io/projected/75351d44-1e40-47f5-b156-159b8ae57252-kube-api-access-5vzz4\") pod \"service-ca-operator-777779d784-lm8gn\" (UID: \"75351d44-1e40-47f5-b156-159b8ae57252\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lm8gn" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.026254 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/343b42fa-997d-498f-ade5-4fefffd26aba-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-kn79s\" (UID: \"343b42fa-997d-498f-ade5-4fefffd26aba\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kn79s" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.026677 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/4e394150-d9e9-43f0-99ae-036e0e8de268-registration-dir\") pod \"csi-hostpathplugin-lmpt5\" (UID: \"4e394150-d9e9-43f0-99ae-036e0e8de268\") " pod="hostpath-provisioner/csi-hostpathplugin-lmpt5" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.026762 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/4e394150-d9e9-43f0-99ae-036e0e8de268-socket-dir\") pod \"csi-hostpathplugin-lmpt5\" (UID: \"4e394150-d9e9-43f0-99ae-036e0e8de268\") " pod="hostpath-provisioner/csi-hostpathplugin-lmpt5" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.026964 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/4e394150-d9e9-43f0-99ae-036e0e8de268-plugins-dir\") pod \"csi-hostpathplugin-lmpt5\" (UID: \"4e394150-d9e9-43f0-99ae-036e0e8de268\") " pod="hostpath-provisioner/csi-hostpathplugin-lmpt5" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.027232 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/4e394150-d9e9-43f0-99ae-036e0e8de268-mountpoint-dir\") pod \"csi-hostpathplugin-lmpt5\" (UID: \"4e394150-d9e9-43f0-99ae-036e0e8de268\") " pod="hostpath-provisioner/csi-hostpathplugin-lmpt5" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.027248 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/4e394150-d9e9-43f0-99ae-036e0e8de268-csi-data-dir\") pod 
\"csi-hostpathplugin-lmpt5\" (UID: \"4e394150-d9e9-43f0-99ae-036e0e8de268\") " pod="hostpath-provisioner/csi-hostpathplugin-lmpt5" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.027951 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9cwq6\" (UniqueName: \"kubernetes.io/projected/3b944f9c-2286-488a-b318-7f1e9feb2943-kube-api-access-9cwq6\") pod \"cluster-samples-operator-665b6dd947-dwszh\" (UID: \"3b944f9c-2286-488a-b318-7f1e9feb2943\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-dwszh" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.054075 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.065751 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.080771 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.091881 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/83eac823-fad9-4de9-8f32-97bb096128c7-metrics-tls\") pod \"dns-default-bchfc\" (UID: \"83eac823-fad9-4de9-8f32-97bb096128c7\") " pod="openshift-dns/dns-default-bchfc" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.103165 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-wb9ck" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.106224 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.107841 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/83eac823-fad9-4de9-8f32-97bb096128c7-config-volume\") pod \"dns-default-bchfc\" (UID: \"83eac823-fad9-4de9-8f32-97bb096128c7\") " pod="openshift-dns/dns-default-bchfc" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.122171 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.125835 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fqnps" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.141377 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.142557 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-2rfct" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.163673 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.169208 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-5wl4v" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.184543 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.203012 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.207382 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-dwszh" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.222629 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.242959 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.261681 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.284217 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.295594 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/343b42fa-997d-498f-ade5-4fefffd26aba-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-kn79s\" (UID: \"343b42fa-997d-498f-ade5-4fefffd26aba\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kn79s" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.306623 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.307532 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/343b42fa-997d-498f-ade5-4fefffd26aba-config\") pod \"kube-controller-manager-operator-78b949d7b-kn79s\" (UID: \"343b42fa-997d-498f-ade5-4fefffd26aba\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kn79s" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.340607 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwv6m\" (UniqueName: \"kubernetes.io/projected/3ab253fb-76c6-4f2c-b718-c37afd8540ff-kube-api-access-pwv6m\") pod \"oauth-openshift-558db77b4-lbczg\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.346825 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.352769 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/75351d44-1e40-47f5-b156-159b8ae57252-serving-cert\") pod \"service-ca-operator-777779d784-lm8gn\" (UID: \"75351d44-1e40-47f5-b156-159b8ae57252\") " 
pod="openshift-service-ca-operator/service-ca-operator-777779d784-lm8gn" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.362472 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.368025 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75351d44-1e40-47f5-b156-159b8ae57252-config\") pod \"service-ca-operator-777779d784-lm8gn\" (UID: \"75351d44-1e40-47f5-b156-159b8ae57252\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lm8gn" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.382271 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.401146 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.422216 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.441573 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.464460 4982 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.481466 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.485234 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-jbn6z"] Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.486410 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-j82cn"] Jan 22 05:48:05 crc kubenswrapper[4982]: W0122 05:48:05.496293 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod41db98f4_c026_42e8_8486_412643968146.slice/crio-49a923596a64ca150a237f656779dfa92830e5675fb4fe9c71ac7b47ed4e8162 WatchSource:0}: Error finding container 49a923596a64ca150a237f656779dfa92830e5675fb4fe9c71ac7b47ed4e8162: Status 404 returned error can't find the container with id 49a923596a64ca150a237f656779dfa92830e5675fb4fe9c71ac7b47ed4e8162 Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.500633 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.522315 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.541099 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.561100 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 22 05:48:05 crc 
kubenswrapper[4982]: I0122 05:48:05.579415 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.582643 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.588680 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1c37351-6524-4846-82a5-5174a5254291-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-jtzgr\" (UID: \"c1c37351-6524-4846-82a5-5174a5254291\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jtzgr" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.602156 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.603634 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-cw47t"] Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.612595 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gsktj"] Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.613952 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/1001b154-4839-4c44-a79b-2be8fcbfb706-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-6zct7\" (UID: \"1001b154-4839-4c44-a79b-2be8fcbfb706\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6zct7" Jan 22 05:48:05 crc kubenswrapper[4982]: W0122 05:48:05.617093 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod706d9ba1_21f0_4e5d_b996_04257e5a2441.slice/crio-980739a85b26fdb76a50a10405c52b6d19b2296737787d39f6935c4e88820564 WatchSource:0}: Error finding container 980739a85b26fdb76a50a10405c52b6d19b2296737787d39f6935c4e88820564: Status 404 returned error can't find the container with id 980739a85b26fdb76a50a10405c52b6d19b2296737787d39f6935c4e88820564 Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.621715 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 22 05:48:05 crc kubenswrapper[4982]: W0122 05:48:05.626791 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1b5ce2a2_8a71_4fbe_a860_c89c1be82f08.slice/crio-40dbedf114b92a318a8846a2dd75f4e81dd08f33aaed2d8d1b0913e3b4231e29 WatchSource:0}: Error finding container 40dbedf114b92a318a8846a2dd75f4e81dd08f33aaed2d8d1b0913e3b4231e29: Status 404 returned error can't find the container with id 40dbedf114b92a318a8846a2dd75f4e81dd08f33aaed2d8d1b0913e3b4231e29 Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.643527 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.661971 4982 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.684763 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j"] Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.686557 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.691091 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-frnjg"] Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.691250 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c1c37351-6524-4846-82a5-5174a5254291-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-jtzgr\" (UID: \"c1c37351-6524-4846-82a5-5174a5254291\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jtzgr" Jan 22 05:48:05 crc kubenswrapper[4982]: W0122 05:48:05.693101 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podff102720_f36a_467b_8ae8_b9a637d6f34a.slice/crio-abd932c9217e6fdbf6292acedbb87184aef8354e35231b5c5b73afa664d19fed WatchSource:0}: Error finding container abd932c9217e6fdbf6292acedbb87184aef8354e35231b5c5b73afa664d19fed: Status 404 returned error can't find the container with id abd932c9217e6fdbf6292acedbb87184aef8354e35231b5c5b73afa664d19fed Jan 22 05:48:05 crc kubenswrapper[4982]: E0122 05:48:05.703436 4982 configmap.go:193] Couldn't get configMap openshift-apiserver/config: failed to sync configmap cache: timed out waiting for the condition Jan 22 05:48:05 crc kubenswrapper[4982]: E0122 05:48:05.703526 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-config podName:3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f nodeName:}" failed. No retries permitted until 2026-01-22 05:48:06.203500757 +0000 UTC m=+147.042138760 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-config") pod "apiserver-76f77b778f-t8h5w" (UID: "3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f") : failed to sync configmap cache: timed out waiting for the condition Jan 22 05:48:05 crc kubenswrapper[4982]: E0122 05:48:05.705625 4982 secret.go:188] Couldn't get secret openshift-apiserver/encryption-config-1: failed to sync secret cache: timed out waiting for the condition Jan 22 05:48:05 crc kubenswrapper[4982]: E0122 05:48:05.705680 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-encryption-config podName:3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f nodeName:}" failed. No retries permitted until 2026-01-22 05:48:06.205663034 +0000 UTC m=+147.044301037 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "encryption-config" (UniqueName: "kubernetes.io/secret/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-encryption-config") pod "apiserver-76f77b778f-t8h5w" (UID: "3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f") : failed to sync secret cache: timed out waiting for the condition Jan 22 05:48:05 crc kubenswrapper[4982]: E0122 05:48:05.706347 4982 secret.go:188] Couldn't get secret openshift-apiserver/etcd-client: failed to sync secret cache: timed out waiting for the condition Jan 22 05:48:05 crc kubenswrapper[4982]: E0122 05:48:05.706379 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-etcd-client podName:3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f nodeName:}" failed. No retries permitted until 2026-01-22 05:48:06.206370472 +0000 UTC m=+147.045008475 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etcd-client" (UniqueName: "kubernetes.io/secret/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-etcd-client") pod "apiserver-76f77b778f-t8h5w" (UID: "3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f") : failed to sync secret cache: timed out waiting for the condition Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.755284 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.785303 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.785737 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.786299 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.797988 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4sqvk"] Jan 22 05:48:05 crc kubenswrapper[4982]: E0122 05:48:05.800991 4982 projected.go:288] Couldn't get configMap openshift-apiserver/openshift-service-ca.crt: failed to sync configmap cache: timed out waiting for the condition Jan 22 05:48:05 crc kubenswrapper[4982]: E0122 05:48:05.801026 4982 projected.go:194] Error preparing data for projected volume kube-api-access-4mms7 for pod openshift-apiserver/apiserver-76f77b778f-t8h5w: failed to sync configmap cache: timed out waiting for the condition Jan 22 05:48:05 crc kubenswrapper[4982]: E0122 05:48:05.801110 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-kube-api-access-4mms7 podName:3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f nodeName:}" failed. No retries permitted until 2026-01-22 05:48:06.301084492 +0000 UTC m=+147.139722495 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-4mms7" (UniqueName: "kubernetes.io/projected/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-kube-api-access-4mms7") pod "apiserver-76f77b778f-t8h5w" (UID: "3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f") : failed to sync configmap cache: timed out waiting for the condition Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.804612 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.821455 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fqnps"] Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.829262 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.853628 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.856902 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-5wl4v"] Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.860677 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.861334 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-2rfct"] Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.863385 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-wb9ck"] Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.865519 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-dwszh"] Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.865560 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lbczg"] Jan 22 05:48:05 crc kubenswrapper[4982]: W0122 05:48:05.872807 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda9dca4eb_9076_4a32_a851_55d4649cdbf2.slice/crio-362441eef66e1d7d4a3ad9f0e14e27d42c25624f62c9e85b717b4c38b9b3564f WatchSource:0}: Error finding container 362441eef66e1d7d4a3ad9f0e14e27d42c25624f62c9e85b717b4c38b9b3564f: Status 404 returned error can't find the container with id 362441eef66e1d7d4a3ad9f0e14e27d42c25624f62c9e85b717b4c38b9b3564f Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.874136 4982 generic.go:334] "Generic (PLEG): container finished" podID="706d9ba1-21f0-4e5d-b996-04257e5a2441" containerID="17629e715175b465411be536f2ebfeb0f3135947f029f60b4827344badd4fe93" exitCode=0 Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.874207 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cw47t" event={"ID":"706d9ba1-21f0-4e5d-b996-04257e5a2441","Type":"ContainerDied","Data":"17629e715175b465411be536f2ebfeb0f3135947f029f60b4827344badd4fe93"} Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.874235 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-config-operator/openshift-config-operator-7777fb866f-cw47t" event={"ID":"706d9ba1-21f0-4e5d-b996-04257e5a2441","Type":"ContainerStarted","Data":"980739a85b26fdb76a50a10405c52b6d19b2296737787d39f6935c4e88820564"} Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.881791 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-jbn6z" event={"ID":"41db98f4-c026-42e8-8486-412643968146","Type":"ContainerStarted","Data":"468b2a3ed1c4952ab29a9ddf38e5b5adfadec70a225a3884eb0372f064ef8bbe"} Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.881952 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-jbn6z" event={"ID":"41db98f4-c026-42e8-8486-412643968146","Type":"ContainerStarted","Data":"49a923596a64ca150a237f656779dfa92830e5675fb4fe9c71ac7b47ed4e8162"} Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.881818 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.882452 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-jbn6z" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.884547 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-j82cn" event={"ID":"d07c1ac4-7087-47d2-b0d1-c3b840d4678a","Type":"ContainerStarted","Data":"2b5e92a2eae8277ab47a23ed032f44df75dc97615b8c8bfe2a71c17b1e18004a"} Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.884579 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-j82cn" event={"ID":"d07c1ac4-7087-47d2-b0d1-c3b840d4678a","Type":"ContainerStarted","Data":"12adcad285a90c619cd7431fe958e86227cff47551da5832f069b7696b188c3d"} Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.884995 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-j82cn" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.886091 4982 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-jbn6z container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body= Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.886136 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-jbn6z" podUID="41db98f4-c026-42e8-8486-412643968146" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.887246 4982 patch_prober.go:28] interesting pod/downloads-7954f5f757-j82cn container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.887316 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-j82cn" podUID="d07c1ac4-7087-47d2-b0d1-c3b840d4678a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: 
connection refused" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.887924 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gsktj" event={"ID":"1b5ce2a2-8a71-4fbe-a860-c89c1be82f08","Type":"ContainerStarted","Data":"1c1974234c0af15e8d2e7bb332674736c37b62b6672fefa131a5544137fe5e65"} Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.887966 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gsktj" event={"ID":"1b5ce2a2-8a71-4fbe-a860-c89c1be82f08","Type":"ContainerStarted","Data":"40dbedf114b92a318a8846a2dd75f4e81dd08f33aaed2d8d1b0913e3b4231e29"} Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.888086 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gsktj" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.890506 4982 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-gsktj container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.890553 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gsktj" podUID="1b5ce2a2-8a71-4fbe-a860-c89c1be82f08" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.891816 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j" event={"ID":"ff102720-f36a-467b-8ae8-b9a637d6f34a","Type":"ContainerStarted","Data":"abd932c9217e6fdbf6292acedbb87184aef8354e35231b5c5b73afa664d19fed"} Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.894934 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-frnjg" event={"ID":"4b05331c-0a14-4806-8dcc-6ccd6dece1a9","Type":"ContainerStarted","Data":"7660cc975bb658146e529f2d742f9348429d36bc23adcc40a522408034569b3e"} Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.902679 4982 request.go:700] Waited for 1.00670457s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-operator-lifecycle-manager/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0 Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.905181 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.920010 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4sqvk" event={"ID":"90139865-0d47-4b07-9398-2ee5346f819c","Type":"ContainerStarted","Data":"5ae0a7f09b65b4cbd31e5a97be18517920bfed6d2ee1a19cc31dfcd29a82aedc"} Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.922515 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 22 05:48:05 crc kubenswrapper[4982]: W0122 
05:48:05.924594 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3a5a9a72_e46b_4794_8141_04583a99a97a.slice/crio-a3037525cb9a24f5502febd2cc197772765724c954d66ec37b5c159620b3c650 WatchSource:0}: Error finding container a3037525cb9a24f5502febd2cc197772765724c954d66ec37b5c159620b3c650: Status 404 returned error can't find the container with id a3037525cb9a24f5502febd2cc197772765724c954d66ec37b5c159620b3c650 Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.931403 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bv2fn" event={"ID":"f9321c52-4339-4975-9345-2b4c0f3cb80d","Type":"ContainerStarted","Data":"4b341e9e88ed33062c80ed596b8e2e6a8def49e6936edd493d6e3375dd96a1bb"} Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.931569 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bv2fn" event={"ID":"f9321c52-4339-4975-9345-2b4c0f3cb80d","Type":"ContainerStarted","Data":"6bb8c104153710c9d349bcf4348230abb8821f2973636f452ccf11f26932ca06"} Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.931588 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bv2fn" event={"ID":"f9321c52-4339-4975-9345-2b4c0f3cb80d","Type":"ContainerStarted","Data":"38f9b9b453e39d554fb6965672538f4ebf62c6e0258e553a88c5937f664fb57d"} Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.946477 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.963756 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 22 05:48:05 crc kubenswrapper[4982]: I0122 05:48:05.981704 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.001668 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.022891 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.052524 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.063784 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.089646 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.103262 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.120923 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.142812 4982 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.161457 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.189202 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.203785 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.224258 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.229252 4982 csr.go:261] certificate signing request csr-cpmxv is approved, waiting to be issued Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.237309 4982 csr.go:257] certificate signing request csr-cpmxv is issued Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.241020 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.263010 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.267606 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-etcd-client\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.267660 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-encryption-config\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.267698 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-config\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.290356 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.304215 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.322531 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.344040 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.362301 4982 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-ingress-canary"/"canary-serving-cert" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.369215 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mms7\" (UniqueName: \"kubernetes.io/projected/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-kube-api-access-4mms7\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.389067 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.401403 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.421566 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.443658 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.461331 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.483161 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.501934 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.521984 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.541944 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.563929 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.587946 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.601202 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.627211 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.645800 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.661471 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.673128 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.673520 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:48:06 crc kubenswrapper[4982]: E0122 05:48:06.674456 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:50:08.674436211 +0000 UTC m=+269.513074204 (durationBeforeRetry 2m2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.683708 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.684161 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.703274 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.730133 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.741092 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.761426 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.774719 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.775581 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod 
\"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.782206 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.802864 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.821025 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.847490 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.863632 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.881650 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.903200 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.919310 4982 request.go:700] Waited for 1.979114643s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress/configmaps?fieldSelector=metadata.name%3Dservice-ca-bundle&limit=500&resourceVersion=0 Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.921104 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.938101 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-5wl4v" event={"ID":"b1b54ad9-d7a6-4f85-922f-a1de9574eefb","Type":"ContainerStarted","Data":"c36b87491eb06db4a3d96a5ebc86864994f54e35f0951d2403afb443f0dc0a2e"} Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.938147 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-5wl4v" event={"ID":"b1b54ad9-d7a6-4f85-922f-a1de9574eefb","Type":"ContainerStarted","Data":"c73fea0225a5ec8eb32a8b7cacb72ddbe96675ff5587cd63feb435601f14dad0"} Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.939971 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-5wl4v" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.941177 4982 patch_prober.go:28] interesting pod/console-operator-58897d9998-5wl4v container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/readyz\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body= Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.941211 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-5wl4v" podUID="b1b54ad9-d7a6-4f85-922f-a1de9574eefb" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.8:8443/readyz\": dial tcp 10.217.0.8:8443: connect: connection refused" 
Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.941488 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-frnjg" event={"ID":"4b05331c-0a14-4806-8dcc-6ccd6dece1a9","Type":"ContainerStarted","Data":"8513b75e7e15ad5cc1788c240d4a470ffd5a744e03c6f756f508ff74ac8de4b8"} Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.941893 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.943447 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cw47t" event={"ID":"706d9ba1-21f0-4e5d-b996-04257e5a2441","Type":"ContainerStarted","Data":"de6d3d61bb12e60bee4fa5e376ee91bf7bf922b9bbe4623478bdd5e91f8430b6"} Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.943772 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cw47t" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.946975 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-dwszh" event={"ID":"3b944f9c-2286-488a-b318-7f1e9feb2943","Type":"ContainerStarted","Data":"c3c854d8f0ebe225c56f52eba2a7490bb9b87d04b25ca264d1c4dbb8f737444e"} Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.947002 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-dwszh" event={"ID":"3b944f9c-2286-488a-b318-7f1e9feb2943","Type":"ContainerStarted","Data":"0a3cd217a4d1cb727cec01757e81456e9bef83a46872980785b1276607e08cb3"} Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.947013 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-dwszh" event={"ID":"3b944f9c-2286-488a-b318-7f1e9feb2943","Type":"ContainerStarted","Data":"b18e46db5a10cb9ad803b078d8e314f59d364a30820051ff2316897d6354dc97"} Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.948656 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4sqvk" event={"ID":"90139865-0d47-4b07-9398-2ee5346f819c","Type":"ContainerStarted","Data":"a2881c2d525ba347be3d71562d7dd3a6da1e258ed783b7f0bbe5f08291a8a62b"} Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.955773 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fqnps" event={"ID":"49f30341-f779-4860-b675-08eca29196b0","Type":"ContainerStarted","Data":"bd61f7270d448fc7e0f280c3294a8208b047fb0b3b04c8e8124bcd77ff178e67"} Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.955816 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fqnps" event={"ID":"49f30341-f779-4860-b675-08eca29196b0","Type":"ContainerStarted","Data":"43a8de3a5479fc6bd745e0a4bfe2161e02345b81cc235890f2f8127502b49c54"} Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.957613 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-2rfct" event={"ID":"a9dca4eb-9076-4a32-a851-55d4649cdbf2","Type":"ContainerStarted","Data":"a0232455e51d10b6d9b653a4f09498a4df9a02bc2dc7d298c914fae0c6ecb737"} Jan 22 05:48:06 crc 
kubenswrapper[4982]: I0122 05:48:06.957641 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-2rfct" event={"ID":"a9dca4eb-9076-4a32-a851-55d4649cdbf2","Type":"ContainerStarted","Data":"362441eef66e1d7d4a3ad9f0e14e27d42c25624f62c9e85b717b4c38b9b3564f"} Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.959114 4982 generic.go:334] "Generic (PLEG): container finished" podID="ff102720-f36a-467b-8ae8-b9a637d6f34a" containerID="2381f8bc8e88f506b8172f9547a12fad575c74a78b255b9f58a5ed7ae1b3849f" exitCode=0 Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.959160 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j" event={"ID":"ff102720-f36a-467b-8ae8-b9a637d6f34a","Type":"ContainerDied","Data":"2381f8bc8e88f506b8172f9547a12fad575c74a78b255b9f58a5ed7ae1b3849f"} Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.961267 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" event={"ID":"3ab253fb-76c6-4f2c-b718-c37afd8540ff","Type":"ContainerStarted","Data":"0faa21172f5ab02f904dcf3d83494afcbd7d3db2678ed5365781314150bfc3b5"} Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.961290 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" event={"ID":"3ab253fb-76c6-4f2c-b718-c37afd8540ff","Type":"ContainerStarted","Data":"8a3eae5bdeed3145462499c8a5506d12f14331bea9a8564504dab22088019da4"} Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.961866 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.962130 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.963587 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.964759 4982 patch_prober.go:28] interesting pod/downloads-7954f5f757-j82cn container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.964791 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-j82cn" podUID="d07c1ac4-7087-47d2-b0d1-c3b840d4678a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.964831 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-wb9ck" event={"ID":"3a5a9a72-e46b-4794-8141-04583a99a97a","Type":"ContainerStarted","Data":"92d8ccd67578668d145fd992e649117d51fdf3c8585d0ad8949e44aed516694a"} Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.964865 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-wb9ck" event={"ID":"3a5a9a72-e46b-4794-8141-04583a99a97a","Type":"ContainerStarted","Data":"4859d1a568b17d60acbe3158de4c49b2f65c0f2807bce6aeb02c702dc3ebb207"} Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.964877 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-wb9ck" event={"ID":"3a5a9a72-e46b-4794-8141-04583a99a97a","Type":"ContainerStarted","Data":"a3037525cb9a24f5502febd2cc197772765724c954d66ec37b5c159620b3c650"} Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.967544 4982 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-lbczg container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.18:6443/healthz\": dial tcp 10.217.0.18:6443: connect: connection refused" start-of-body= Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.967575 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" podUID="3ab253fb-76c6-4f2c-b718-c37afd8540ff" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.18:6443/healthz\": dial tcp 10.217.0.18:6443: connect: connection refused" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.970449 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-jbn6z" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.982013 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gsktj" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.982978 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.983052 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.991262 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.991806 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 22 05:48:06 crc kubenswrapper[4982]: I0122 05:48:06.992519 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.013759 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.022158 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.044471 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.108150 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/343b42fa-997d-498f-ade5-4fefffd26aba-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-kn79s\" (UID: \"343b42fa-997d-498f-ade5-4fefffd26aba\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kn79s" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.116927 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4g2pk\" (UniqueName: \"kubernetes.io/projected/4e394150-d9e9-43f0-99ae-036e0e8de268-kube-api-access-4g2pk\") pod \"csi-hostpathplugin-lmpt5\" (UID: \"4e394150-d9e9-43f0-99ae-036e0e8de268\") " pod="hostpath-provisioner/csi-hostpathplugin-lmpt5" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.143470 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvjvk\" (UniqueName: \"kubernetes.io/projected/c703594d-da70-4260-a7a8-dd6b0ebf270a-kube-api-access-lvjvk\") pod \"migrator-59844c95c7-b7nkb\" (UID: \"c703594d-da70-4260-a7a8-dd6b0ebf270a\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-b7nkb" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.163989 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwmjz\" (UniqueName: \"kubernetes.io/projected/1001b154-4839-4c44-a79b-2be8fcbfb706-kube-api-access-mwmjz\") pod 
\"control-plane-machine-set-operator-78cbb6b69f-6zct7\" (UID: \"1001b154-4839-4c44-a79b-2be8fcbfb706\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6zct7" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.197519 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmkm4\" (UniqueName: \"kubernetes.io/projected/c1c37351-6524-4846-82a5-5174a5254291-kube-api-access-rmkm4\") pod \"kube-storage-version-migrator-operator-b67b599dd-jtzgr\" (UID: \"c1c37351-6524-4846-82a5-5174a5254291\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jtzgr" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.200891 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qtf8r\" (UniqueName: \"kubernetes.io/projected/83eac823-fad9-4de9-8f32-97bb096128c7-kube-api-access-qtf8r\") pod \"dns-default-bchfc\" (UID: \"83eac823-fad9-4de9-8f32-97bb096128c7\") " pod="openshift-dns/dns-default-bchfc" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.217928 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vzz4\" (UniqueName: \"kubernetes.io/projected/75351d44-1e40-47f5-b156-159b8ae57252-kube-api-access-5vzz4\") pod \"service-ca-operator-777779d784-lm8gn\" (UID: \"75351d44-1e40-47f5-b156-159b8ae57252\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lm8gn" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.223419 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.235441 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-etcd-client\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.238922 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-01-22 05:43:06 +0000 UTC, rotation deadline is 2026-11-06 08:40:24.632371085 +0000 UTC Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.238953 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 6914h52m17.393420432s for next certificate rotation Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.244641 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.246115 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-bchfc" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.251019 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-config\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w" Jan 22 05:48:07 crc kubenswrapper[4982]: E0122 05:48:07.271073 4982 secret.go:188] Couldn't get secret openshift-apiserver/encryption-config-1: failed to sync secret cache: timed out waiting for the condition Jan 22 05:48:07 crc kubenswrapper[4982]: E0122 05:48:07.271166 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-encryption-config podName:3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f nodeName:}" failed. No retries permitted until 2026-01-22 05:48:08.271144345 +0000 UTC m=+149.109782348 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "encryption-config" (UniqueName: "kubernetes.io/secret/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-encryption-config") pod "apiserver-76f77b778f-t8h5w" (UID: "3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f") : failed to sync secret cache: timed out waiting for the condition Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.283249 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.288300 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.288353 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-registry-tls\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.288391 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-ca-trust-extracted\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.288424 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqvgt\" (UniqueName: \"kubernetes.io/projected/bf158f64-3928-408e-bd28-19eef4de1e71-kube-api-access-vqvgt\") pod \"dns-operator-744455d44c-vzjlc\" (UID: \"bf158f64-3928-408e-bd28-19eef4de1e71\") " pod="openshift-dns-operator/dns-operator-744455d44c-vzjlc" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.288444 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrrqw\" (UniqueName: \"kubernetes.io/projected/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-kube-api-access-jrrqw\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.288463 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-trusted-ca\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.288477 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-bound-sa-token\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.288565 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/bf158f64-3928-408e-bd28-19eef4de1e71-metrics-tls\") pod \"dns-operator-744455d44c-vzjlc\" (UID: \"bf158f64-3928-408e-bd28-19eef4de1e71\") " pod="openshift-dns-operator/dns-operator-744455d44c-vzjlc" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.288624 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.288649 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-installation-pull-secrets\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.288680 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-registry-certificates\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:07 crc kubenswrapper[4982]: E0122 05:48:07.289049 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 05:48:07.789034764 +0000 UTC m=+148.627672767 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-g89zh" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.298667 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.299118 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kn79s" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.305000 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.327629 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.331600 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-lm8gn" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.345361 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mms7\" (UniqueName: \"kubernetes.io/projected/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-kube-api-access-4mms7\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.369936 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-lmpt5" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.370173 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-b7nkb" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.384885 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jtzgr" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.389090 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6zct7" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.389743 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.390152 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-ca-trust-extracted\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.390209 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrrqw\" (UniqueName: \"kubernetes.io/projected/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-kube-api-access-jrrqw\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.390233 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/9e0822f5-3b74-4cf5-93f7-4baa345bb385-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-dq4hk\" (UID: \"9e0822f5-3b74-4cf5-93f7-4baa345bb385\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dq4hk" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.390263 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-trusted-ca\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.390278 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-bound-sa-token\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.390299 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b89f2578-d195-48ea-aa15-009d36b7f6d9-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-6d42n\" (UID: \"b89f2578-d195-48ea-aa15-009d36b7f6d9\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6d42n" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.390381 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/bf158f64-3928-408e-bd28-19eef4de1e71-metrics-tls\") pod \"dns-operator-744455d44c-vzjlc\" (UID: \"bf158f64-3928-408e-bd28-19eef4de1e71\") " pod="openshift-dns-operator/dns-operator-744455d44c-vzjlc" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 
05:48:07.390400 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b89f2578-d195-48ea-aa15-009d36b7f6d9-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-6d42n\" (UID: \"b89f2578-d195-48ea-aa15-009d36b7f6d9\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6d42n" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.390445 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6tjfr\" (UniqueName: \"kubernetes.io/projected/a24eabd9-f7ca-4ea9-9095-3086e431730a-kube-api-access-6tjfr\") pod \"machine-config-server-ckc67\" (UID: \"a24eabd9-f7ca-4ea9-9095-3086e431730a\") " pod="openshift-machine-config-operator/machine-config-server-ckc67" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.390485 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-installation-pull-secrets\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.390503 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-registry-certificates\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.390572 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/a24eabd9-f7ca-4ea9-9095-3086e431730a-node-bootstrap-token\") pod \"machine-config-server-ckc67\" (UID: \"a24eabd9-f7ca-4ea9-9095-3086e431730a\") " pod="openshift-machine-config-operator/machine-config-server-ckc67" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.390600 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-registry-tls\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.390698 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqvgt\" (UniqueName: \"kubernetes.io/projected/bf158f64-3928-408e-bd28-19eef4de1e71-kube-api-access-vqvgt\") pod \"dns-operator-744455d44c-vzjlc\" (UID: \"bf158f64-3928-408e-bd28-19eef4de1e71\") " pod="openshift-dns-operator/dns-operator-744455d44c-vzjlc" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.390722 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b89f2578-d195-48ea-aa15-009d36b7f6d9-config\") pod \"kube-apiserver-operator-766d6c64bb-6d42n\" (UID: \"b89f2578-d195-48ea-aa15-009d36b7f6d9\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6d42n" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.390742 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"certs\" (UniqueName: \"kubernetes.io/secret/a24eabd9-f7ca-4ea9-9095-3086e431730a-certs\") pod \"machine-config-server-ckc67\" (UID: \"a24eabd9-f7ca-4ea9-9095-3086e431730a\") " pod="openshift-machine-config-operator/machine-config-server-ckc67" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.390760 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdgsc\" (UniqueName: \"kubernetes.io/projected/9e0822f5-3b74-4cf5-93f7-4baa345bb385-kube-api-access-qdgsc\") pod \"package-server-manager-789f6589d5-dq4hk\" (UID: \"9e0822f5-3b74-4cf5-93f7-4baa345bb385\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dq4hk" Jan 22 05:48:07 crc kubenswrapper[4982]: E0122 05:48:07.391969 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:48:07.891942599 +0000 UTC m=+148.730580602 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.409430 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-ca-trust-extracted\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.413101 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-installation-pull-secrets\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.413214 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-registry-certificates\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.414692 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/bf158f64-3928-408e-bd28-19eef4de1e71-metrics-tls\") pod \"dns-operator-744455d44c-vzjlc\" (UID: \"bf158f64-3928-408e-bd28-19eef4de1e71\") " pod="openshift-dns-operator/dns-operator-744455d44c-vzjlc" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.417619 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-trusted-ca\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.427480 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-registry-tls\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.450474 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-bound-sa-token\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.468494 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrrqw\" (UniqueName: \"kubernetes.io/projected/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-kube-api-access-jrrqw\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.545699 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqvgt\" (UniqueName: \"kubernetes.io/projected/bf158f64-3928-408e-bd28-19eef4de1e71-kube-api-access-vqvgt\") pod \"dns-operator-744455d44c-vzjlc\" (UID: \"bf158f64-3928-408e-bd28-19eef4de1e71\") " pod="openshift-dns-operator/dns-operator-744455d44c-vzjlc" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.548368 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6vmw\" (UniqueName: \"kubernetes.io/projected/a0a38dd0-b500-4109-a933-2e517ee78cc5-kube-api-access-l6vmw\") pod \"service-ca-9c57cc56f-v2wz9\" (UID: \"a0a38dd0-b500-4109-a933-2e517ee78cc5\") " pod="openshift-service-ca/service-ca-9c57cc56f-v2wz9" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.548405 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-862x6\" (UniqueName: \"kubernetes.io/projected/bfedb5ab-3262-41f1-967d-845bb0df711e-kube-api-access-862x6\") pod \"machine-config-controller-84d6567774-szxtb\" (UID: \"bfedb5ab-3262-41f1-967d-845bb0df711e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-szxtb" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.548431 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/efeb33e7-2fb0-4299-8759-4ce51405ce82-webhook-cert\") pod \"packageserver-d55dfcdfc-xg47r\" (UID: \"efeb33e7-2fb0-4299-8759-4ce51405ce82\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xg47r" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.548526 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/0dd095d4-6799-49eb-ac92-d2b8c31b11dc-etcd-service-ca\") pod \"etcd-operator-b45778765-8mjxt\" (UID: \"0dd095d4-6799-49eb-ac92-d2b8c31b11dc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8mjxt" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.548551 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sr926\" (UniqueName: \"kubernetes.io/projected/84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf-kube-api-access-sr926\") pod \"collect-profiles-29484345-fvh4j\" (UID: \"84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484345-fvh4j" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.548573 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/352bc8b4-5f52-41c7-894e-42eef4f5b073-trusted-ca\") pod \"ingress-operator-5b745b69d9-d9fnw\" (UID: \"352bc8b4-5f52-41c7-894e-42eef4f5b073\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d9fnw" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.548613 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ac6d4617-2a46-45b8-884f-e32e41ca5689-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-l69z9\" (UID: \"ac6d4617-2a46-45b8-884f-e32e41ca5689\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l69z9" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.548653 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/641c7d26-8c8d-41a7-ad9c-66e9807c7f8e-images\") pod \"machine-config-operator-74547568cd-4bccq\" (UID: \"641c7d26-8c8d-41a7-ad9c-66e9807c7f8e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4bccq" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.548677 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b3854c73-a5eb-4db0-8f25-ecdf90993761-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-xgblw\" (UID: \"b3854c73-a5eb-4db0-8f25-ecdf90993761\") " pod="openshift-marketplace/marketplace-operator-79b997595-xgblw" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.548726 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/0dd095d4-6799-49eb-ac92-d2b8c31b11dc-etcd-ca\") pod \"etcd-operator-b45778765-8mjxt\" (UID: \"0dd095d4-6799-49eb-ac92-d2b8c31b11dc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8mjxt" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.548773 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/efeb33e7-2fb0-4299-8759-4ce51405ce82-apiservice-cert\") pod \"packageserver-d55dfcdfc-xg47r\" (UID: \"efeb33e7-2fb0-4299-8759-4ce51405ce82\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xg47r" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.548796 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/279ad2a6-b800-4838-a0d6-73ffaaa6f824-cert\") pod \"ingress-canary-knbtr\" (UID: \"279ad2a6-b800-4838-a0d6-73ffaaa6f824\") " pod="openshift-ingress-canary/ingress-canary-knbtr" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.548835 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/9e0822f5-3b74-4cf5-93f7-4baa345bb385-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-dq4hk\" (UID: \"9e0822f5-3b74-4cf5-93f7-4baa345bb385\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dq4hk" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.552438 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgp22\" (UniqueName: \"kubernetes.io/projected/0c85d6b4-ae35-4226-ae70-8ee0f059fb80-kube-api-access-bgp22\") pod \"multus-admission-controller-857f4d67dd-xrrnp\" (UID: \"0c85d6b4-ae35-4226-ae70-8ee0f059fb80\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-xrrnp" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.552691 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/aa891b32-0d9c-4409-a53b-3d12d4e4edbc-stats-auth\") pod \"router-default-5444994796-q5lsn\" (UID: \"aa891b32-0d9c-4409-a53b-3d12d4e4edbc\") " pod="openshift-ingress/router-default-5444994796-q5lsn" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.552738 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b89f2578-d195-48ea-aa15-009d36b7f6d9-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-6d42n\" (UID: \"b89f2578-d195-48ea-aa15-009d36b7f6d9\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6d42n" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.552810 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/bfedb5ab-3262-41f1-967d-845bb0df711e-proxy-tls\") pod \"machine-config-controller-84d6567774-szxtb\" (UID: \"bfedb5ab-3262-41f1-967d-845bb0df711e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-szxtb" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.552883 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/702c7842-d8b6-4b21-a805-e994ce070a62-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-jj7hd\" (UID: \"702c7842-d8b6-4b21-a805-e994ce070a62\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj7hd" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.552989 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac6d4617-2a46-45b8-884f-e32e41ca5689-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-l69z9\" (UID: \"ac6d4617-2a46-45b8-884f-e32e41ca5689\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l69z9" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.554105 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b89f2578-d195-48ea-aa15-009d36b7f6d9-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-6d42n\" (UID: \"b89f2578-d195-48ea-aa15-009d36b7f6d9\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6d42n" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.554163 4982 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/efeb33e7-2fb0-4299-8759-4ce51405ce82-tmpfs\") pod \"packageserver-d55dfcdfc-xg47r\" (UID: \"efeb33e7-2fb0-4299-8759-4ce51405ce82\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xg47r" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.554219 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.554266 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/641c7d26-8c8d-41a7-ad9c-66e9807c7f8e-auth-proxy-config\") pod \"machine-config-operator-74547568cd-4bccq\" (UID: \"641c7d26-8c8d-41a7-ad9c-66e9807c7f8e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4bccq" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.554295 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/352bc8b4-5f52-41c7-894e-42eef4f5b073-metrics-tls\") pod \"ingress-operator-5b745b69d9-d9fnw\" (UID: \"352bc8b4-5f52-41c7-894e-42eef4f5b073\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d9fnw" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.554339 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6tjfr\" (UniqueName: \"kubernetes.io/projected/a24eabd9-f7ca-4ea9-9095-3086e431730a-kube-api-access-6tjfr\") pod \"machine-config-server-ckc67\" (UID: \"a24eabd9-f7ca-4ea9-9095-3086e431730a\") " pod="openshift-machine-config-operator/machine-config-server-ckc67" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.554393 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf-secret-volume\") pod \"collect-profiles-29484345-fvh4j\" (UID: \"84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484345-fvh4j" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.554417 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxbxw\" (UniqueName: \"kubernetes.io/projected/352bc8b4-5f52-41c7-894e-42eef4f5b073-kube-api-access-cxbxw\") pod \"ingress-operator-5b745b69d9-d9fnw\" (UID: \"352bc8b4-5f52-41c7-894e-42eef4f5b073\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d9fnw" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.554474 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/bfedb5ab-3262-41f1-967d-845bb0df711e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-szxtb\" (UID: \"bfedb5ab-3262-41f1-967d-845bb0df711e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-szxtb" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.554511 4982 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5kgzj\" (UniqueName: \"kubernetes.io/projected/7baa2dd1-c798-4cc6-bd35-d969e2dd0872-kube-api-access-5kgzj\") pod \"catalog-operator-68c6474976-wm77c\" (UID: \"7baa2dd1-c798-4cc6-bd35-d969e2dd0872\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wm77c" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.554537 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-29gx9\" (UniqueName: \"kubernetes.io/projected/0dd095d4-6799-49eb-ac92-d2b8c31b11dc-kube-api-access-29gx9\") pod \"etcd-operator-b45778765-8mjxt\" (UID: \"0dd095d4-6799-49eb-ac92-d2b8c31b11dc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8mjxt" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.554603 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4cmh2\" (UniqueName: \"kubernetes.io/projected/702c7842-d8b6-4b21-a805-e994ce070a62-kube-api-access-4cmh2\") pod \"openshift-controller-manager-operator-756b6f6bc6-jj7hd\" (UID: \"702c7842-d8b6-4b21-a805-e994ce070a62\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj7hd" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.554655 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjghz\" (UniqueName: \"kubernetes.io/projected/641c7d26-8c8d-41a7-ad9c-66e9807c7f8e-kube-api-access-bjghz\") pod \"machine-config-operator-74547568cd-4bccq\" (UID: \"641c7d26-8c8d-41a7-ad9c-66e9807c7f8e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4bccq" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.554679 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/a0a38dd0-b500-4109-a933-2e517ee78cc5-signing-cabundle\") pod \"service-ca-9c57cc56f-v2wz9\" (UID: \"a0a38dd0-b500-4109-a933-2e517ee78cc5\") " pod="openshift-service-ca/service-ca-9c57cc56f-v2wz9" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.554702 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/dbc18849-6eed-4691-adf0-38acbf522afb-srv-cert\") pod \"olm-operator-6b444d44fb-vh8zk\" (UID: \"dbc18849-6eed-4691-adf0-38acbf522afb\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vh8zk" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.554768 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmwdr\" (UniqueName: \"kubernetes.io/projected/dbc18849-6eed-4691-adf0-38acbf522afb-kube-api-access-fmwdr\") pod \"olm-operator-6b444d44fb-vh8zk\" (UID: \"dbc18849-6eed-4691-adf0-38acbf522afb\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vh8zk" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.554813 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b3854c73-a5eb-4db0-8f25-ecdf90993761-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-xgblw\" (UID: \"b3854c73-a5eb-4db0-8f25-ecdf90993761\") " pod="openshift-marketplace/marketplace-operator-79b997595-xgblw" Jan 22 
05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.554836 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/aa891b32-0d9c-4409-a53b-3d12d4e4edbc-metrics-certs\") pod \"router-default-5444994796-q5lsn\" (UID: \"aa891b32-0d9c-4409-a53b-3d12d4e4edbc\") " pod="openshift-ingress/router-default-5444994796-q5lsn" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.554901 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aa891b32-0d9c-4409-a53b-3d12d4e4edbc-service-ca-bundle\") pod \"router-default-5444994796-q5lsn\" (UID: \"aa891b32-0d9c-4409-a53b-3d12d4e4edbc\") " pod="openshift-ingress/router-default-5444994796-q5lsn" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.554961 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/a24eabd9-f7ca-4ea9-9095-3086e431730a-node-bootstrap-token\") pod \"machine-config-server-ckc67\" (UID: \"a24eabd9-f7ca-4ea9-9095-3086e431730a\") " pod="openshift-machine-config-operator/machine-config-server-ckc67" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.554988 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/dbc18849-6eed-4691-adf0-38acbf522afb-profile-collector-cert\") pod \"olm-operator-6b444d44fb-vh8zk\" (UID: \"dbc18849-6eed-4691-adf0-38acbf522afb\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vh8zk" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.557139 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/aa891b32-0d9c-4409-a53b-3d12d4e4edbc-default-certificate\") pod \"router-default-5444994796-q5lsn\" (UID: \"aa891b32-0d9c-4409-a53b-3d12d4e4edbc\") " pod="openshift-ingress/router-default-5444994796-q5lsn" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.557680 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0dd095d4-6799-49eb-ac92-d2b8c31b11dc-serving-cert\") pod \"etcd-operator-b45778765-8mjxt\" (UID: \"0dd095d4-6799-49eb-ac92-d2b8c31b11dc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8mjxt" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.557717 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/a0a38dd0-b500-4109-a933-2e517ee78cc5-signing-key\") pod \"service-ca-9c57cc56f-v2wz9\" (UID: \"a0a38dd0-b500-4109-a933-2e517ee78cc5\") " pod="openshift-service-ca/service-ca-9c57cc56f-v2wz9" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.557740 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4q6nd\" (UniqueName: \"kubernetes.io/projected/efeb33e7-2fb0-4299-8759-4ce51405ce82-kube-api-access-4q6nd\") pod \"packageserver-d55dfcdfc-xg47r\" (UID: \"efeb33e7-2fb0-4299-8759-4ce51405ce82\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xg47r" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.557763 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac6d4617-2a46-45b8-884f-e32e41ca5689-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-l69z9\" (UID: \"ac6d4617-2a46-45b8-884f-e32e41ca5689\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l69z9" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.557804 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/702c7842-d8b6-4b21-a805-e994ce070a62-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-jj7hd\" (UID: \"702c7842-d8b6-4b21-a805-e994ce070a62\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj7hd" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.557822 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0dd095d4-6799-49eb-ac92-d2b8c31b11dc-config\") pod \"etcd-operator-b45778765-8mjxt\" (UID: \"0dd095d4-6799-49eb-ac92-d2b8c31b11dc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8mjxt" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.557858 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/641c7d26-8c8d-41a7-ad9c-66e9807c7f8e-proxy-tls\") pod \"machine-config-operator-74547568cd-4bccq\" (UID: \"641c7d26-8c8d-41a7-ad9c-66e9807c7f8e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4bccq" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.557928 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/7baa2dd1-c798-4cc6-bd35-d969e2dd0872-profile-collector-cert\") pod \"catalog-operator-68c6474976-wm77c\" (UID: \"7baa2dd1-c798-4cc6-bd35-d969e2dd0872\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wm77c" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.557996 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljc5c\" (UniqueName: \"kubernetes.io/projected/b3854c73-a5eb-4db0-8f25-ecdf90993761-kube-api-access-ljc5c\") pod \"marketplace-operator-79b997595-xgblw\" (UID: \"b3854c73-a5eb-4db0-8f25-ecdf90993761\") " pod="openshift-marketplace/marketplace-operator-79b997595-xgblw" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.558037 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b89f2578-d195-48ea-aa15-009d36b7f6d9-config\") pod \"kube-apiserver-operator-766d6c64bb-6d42n\" (UID: \"b89f2578-d195-48ea-aa15-009d36b7f6d9\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6d42n" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.558060 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/a24eabd9-f7ca-4ea9-9095-3086e431730a-certs\") pod \"machine-config-server-ckc67\" (UID: \"a24eabd9-f7ca-4ea9-9095-3086e431730a\") " pod="openshift-machine-config-operator/machine-config-server-ckc67" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.558083 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/352bc8b4-5f52-41c7-894e-42eef4f5b073-bound-sa-token\") pod \"ingress-operator-5b745b69d9-d9fnw\" (UID: \"352bc8b4-5f52-41c7-894e-42eef4f5b073\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d9fnw" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.570664 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b89f2578-d195-48ea-aa15-009d36b7f6d9-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-6d42n\" (UID: \"b89f2578-d195-48ea-aa15-009d36b7f6d9\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6d42n" Jan 22 05:48:07 crc kubenswrapper[4982]: E0122 05:48:07.571132 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 05:48:08.071102159 +0000 UTC m=+148.909740162 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-g89zh" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.575112 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b89f2578-d195-48ea-aa15-009d36b7f6d9-config\") pod \"kube-apiserver-operator-766d6c64bb-6d42n\" (UID: \"b89f2578-d195-48ea-aa15-009d36b7f6d9\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6d42n" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.579899 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdgsc\" (UniqueName: \"kubernetes.io/projected/9e0822f5-3b74-4cf5-93f7-4baa345bb385-kube-api-access-qdgsc\") pod \"package-server-manager-789f6589d5-dq4hk\" (UID: \"9e0822f5-3b74-4cf5-93f7-4baa345bb385\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dq4hk" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.579966 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0dd095d4-6799-49eb-ac92-d2b8c31b11dc-etcd-client\") pod \"etcd-operator-b45778765-8mjxt\" (UID: \"0dd095d4-6799-49eb-ac92-d2b8c31b11dc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8mjxt" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.580004 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/7baa2dd1-c798-4cc6-bd35-d969e2dd0872-srv-cert\") pod \"catalog-operator-68c6474976-wm77c\" (UID: \"7baa2dd1-c798-4cc6-bd35-d969e2dd0872\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wm77c" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.580040 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkppb\" (UniqueName: \"kubernetes.io/projected/aa891b32-0d9c-4409-a53b-3d12d4e4edbc-kube-api-access-mkppb\") pod \"router-default-5444994796-q5lsn\" (UID: 
\"aa891b32-0d9c-4409-a53b-3d12d4e4edbc\") " pod="openshift-ingress/router-default-5444994796-q5lsn" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.586032 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/a24eabd9-f7ca-4ea9-9095-3086e431730a-certs\") pod \"machine-config-server-ckc67\" (UID: \"a24eabd9-f7ca-4ea9-9095-3086e431730a\") " pod="openshift-machine-config-operator/machine-config-server-ckc67" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.586278 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/a24eabd9-f7ca-4ea9-9095-3086e431730a-node-bootstrap-token\") pod \"machine-config-server-ckc67\" (UID: \"a24eabd9-f7ca-4ea9-9095-3086e431730a\") " pod="openshift-machine-config-operator/machine-config-server-ckc67" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.586552 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-vzjlc" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.591494 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fs8jn\" (UniqueName: \"kubernetes.io/projected/279ad2a6-b800-4838-a0d6-73ffaaa6f824-kube-api-access-fs8jn\") pod \"ingress-canary-knbtr\" (UID: \"279ad2a6-b800-4838-a0d6-73ffaaa6f824\") " pod="openshift-ingress-canary/ingress-canary-knbtr" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.591714 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf-config-volume\") pod \"collect-profiles-29484345-fvh4j\" (UID: \"84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484345-fvh4j" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.591780 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0c85d6b4-ae35-4226-ae70-8ee0f059fb80-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-xrrnp\" (UID: \"0c85d6b4-ae35-4226-ae70-8ee0f059fb80\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-xrrnp" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.602298 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/9e0822f5-3b74-4cf5-93f7-4baa345bb385-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-dq4hk\" (UID: \"9e0822f5-3b74-4cf5-93f7-4baa345bb385\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dq4hk" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.620990 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdgsc\" (UniqueName: \"kubernetes.io/projected/9e0822f5-3b74-4cf5-93f7-4baa345bb385-kube-api-access-qdgsc\") pod \"package-server-manager-789f6589d5-dq4hk\" (UID: \"9e0822f5-3b74-4cf5-93f7-4baa345bb385\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dq4hk" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.628047 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/b89f2578-d195-48ea-aa15-009d36b7f6d9-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-6d42n\" (UID: \"b89f2578-d195-48ea-aa15-009d36b7f6d9\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6d42n" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.668649 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6tjfr\" (UniqueName: \"kubernetes.io/projected/a24eabd9-f7ca-4ea9-9095-3086e431730a-kube-api-access-6tjfr\") pod \"machine-config-server-ckc67\" (UID: \"a24eabd9-f7ca-4ea9-9095-3086e431730a\") " pod="openshift-machine-config-operator/machine-config-server-ckc67" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.693492 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.693701 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjghz\" (UniqueName: \"kubernetes.io/projected/641c7d26-8c8d-41a7-ad9c-66e9807c7f8e-kube-api-access-bjghz\") pod \"machine-config-operator-74547568cd-4bccq\" (UID: \"641c7d26-8c8d-41a7-ad9c-66e9807c7f8e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4bccq" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.693727 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/a0a38dd0-b500-4109-a933-2e517ee78cc5-signing-cabundle\") pod \"service-ca-9c57cc56f-v2wz9\" (UID: \"a0a38dd0-b500-4109-a933-2e517ee78cc5\") " pod="openshift-service-ca/service-ca-9c57cc56f-v2wz9" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.693746 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/dbc18849-6eed-4691-adf0-38acbf522afb-srv-cert\") pod \"olm-operator-6b444d44fb-vh8zk\" (UID: \"dbc18849-6eed-4691-adf0-38acbf522afb\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vh8zk" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.693762 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmwdr\" (UniqueName: \"kubernetes.io/projected/dbc18849-6eed-4691-adf0-38acbf522afb-kube-api-access-fmwdr\") pod \"olm-operator-6b444d44fb-vh8zk\" (UID: \"dbc18849-6eed-4691-adf0-38acbf522afb\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vh8zk" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.693783 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b3854c73-a5eb-4db0-8f25-ecdf90993761-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-xgblw\" (UID: \"b3854c73-a5eb-4db0-8f25-ecdf90993761\") " pod="openshift-marketplace/marketplace-operator-79b997595-xgblw" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.693800 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/aa891b32-0d9c-4409-a53b-3d12d4e4edbc-metrics-certs\") pod \"router-default-5444994796-q5lsn\" (UID: \"aa891b32-0d9c-4409-a53b-3d12d4e4edbc\") " 
pod="openshift-ingress/router-default-5444994796-q5lsn" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.693815 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aa891b32-0d9c-4409-a53b-3d12d4e4edbc-service-ca-bundle\") pod \"router-default-5444994796-q5lsn\" (UID: \"aa891b32-0d9c-4409-a53b-3d12d4e4edbc\") " pod="openshift-ingress/router-default-5444994796-q5lsn" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.693831 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/dbc18849-6eed-4691-adf0-38acbf522afb-profile-collector-cert\") pod \"olm-operator-6b444d44fb-vh8zk\" (UID: \"dbc18849-6eed-4691-adf0-38acbf522afb\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vh8zk" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.693869 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/aa891b32-0d9c-4409-a53b-3d12d4e4edbc-default-certificate\") pod \"router-default-5444994796-q5lsn\" (UID: \"aa891b32-0d9c-4409-a53b-3d12d4e4edbc\") " pod="openshift-ingress/router-default-5444994796-q5lsn" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.693891 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0dd095d4-6799-49eb-ac92-d2b8c31b11dc-serving-cert\") pod \"etcd-operator-b45778765-8mjxt\" (UID: \"0dd095d4-6799-49eb-ac92-d2b8c31b11dc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8mjxt" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.693909 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/a0a38dd0-b500-4109-a933-2e517ee78cc5-signing-key\") pod \"service-ca-9c57cc56f-v2wz9\" (UID: \"a0a38dd0-b500-4109-a933-2e517ee78cc5\") " pod="openshift-service-ca/service-ca-9c57cc56f-v2wz9" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.693926 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4q6nd\" (UniqueName: \"kubernetes.io/projected/efeb33e7-2fb0-4299-8759-4ce51405ce82-kube-api-access-4q6nd\") pod \"packageserver-d55dfcdfc-xg47r\" (UID: \"efeb33e7-2fb0-4299-8759-4ce51405ce82\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xg47r" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.693943 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac6d4617-2a46-45b8-884f-e32e41ca5689-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-l69z9\" (UID: \"ac6d4617-2a46-45b8-884f-e32e41ca5689\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l69z9" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.693959 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/702c7842-d8b6-4b21-a805-e994ce070a62-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-jj7hd\" (UID: \"702c7842-d8b6-4b21-a805-e994ce070a62\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj7hd" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.693976 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"config\" (UniqueName: \"kubernetes.io/configmap/0dd095d4-6799-49eb-ac92-d2b8c31b11dc-config\") pod \"etcd-operator-b45778765-8mjxt\" (UID: \"0dd095d4-6799-49eb-ac92-d2b8c31b11dc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8mjxt" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.693992 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/641c7d26-8c8d-41a7-ad9c-66e9807c7f8e-proxy-tls\") pod \"machine-config-operator-74547568cd-4bccq\" (UID: \"641c7d26-8c8d-41a7-ad9c-66e9807c7f8e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4bccq" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694010 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/7baa2dd1-c798-4cc6-bd35-d969e2dd0872-profile-collector-cert\") pod \"catalog-operator-68c6474976-wm77c\" (UID: \"7baa2dd1-c798-4cc6-bd35-d969e2dd0872\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wm77c" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694210 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljc5c\" (UniqueName: \"kubernetes.io/projected/b3854c73-a5eb-4db0-8f25-ecdf90993761-kube-api-access-ljc5c\") pod \"marketplace-operator-79b997595-xgblw\" (UID: \"b3854c73-a5eb-4db0-8f25-ecdf90993761\") " pod="openshift-marketplace/marketplace-operator-79b997595-xgblw" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694227 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/352bc8b4-5f52-41c7-894e-42eef4f5b073-bound-sa-token\") pod \"ingress-operator-5b745b69d9-d9fnw\" (UID: \"352bc8b4-5f52-41c7-894e-42eef4f5b073\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d9fnw" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694243 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0dd095d4-6799-49eb-ac92-d2b8c31b11dc-etcd-client\") pod \"etcd-operator-b45778765-8mjxt\" (UID: \"0dd095d4-6799-49eb-ac92-d2b8c31b11dc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8mjxt" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694260 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/7baa2dd1-c798-4cc6-bd35-d969e2dd0872-srv-cert\") pod \"catalog-operator-68c6474976-wm77c\" (UID: \"7baa2dd1-c798-4cc6-bd35-d969e2dd0872\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wm77c" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694279 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkppb\" (UniqueName: \"kubernetes.io/projected/aa891b32-0d9c-4409-a53b-3d12d4e4edbc-kube-api-access-mkppb\") pod \"router-default-5444994796-q5lsn\" (UID: \"aa891b32-0d9c-4409-a53b-3d12d4e4edbc\") " pod="openshift-ingress/router-default-5444994796-q5lsn" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694318 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fs8jn\" (UniqueName: \"kubernetes.io/projected/279ad2a6-b800-4838-a0d6-73ffaaa6f824-kube-api-access-fs8jn\") pod \"ingress-canary-knbtr\" (UID: \"279ad2a6-b800-4838-a0d6-73ffaaa6f824\") " 
pod="openshift-ingress-canary/ingress-canary-knbtr" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694338 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf-config-volume\") pod \"collect-profiles-29484345-fvh4j\" (UID: \"84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484345-fvh4j" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694367 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0c85d6b4-ae35-4226-ae70-8ee0f059fb80-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-xrrnp\" (UID: \"0c85d6b4-ae35-4226-ae70-8ee0f059fb80\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-xrrnp" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694398 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6vmw\" (UniqueName: \"kubernetes.io/projected/a0a38dd0-b500-4109-a933-2e517ee78cc5-kube-api-access-l6vmw\") pod \"service-ca-9c57cc56f-v2wz9\" (UID: \"a0a38dd0-b500-4109-a933-2e517ee78cc5\") " pod="openshift-service-ca/service-ca-9c57cc56f-v2wz9" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694421 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-862x6\" (UniqueName: \"kubernetes.io/projected/bfedb5ab-3262-41f1-967d-845bb0df711e-kube-api-access-862x6\") pod \"machine-config-controller-84d6567774-szxtb\" (UID: \"bfedb5ab-3262-41f1-967d-845bb0df711e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-szxtb" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694443 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/efeb33e7-2fb0-4299-8759-4ce51405ce82-webhook-cert\") pod \"packageserver-d55dfcdfc-xg47r\" (UID: \"efeb33e7-2fb0-4299-8759-4ce51405ce82\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xg47r" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694460 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/0dd095d4-6799-49eb-ac92-d2b8c31b11dc-etcd-service-ca\") pod \"etcd-operator-b45778765-8mjxt\" (UID: \"0dd095d4-6799-49eb-ac92-d2b8c31b11dc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8mjxt" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694475 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sr926\" (UniqueName: \"kubernetes.io/projected/84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf-kube-api-access-sr926\") pod \"collect-profiles-29484345-fvh4j\" (UID: \"84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484345-fvh4j" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694492 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/352bc8b4-5f52-41c7-894e-42eef4f5b073-trusted-ca\") pod \"ingress-operator-5b745b69d9-d9fnw\" (UID: \"352bc8b4-5f52-41c7-894e-42eef4f5b073\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d9fnw" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694509 4982 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ac6d4617-2a46-45b8-884f-e32e41ca5689-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-l69z9\" (UID: \"ac6d4617-2a46-45b8-884f-e32e41ca5689\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l69z9" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694531 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/641c7d26-8c8d-41a7-ad9c-66e9807c7f8e-images\") pod \"machine-config-operator-74547568cd-4bccq\" (UID: \"641c7d26-8c8d-41a7-ad9c-66e9807c7f8e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4bccq" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694546 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b3854c73-a5eb-4db0-8f25-ecdf90993761-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-xgblw\" (UID: \"b3854c73-a5eb-4db0-8f25-ecdf90993761\") " pod="openshift-marketplace/marketplace-operator-79b997595-xgblw" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694563 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/0dd095d4-6799-49eb-ac92-d2b8c31b11dc-etcd-ca\") pod \"etcd-operator-b45778765-8mjxt\" (UID: \"0dd095d4-6799-49eb-ac92-d2b8c31b11dc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8mjxt" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694588 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/efeb33e7-2fb0-4299-8759-4ce51405ce82-apiservice-cert\") pod \"packageserver-d55dfcdfc-xg47r\" (UID: \"efeb33e7-2fb0-4299-8759-4ce51405ce82\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xg47r" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694604 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/279ad2a6-b800-4838-a0d6-73ffaaa6f824-cert\") pod \"ingress-canary-knbtr\" (UID: \"279ad2a6-b800-4838-a0d6-73ffaaa6f824\") " pod="openshift-ingress-canary/ingress-canary-knbtr" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694624 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgp22\" (UniqueName: \"kubernetes.io/projected/0c85d6b4-ae35-4226-ae70-8ee0f059fb80-kube-api-access-bgp22\") pod \"multus-admission-controller-857f4d67dd-xrrnp\" (UID: \"0c85d6b4-ae35-4226-ae70-8ee0f059fb80\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-xrrnp" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694642 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/aa891b32-0d9c-4409-a53b-3d12d4e4edbc-stats-auth\") pod \"router-default-5444994796-q5lsn\" (UID: \"aa891b32-0d9c-4409-a53b-3d12d4e4edbc\") " pod="openshift-ingress/router-default-5444994796-q5lsn" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694669 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/bfedb5ab-3262-41f1-967d-845bb0df711e-proxy-tls\") pod \"machine-config-controller-84d6567774-szxtb\" (UID: \"bfedb5ab-3262-41f1-967d-845bb0df711e\") " 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-szxtb" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694689 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/702c7842-d8b6-4b21-a805-e994ce070a62-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-jj7hd\" (UID: \"702c7842-d8b6-4b21-a805-e994ce070a62\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj7hd" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694708 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac6d4617-2a46-45b8-884f-e32e41ca5689-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-l69z9\" (UID: \"ac6d4617-2a46-45b8-884f-e32e41ca5689\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l69z9" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694726 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/efeb33e7-2fb0-4299-8759-4ce51405ce82-tmpfs\") pod \"packageserver-d55dfcdfc-xg47r\" (UID: \"efeb33e7-2fb0-4299-8759-4ce51405ce82\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xg47r" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694751 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/641c7d26-8c8d-41a7-ad9c-66e9807c7f8e-auth-proxy-config\") pod \"machine-config-operator-74547568cd-4bccq\" (UID: \"641c7d26-8c8d-41a7-ad9c-66e9807c7f8e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4bccq" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694768 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/352bc8b4-5f52-41c7-894e-42eef4f5b073-metrics-tls\") pod \"ingress-operator-5b745b69d9-d9fnw\" (UID: \"352bc8b4-5f52-41c7-894e-42eef4f5b073\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d9fnw" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694787 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf-secret-volume\") pod \"collect-profiles-29484345-fvh4j\" (UID: \"84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484345-fvh4j" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694803 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cxbxw\" (UniqueName: \"kubernetes.io/projected/352bc8b4-5f52-41c7-894e-42eef4f5b073-kube-api-access-cxbxw\") pod \"ingress-operator-5b745b69d9-d9fnw\" (UID: \"352bc8b4-5f52-41c7-894e-42eef4f5b073\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d9fnw" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694820 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/bfedb5ab-3262-41f1-967d-845bb0df711e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-szxtb\" (UID: \"bfedb5ab-3262-41f1-967d-845bb0df711e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-szxtb" Jan 22 
05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694836 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-29gx9\" (UniqueName: \"kubernetes.io/projected/0dd095d4-6799-49eb-ac92-d2b8c31b11dc-kube-api-access-29gx9\") pod \"etcd-operator-b45778765-8mjxt\" (UID: \"0dd095d4-6799-49eb-ac92-d2b8c31b11dc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8mjxt" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694871 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5kgzj\" (UniqueName: \"kubernetes.io/projected/7baa2dd1-c798-4cc6-bd35-d969e2dd0872-kube-api-access-5kgzj\") pod \"catalog-operator-68c6474976-wm77c\" (UID: \"7baa2dd1-c798-4cc6-bd35-d969e2dd0872\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wm77c" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.694888 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4cmh2\" (UniqueName: \"kubernetes.io/projected/702c7842-d8b6-4b21-a805-e994ce070a62-kube-api-access-4cmh2\") pod \"openshift-controller-manager-operator-756b6f6bc6-jj7hd\" (UID: \"702c7842-d8b6-4b21-a805-e994ce070a62\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj7hd" Jan 22 05:48:07 crc kubenswrapper[4982]: E0122 05:48:07.695933 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:48:08.195914458 +0000 UTC m=+149.034552461 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.696591 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac6d4617-2a46-45b8-884f-e32e41ca5689-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-l69z9\" (UID: \"ac6d4617-2a46-45b8-884f-e32e41ca5689\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l69z9" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.696687 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/a0a38dd0-b500-4109-a933-2e517ee78cc5-signing-cabundle\") pod \"service-ca-9c57cc56f-v2wz9\" (UID: \"a0a38dd0-b500-4109-a933-2e517ee78cc5\") " pod="openshift-service-ca/service-ca-9c57cc56f-v2wz9" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.699501 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aa891b32-0d9c-4409-a53b-3d12d4e4edbc-service-ca-bundle\") pod \"router-default-5444994796-q5lsn\" (UID: \"aa891b32-0d9c-4409-a53b-3d12d4e4edbc\") " pod="openshift-ingress/router-default-5444994796-q5lsn" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.710414 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"srv-cert\" (UniqueName: \"kubernetes.io/secret/dbc18849-6eed-4691-adf0-38acbf522afb-srv-cert\") pod \"olm-operator-6b444d44fb-vh8zk\" (UID: \"dbc18849-6eed-4691-adf0-38acbf522afb\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vh8zk" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.711095 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b3854c73-a5eb-4db0-8f25-ecdf90993761-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-xgblw\" (UID: \"b3854c73-a5eb-4db0-8f25-ecdf90993761\") " pod="openshift-marketplace/marketplace-operator-79b997595-xgblw" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.721987 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf-config-volume\") pod \"collect-profiles-29484345-fvh4j\" (UID: \"84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484345-fvh4j" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.730820 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/aa891b32-0d9c-4409-a53b-3d12d4e4edbc-default-certificate\") pod \"router-default-5444994796-q5lsn\" (UID: \"aa891b32-0d9c-4409-a53b-3d12d4e4edbc\") " pod="openshift-ingress/router-default-5444994796-q5lsn" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.735577 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/7baa2dd1-c798-4cc6-bd35-d969e2dd0872-profile-collector-cert\") pod \"catalog-operator-68c6474976-wm77c\" (UID: \"7baa2dd1-c798-4cc6-bd35-d969e2dd0872\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wm77c" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.736233 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/0dd095d4-6799-49eb-ac92-d2b8c31b11dc-etcd-service-ca\") pod \"etcd-operator-b45778765-8mjxt\" (UID: \"0dd095d4-6799-49eb-ac92-d2b8c31b11dc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8mjxt" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.736519 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0dd095d4-6799-49eb-ac92-d2b8c31b11dc-serving-cert\") pod \"etcd-operator-b45778765-8mjxt\" (UID: \"0dd095d4-6799-49eb-ac92-d2b8c31b11dc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8mjxt" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.736942 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/7baa2dd1-c798-4cc6-bd35-d969e2dd0872-srv-cert\") pod \"catalog-operator-68c6474976-wm77c\" (UID: \"7baa2dd1-c798-4cc6-bd35-d969e2dd0872\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wm77c" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.737425 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/641c7d26-8c8d-41a7-ad9c-66e9807c7f8e-proxy-tls\") pod \"machine-config-operator-74547568cd-4bccq\" (UID: \"641c7d26-8c8d-41a7-ad9c-66e9807c7f8e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4bccq" Jan 22 05:48:07 
crc kubenswrapper[4982]: I0122 05:48:07.737460 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/702c7842-d8b6-4b21-a805-e994ce070a62-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-jj7hd\" (UID: \"702c7842-d8b6-4b21-a805-e994ce070a62\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj7hd" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.740504 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/bfedb5ab-3262-41f1-967d-845bb0df711e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-szxtb\" (UID: \"bfedb5ab-3262-41f1-967d-845bb0df711e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-szxtb" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.740695 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/efeb33e7-2fb0-4299-8759-4ce51405ce82-tmpfs\") pod \"packageserver-d55dfcdfc-xg47r\" (UID: \"efeb33e7-2fb0-4299-8759-4ce51405ce82\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xg47r" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.741912 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/279ad2a6-b800-4838-a0d6-73ffaaa6f824-cert\") pod \"ingress-canary-knbtr\" (UID: \"279ad2a6-b800-4838-a0d6-73ffaaa6f824\") " pod="openshift-ingress-canary/ingress-canary-knbtr" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.742601 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/efeb33e7-2fb0-4299-8759-4ce51405ce82-apiservice-cert\") pod \"packageserver-d55dfcdfc-xg47r\" (UID: \"efeb33e7-2fb0-4299-8759-4ce51405ce82\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xg47r" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.746725 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0dd095d4-6799-49eb-ac92-d2b8c31b11dc-config\") pod \"etcd-operator-b45778765-8mjxt\" (UID: \"0dd095d4-6799-49eb-ac92-d2b8c31b11dc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8mjxt" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.747264 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/641c7d26-8c8d-41a7-ad9c-66e9807c7f8e-images\") pod \"machine-config-operator-74547568cd-4bccq\" (UID: \"641c7d26-8c8d-41a7-ad9c-66e9807c7f8e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4bccq" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.749684 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/641c7d26-8c8d-41a7-ad9c-66e9807c7f8e-auth-proxy-config\") pod \"machine-config-operator-74547568cd-4bccq\" (UID: \"641c7d26-8c8d-41a7-ad9c-66e9807c7f8e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4bccq" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.752697 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/aa891b32-0d9c-4409-a53b-3d12d4e4edbc-metrics-certs\") pod \"router-default-5444994796-q5lsn\" (UID: 
\"aa891b32-0d9c-4409-a53b-3d12d4e4edbc\") " pod="openshift-ingress/router-default-5444994796-q5lsn" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.764638 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/efeb33e7-2fb0-4299-8759-4ce51405ce82-webhook-cert\") pod \"packageserver-d55dfcdfc-xg47r\" (UID: \"efeb33e7-2fb0-4299-8759-4ce51405ce82\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xg47r" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.765027 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/a0a38dd0-b500-4109-a933-2e517ee78cc5-signing-key\") pod \"service-ca-9c57cc56f-v2wz9\" (UID: \"a0a38dd0-b500-4109-a933-2e517ee78cc5\") " pod="openshift-service-ca/service-ca-9c57cc56f-v2wz9" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.765905 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/352bc8b4-5f52-41c7-894e-42eef4f5b073-trusted-ca\") pod \"ingress-operator-5b745b69d9-d9fnw\" (UID: \"352bc8b4-5f52-41c7-894e-42eef4f5b073\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d9fnw" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.766150 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/0dd095d4-6799-49eb-ac92-d2b8c31b11dc-etcd-ca\") pod \"etcd-operator-b45778765-8mjxt\" (UID: \"0dd095d4-6799-49eb-ac92-d2b8c31b11dc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8mjxt" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.766546 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac6d4617-2a46-45b8-884f-e32e41ca5689-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-l69z9\" (UID: \"ac6d4617-2a46-45b8-884f-e32e41ca5689\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l69z9" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.772393 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf-secret-volume\") pod \"collect-profiles-29484345-fvh4j\" (UID: \"84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484345-fvh4j" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.772742 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/702c7842-d8b6-4b21-a805-e994ce070a62-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-jj7hd\" (UID: \"702c7842-d8b6-4b21-a805-e994ce070a62\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj7hd" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.774899 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6d42n" Jan 22 05:48:07 crc kubenswrapper[4982]: W0122 05:48:07.775239 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-ec8205bed726e3395f902bd4a57d3855475f7b431729e18b7eb39d5aaf5de9c8 WatchSource:0}: Error finding container ec8205bed726e3395f902bd4a57d3855475f7b431729e18b7eb39d5aaf5de9c8: Status 404 returned error can't find the container with id ec8205bed726e3395f902bd4a57d3855475f7b431729e18b7eb39d5aaf5de9c8 Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.775721 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0dd095d4-6799-49eb-ac92-d2b8c31b11dc-etcd-client\") pod \"etcd-operator-b45778765-8mjxt\" (UID: \"0dd095d4-6799-49eb-ac92-d2b8c31b11dc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8mjxt" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.776411 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fmwdr\" (UniqueName: \"kubernetes.io/projected/dbc18849-6eed-4691-adf0-38acbf522afb-kube-api-access-fmwdr\") pod \"olm-operator-6b444d44fb-vh8zk\" (UID: \"dbc18849-6eed-4691-adf0-38acbf522afb\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vh8zk" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.777771 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/bfedb5ab-3262-41f1-967d-845bb0df711e-proxy-tls\") pod \"machine-config-controller-84d6567774-szxtb\" (UID: \"bfedb5ab-3262-41f1-967d-845bb0df711e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-szxtb" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.778943 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/aa891b32-0d9c-4409-a53b-3d12d4e4edbc-stats-auth\") pod \"router-default-5444994796-q5lsn\" (UID: \"aa891b32-0d9c-4409-a53b-3d12d4e4edbc\") " pod="openshift-ingress/router-default-5444994796-q5lsn" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.780414 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4cmh2\" (UniqueName: \"kubernetes.io/projected/702c7842-d8b6-4b21-a805-e994ce070a62-kube-api-access-4cmh2\") pod \"openshift-controller-manager-operator-756b6f6bc6-jj7hd\" (UID: \"702c7842-d8b6-4b21-a805-e994ce070a62\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj7hd" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.780578 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b3854c73-a5eb-4db0-8f25-ecdf90993761-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-xgblw\" (UID: \"b3854c73-a5eb-4db0-8f25-ecdf90993761\") " pod="openshift-marketplace/marketplace-operator-79b997595-xgblw" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.797615 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: 
\"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:07 crc kubenswrapper[4982]: E0122 05:48:07.798462 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 05:48:08.298444143 +0000 UTC m=+149.137082146 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-g89zh" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.803391 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjghz\" (UniqueName: \"kubernetes.io/projected/641c7d26-8c8d-41a7-ad9c-66e9807c7f8e-kube-api-access-bjghz\") pod \"machine-config-operator-74547568cd-4bccq\" (UID: \"641c7d26-8c8d-41a7-ad9c-66e9807c7f8e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4bccq" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.805725 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6vmw\" (UniqueName: \"kubernetes.io/projected/a0a38dd0-b500-4109-a933-2e517ee78cc5-kube-api-access-l6vmw\") pod \"service-ca-9c57cc56f-v2wz9\" (UID: \"a0a38dd0-b500-4109-a933-2e517ee78cc5\") " pod="openshift-service-ca/service-ca-9c57cc56f-v2wz9" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.806594 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/352bc8b4-5f52-41c7-894e-42eef4f5b073-metrics-tls\") pod \"ingress-operator-5b745b69d9-d9fnw\" (UID: \"352bc8b4-5f52-41c7-894e-42eef4f5b073\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d9fnw" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.808211 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljc5c\" (UniqueName: \"kubernetes.io/projected/b3854c73-a5eb-4db0-8f25-ecdf90993761-kube-api-access-ljc5c\") pod \"marketplace-operator-79b997595-xgblw\" (UID: \"b3854c73-a5eb-4db0-8f25-ecdf90993761\") " pod="openshift-marketplace/marketplace-operator-79b997595-xgblw" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.811877 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0c85d6b4-ae35-4226-ae70-8ee0f059fb80-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-xrrnp\" (UID: \"0c85d6b4-ae35-4226-ae70-8ee0f059fb80\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-xrrnp" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.815408 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj7hd" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.823239 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4q6nd\" (UniqueName: \"kubernetes.io/projected/efeb33e7-2fb0-4299-8759-4ce51405ce82-kube-api-access-4q6nd\") pod \"packageserver-d55dfcdfc-xg47r\" (UID: \"efeb33e7-2fb0-4299-8759-4ce51405ce82\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xg47r" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.823592 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/dbc18849-6eed-4691-adf0-38acbf522afb-profile-collector-cert\") pod \"olm-operator-6b444d44fb-vh8zk\" (UID: \"dbc18849-6eed-4691-adf0-38acbf522afb\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vh8zk" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.844956 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-xgblw" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.848814 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkppb\" (UniqueName: \"kubernetes.io/projected/aa891b32-0d9c-4409-a53b-3d12d4e4edbc-kube-api-access-mkppb\") pod \"router-default-5444994796-q5lsn\" (UID: \"aa891b32-0d9c-4409-a53b-3d12d4e4edbc\") " pod="openshift-ingress/router-default-5444994796-q5lsn" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.851691 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/352bc8b4-5f52-41c7-894e-42eef4f5b073-bound-sa-token\") pod \"ingress-operator-5b745b69d9-d9fnw\" (UID: \"352bc8b4-5f52-41c7-894e-42eef4f5b073\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d9fnw" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.852827 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgp22\" (UniqueName: \"kubernetes.io/projected/0c85d6b4-ae35-4226-ae70-8ee0f059fb80-kube-api-access-bgp22\") pod \"multus-admission-controller-857f4d67dd-xrrnp\" (UID: \"0c85d6b4-ae35-4226-ae70-8ee0f059fb80\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-xrrnp" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.885427 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-862x6\" (UniqueName: \"kubernetes.io/projected/bfedb5ab-3262-41f1-967d-845bb0df711e-kube-api-access-862x6\") pod \"machine-config-controller-84d6567774-szxtb\" (UID: \"bfedb5ab-3262-41f1-967d-845bb0df711e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-szxtb" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.885842 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-ckc67" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.886164 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dq4hk" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.899826 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:48:07 crc kubenswrapper[4982]: E0122 05:48:07.900000 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:48:08.399968801 +0000 UTC m=+149.238606804 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.900220 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:07 crc kubenswrapper[4982]: E0122 05:48:07.900590 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 05:48:08.400580787 +0000 UTC m=+149.239218780 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-g89zh" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.907404 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-szxtb" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.910723 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sr926\" (UniqueName: \"kubernetes.io/projected/84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf-kube-api-access-sr926\") pod \"collect-profiles-29484345-fvh4j\" (UID: \"84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484345-fvh4j" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.942257 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-29gx9\" (UniqueName: \"kubernetes.io/projected/0dd095d4-6799-49eb-ac92-d2b8c31b11dc-kube-api-access-29gx9\") pod \"etcd-operator-b45778765-8mjxt\" (UID: \"0dd095d4-6799-49eb-ac92-d2b8c31b11dc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8mjxt" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.991807 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5kgzj\" (UniqueName: \"kubernetes.io/projected/7baa2dd1-c798-4cc6-bd35-d969e2dd0872-kube-api-access-5kgzj\") pod \"catalog-operator-68c6474976-wm77c\" (UID: \"7baa2dd1-c798-4cc6-bd35-d969e2dd0872\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wm77c" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.993017 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cxbxw\" (UniqueName: \"kubernetes.io/projected/352bc8b4-5f52-41c7-894e-42eef4f5b073-kube-api-access-cxbxw\") pod \"ingress-operator-5b745b69d9-d9fnw\" (UID: \"352bc8b4-5f52-41c7-894e-42eef4f5b073\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d9fnw" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.993145 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fs8jn\" (UniqueName: \"kubernetes.io/projected/279ad2a6-b800-4838-a0d6-73ffaaa6f824-kube-api-access-fs8jn\") pod \"ingress-canary-knbtr\" (UID: \"279ad2a6-b800-4838-a0d6-73ffaaa6f824\") " pod="openshift-ingress-canary/ingress-canary-knbtr" Jan 22 05:48:07 crc kubenswrapper[4982]: I0122 05:48:07.997729 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-xrrnp" Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.001037 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:48:08 crc kubenswrapper[4982]: E0122 05:48:08.001491 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:48:08.501478709 +0000 UTC m=+149.340116712 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.005170 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-8mjxt" Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.005751 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j" event={"ID":"ff102720-f36a-467b-8ae8-b9a637d6f34a","Type":"ContainerStarted","Data":"c1e106cb98cf1032ca763efc9d849340f53a4ed9859242ca62e64953fbda34ca"} Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.007407 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ac6d4617-2a46-45b8-884f-e32e41ca5689-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-l69z9\" (UID: \"ac6d4617-2a46-45b8-884f-e32e41ca5689\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l69z9" Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.014753 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4bccq" Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.057249 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"ec8205bed726e3395f902bd4a57d3855475f7b431729e18b7eb39d5aaf5de9c8"} Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.057673 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vh8zk" Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.059627 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d9fnw" Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.069890 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484345-fvh4j" Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.083930 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-q5lsn" Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.091791 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-v2wz9" Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.100875 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wm77c" Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.103882 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:08 crc kubenswrapper[4982]: E0122 05:48:08.132999 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 05:48:08.632976152 +0000 UTC m=+149.471614155 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-g89zh" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.142195 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xg47r" Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.207646 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:48:08 crc kubenswrapper[4982]: E0122 05:48:08.208119 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:48:08.70810174 +0000 UTC m=+149.546739743 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.210823 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-knbtr" Jan 22 05:48:08 crc kubenswrapper[4982]: W0122 05:48:08.254111 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda24eabd9_f7ca_4ea9_9095_3086e431730a.slice/crio-3036675bb79263ab09898f57239937704eb23413363ad91c887638090ac50739 WatchSource:0}: Error finding container 3036675bb79263ab09898f57239937704eb23413363ad91c887638090ac50739: Status 404 returned error can't find the container with id 3036675bb79263ab09898f57239937704eb23413363ad91c887638090ac50739 Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.254146 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l69z9" Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.311873 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-encryption-config\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w" Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.311919 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:08 crc kubenswrapper[4982]: E0122 05:48:08.312292 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 05:48:08.812278277 +0000 UTC m=+149.650916280 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-g89zh" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.344912 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-5wl4v" Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.368746 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" podStartSLOduration=127.368722905 podStartE2EDuration="2m7.368722905s" podCreationTimestamp="2026-01-22 05:46:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:08.272867726 +0000 UTC m=+149.111505729" watchObservedRunningTime="2026-01-22 05:48:08.368722905 +0000 UTC m=+149.207360908" Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.394109 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f-encryption-config\") pod \"apiserver-76f77b778f-t8h5w\" (UID: \"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f\") " pod="openshift-apiserver/apiserver-76f77b778f-t8h5w" Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.454647 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:48:08 crc kubenswrapper[4982]: E0122 05:48:08.455280 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:48:08.955264101 +0000 UTC m=+149.793902094 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.514398 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.532373 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-wb9ck" podStartSLOduration=126.53235322 podStartE2EDuration="2m6.53235322s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:08.531454206 +0000 UTC m=+149.370092219" watchObservedRunningTime="2026-01-22 05:48:08.53235322 +0000 UTC m=+149.370991223" Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.558000 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:08 crc kubenswrapper[4982]: E0122 05:48:08.558404 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 05:48:09.058389172 +0000 UTC m=+149.897027175 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-g89zh" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.614707 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gsktj" podStartSLOduration=126.614688226 podStartE2EDuration="2m6.614688226s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:08.584915116 +0000 UTC m=+149.423553149" watchObservedRunningTime="2026-01-22 05:48:08.614688226 +0000 UTC m=+149.453326229" Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.616484 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-bchfc"] Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.665012 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:48:08 crc kubenswrapper[4982]: E0122 05:48:08.665244 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:48:09.165218629 +0000 UTC m=+150.003856632 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.665442 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:08 crc kubenswrapper[4982]: E0122 05:48:08.665822 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 05:48:09.165810534 +0000 UTC m=+150.004448537 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-g89zh" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.689515 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-t8h5w" Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.729102 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-dwszh" podStartSLOduration=126.729079732 podStartE2EDuration="2m6.729079732s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:08.694611589 +0000 UTC m=+149.533249592" watchObservedRunningTime="2026-01-22 05:48:08.729079732 +0000 UTC m=+149.567717735" Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.767789 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:48:08 crc kubenswrapper[4982]: E0122 05:48:08.768778 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:48:09.26875583 +0000 UTC m=+150.107393833 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.849346 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j" podStartSLOduration=126.84933077 podStartE2EDuration="2m6.84933077s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:08.791495816 +0000 UTC m=+149.630133819" watchObservedRunningTime="2026-01-22 05:48:08.84933077 +0000 UTC m=+149.687968773" Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.875081 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:08 crc kubenswrapper[4982]: E0122 05:48:08.875369 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 05:48:09.375350171 +0000 UTC m=+150.213988174 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-g89zh" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.935137 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-jbn6z" podStartSLOduration=126.935122406 podStartE2EDuration="2m6.935122406s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:08.928610016 +0000 UTC m=+149.767248019" watchObservedRunningTime="2026-01-22 05:48:08.935122406 +0000 UTC m=+149.773760409" Jan 22 05:48:08 crc kubenswrapper[4982]: I0122 05:48:08.979736 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:48:08 crc kubenswrapper[4982]: E0122 05:48:08.980336 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:48:09.48030501 +0000 UTC m=+150.318943013 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.085804 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:09 crc kubenswrapper[4982]: E0122 05:48:09.086127 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 05:48:09.58611532 +0000 UTC m=+150.424753313 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-g89zh" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.129335 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"ab2a5924baff254b35bf82a5ea6a1ae20ad63dabd8ccf980745c8d7ca936d053"} Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.162118 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj7hd"] Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.174179 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"b2ee050492e96105fbbb0869461af1a70976741bd88514440ae5ae0afe1d3bd0"} Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.174227 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"fad36c8e4e227ed9223856bad4384b74029e24ed91828590301b7d752a3c63af"} Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.175008 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-lm8gn"] Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.190430 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-lmpt5"] Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.190891 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:48:09 crc kubenswrapper[4982]: E0122 05:48:09.191337 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:48:09.691320885 +0000 UTC m=+150.529958888 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.197909 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-q5lsn" event={"ID":"aa891b32-0d9c-4409-a53b-3d12d4e4edbc","Type":"ContainerStarted","Data":"ee70154b05912f6863cd9154bd98ad84381d0164b6cdff6e2c61dc663da55e7f"} Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.217378 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-ckc67" event={"ID":"a24eabd9-f7ca-4ea9-9095-3086e431730a","Type":"ContainerStarted","Data":"b48591119112d54f98b43bc1ecfafbf06ff090725287b0a53ead19433ef42535"} Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.217419 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-ckc67" event={"ID":"a24eabd9-f7ca-4ea9-9095-3086e431730a","Type":"ContainerStarted","Data":"3036675bb79263ab09898f57239937704eb23413363ad91c887638090ac50739"} Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.242204 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kn79s"] Jan 22 05:48:09 crc kubenswrapper[4982]: W0122 05:48:09.243420 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4e394150_d9e9_43f0_99ae_036e0e8de268.slice/crio-4ec86e3edc6be26fb02f351d98f9e685f1e3a1f813ab5d74cdf5573a9bdc2f99 WatchSource:0}: Error finding container 4ec86e3edc6be26fb02f351d98f9e685f1e3a1f813ab5d74cdf5573a9bdc2f99: Status 404 returned error can't find the container with id 4ec86e3edc6be26fb02f351d98f9e685f1e3a1f813ab5d74cdf5573a9bdc2f99 Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.295534 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.296503 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-bchfc" event={"ID":"83eac823-fad9-4de9-8f32-97bb096128c7","Type":"ContainerStarted","Data":"03123e7d5c060566ef9bafe148a1ea0e4d07a83060679d21700fee5fb8429281"} Jan 22 05:48:09 crc kubenswrapper[4982]: E0122 05:48:09.297055 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 05:48:09.797026963 +0000 UTC m=+150.635664966 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-g89zh" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.359363 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"55bd48c3919d87921ded3dc0a9205411f2292e76d56b57ab7b7d62128b5dc1f3"} Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.359451 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"ba2bd5cc3fdffec208c6a2742abcb6a48ef3746825597b8f5e8cfd695eb2e7c2"} Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.360307 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.375621 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cw47t" Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.378085 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-j82cn" podStartSLOduration=127.378030345 podStartE2EDuration="2m7.378030345s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:09.376185566 +0000 UTC m=+150.214823589" watchObservedRunningTime="2026-01-22 05:48:09.378030345 +0000 UTC m=+150.216668358" Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.407592 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:48:09 crc kubenswrapper[4982]: E0122 05:48:09.407950 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:48:09.907935787 +0000 UTC m=+150.746573790 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.426966 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6d42n"] Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.481202 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-b7nkb"] Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.509210 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:09 crc kubenswrapper[4982]: E0122 05:48:09.509538 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 05:48:10.009525767 +0000 UTC m=+150.848163770 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-g89zh" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.534399 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jtzgr"] Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.535927 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cw47t" podStartSLOduration=127.535907998 podStartE2EDuration="2m7.535907998s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:09.530779944 +0000 UTC m=+150.369417967" watchObservedRunningTime="2026-01-22 05:48:09.535907998 +0000 UTC m=+150.374546001" Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.566690 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4sqvk" podStartSLOduration=127.566657153 podStartE2EDuration="2m7.566657153s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:09.559105455 +0000 UTC m=+150.397743468" watchObservedRunningTime="2026-01-22 05:48:09.566657153 +0000 UTC m=+150.405295176" Jan 22 
05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.572819 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xgblw"] Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.581267 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-frnjg" podStartSLOduration=128.581238715 podStartE2EDuration="2m8.581238715s" podCreationTimestamp="2026-01-22 05:46:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:09.577403985 +0000 UTC m=+150.416042018" watchObservedRunningTime="2026-01-22 05:48:09.581238715 +0000 UTC m=+150.419876708" Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.610781 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:48:09 crc kubenswrapper[4982]: E0122 05:48:09.612233 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:48:10.112212216 +0000 UTC m=+150.950850219 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.612294 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:09 crc kubenswrapper[4982]: E0122 05:48:09.612620 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 05:48:10.112608916 +0000 UTC m=+150.951246919 (durationBeforeRetry 500ms). 
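Each rejection above is immediately followed by "No retries permitted until <timestamp> (durationBeforeRetry 500ms)": the failed operation is parked and re-queued rather than retried hot, which is why the same pair of errors recurs on a roughly half-second cadence. The log prefix attributes this to nestedpendingoperations.go:348; the sketch below only illustrates the gating pattern under that reading, with illustrative names and a fixed 500ms delay as seen in these entries.

```go
// Sketch of per-operation retry gating: after a failure, the operation
// records the earliest time a retry is allowed; attempts inside that
// window are rejected without running. Names are illustrative.
package main

import (
	"fmt"
	"time"
)

type pendingOp struct {
	nextRetry time.Time
	backoff   time.Duration
}

func (p *pendingOp) tryRun(now time.Time, run func() error) error {
	if now.Before(p.nextRetry) {
		return fmt.Errorf("no retries permitted until %s (durationBeforeRetry %s)",
			p.nextRetry.UTC(), p.backoff)
	}
	if err := run(); err != nil {
		p.nextRetry = now.Add(p.backoff) // park and re-queue the operation
		return err
	}
	return nil
}

func main() {
	op := &pendingOp{backoff: 500 * time.Millisecond}
	mount := func() error { return fmt.Errorf("driver not registered") }
	fmt.Println(op.tryRun(time.Now(), mount)) // fails, arms the retry gate
	fmt.Println(op.tryRun(time.Now(), mount)) // rejected: inside the backoff window
}
```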
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-g89zh" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.619429 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fqnps" podStartSLOduration=128.619398505 podStartE2EDuration="2m8.619398505s" podCreationTimestamp="2026-01-22 05:46:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:09.610406088 +0000 UTC m=+150.449044091" watchObservedRunningTime="2026-01-22 05:48:09.619398505 +0000 UTC m=+150.458036508" Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.657776 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-2rfct" podStartSLOduration=127.657740058 podStartE2EDuration="2m7.657740058s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:09.655684165 +0000 UTC m=+150.494322168" watchObservedRunningTime="2026-01-22 05:48:09.657740058 +0000 UTC m=+150.496378061" Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.680915 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bv2fn" podStartSLOduration=128.680898445 podStartE2EDuration="2m8.680898445s" podCreationTimestamp="2026-01-22 05:46:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:09.677240699 +0000 UTC m=+150.515878712" watchObservedRunningTime="2026-01-22 05:48:09.680898445 +0000 UTC m=+150.519536458" Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.713606 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:48:09 crc kubenswrapper[4982]: E0122 05:48:09.714147 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:48:10.214061063 +0000 UTC m=+151.052699076 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.715300 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:09 crc kubenswrapper[4982]: E0122 05:48:09.715649 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 05:48:10.215630014 +0000 UTC m=+151.054268017 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-g89zh" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.756594 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-5wl4v" podStartSLOduration=127.756572417 podStartE2EDuration="2m7.756572417s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:09.737413425 +0000 UTC m=+150.576051428" watchObservedRunningTime="2026-01-22 05:48:09.756572417 +0000 UTC m=+150.595210440" Jan 22 05:48:09 crc kubenswrapper[4982]: W0122 05:48:09.791443 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc703594d_da70_4260_a7a8_dd6b0ebf270a.slice/crio-99d2a1b527ae9178051520c5e66d34c1ddf1d6149120a3a7d32cf895610ee1f2 WatchSource:0}: Error finding container 99d2a1b527ae9178051520c5e66d34c1ddf1d6149120a3a7d32cf895610ee1f2: Status 404 returned error can't find the container with id 99d2a1b527ae9178051520c5e66d34c1ddf1d6149120a3a7d32cf895610ee1f2 Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.819633 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:48:09 crc kubenswrapper[4982]: E0122 05:48:09.819970 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-22 05:48:10.319953906 +0000 UTC m=+151.158591909 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.825684 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-ckc67" podStartSLOduration=5.825670935 podStartE2EDuration="5.825670935s" podCreationTimestamp="2026-01-22 05:48:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:09.784127717 +0000 UTC m=+150.622765720" watchObservedRunningTime="2026-01-22 05:48:09.825670935 +0000 UTC m=+150.664308938" Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.826275 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-q5lsn" podStartSLOduration=127.826272242 podStartE2EDuration="2m7.826272242s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:09.821797864 +0000 UTC m=+150.660435867" watchObservedRunningTime="2026-01-22 05:48:09.826272242 +0000 UTC m=+150.664910245" Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.927184 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:09 crc kubenswrapper[4982]: E0122 05:48:09.927670 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 05:48:10.427655416 +0000 UTC m=+151.266293419 (durationBeforeRetry 500ms). 
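The pod_startup_latency_tracker entries interleaved through this section are internally consistent: podStartSLOduration equals watchObservedRunningTime minus podCreationTimestamp (image pulls never started here, so firstStartedPulling and lastFinishedPulling are the zero time). A quick check against the machine-config-server-ckc67 entry just above, which reports podStartSLOduration=5.825670935s:

```go
// Verifies the startup-duration arithmetic from the log entry:
// watchObservedRunningTime - podCreationTimestamp = podStartSLOduration.
package main

import (
	"fmt"
	"time"
)

func main() {
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	created, _ := time.Parse(layout, "2026-01-22 05:48:04 +0000 UTC")
	observed, _ := time.Parse(layout, "2026-01-22 05:48:09.825670935 +0000 UTC")
	fmt.Println(observed.Sub(created)) // 5.825670935s, matching podStartSLOduration
}
```

The same subtraction reproduces the two-minute figures for the pods created at 05:46:01-05:46:02, e.g. router-default's 127.826272242s.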
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-g89zh" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.978133 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j" Jan 22 05:48:09 crc kubenswrapper[4982]: I0122 05:48:09.983372 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j" Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.031127 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:48:10 crc kubenswrapper[4982]: E0122 05:48:10.031476 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:48:10.531456744 +0000 UTC m=+151.370094747 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.085454 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-q5lsn" Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.096070 4982 patch_prober.go:28] interesting pod/router-default-5444994796-q5lsn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 05:48:10 crc kubenswrapper[4982]: [-]has-synced failed: reason withheld Jan 22 05:48:10 crc kubenswrapper[4982]: [+]process-running ok Jan 22 05:48:10 crc kubenswrapper[4982]: healthz check failed Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.096139 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q5lsn" podUID="aa891b32-0d9c-4409-a53b-3d12d4e4edbc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.128802 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j" Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.132480 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:10 crc kubenswrapper[4982]: E0122 05:48:10.132818 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 05:48:10.632804978 +0000 UTC m=+151.471442981 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-g89zh" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.233829 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:48:10 crc kubenswrapper[4982]: E0122 05:48:10.234399 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:48:10.734370627 +0000 UTC m=+151.573008620 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.234541 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:10 crc kubenswrapper[4982]: E0122 05:48:10.236779 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 05:48:10.73675925 +0000 UTC m=+151.575397243 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-g89zh" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.269479 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6zct7"] Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.271943 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dq4hk"] Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.305070 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-8mjxt"] Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.335369 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:48:10 crc kubenswrapper[4982]: E0122 05:48:10.335631 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:48:10.835596457 +0000 UTC m=+151.674234460 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.335837 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:10 crc kubenswrapper[4982]: E0122 05:48:10.336220 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 05:48:10.836205714 +0000 UTC m=+151.674843717 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-g89zh" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.359386 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dq4hk" event={"ID":"9e0822f5-3b74-4cf5-93f7-4baa345bb385","Type":"ContainerStarted","Data":"233a867de7b74b3a136505b735af0f13691a36e72e305585877e66605160ce9c"} Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.363129 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-lm8gn" event={"ID":"75351d44-1e40-47f5-b156-159b8ae57252","Type":"ContainerStarted","Data":"b17b6b0f22fe262834974f873141e2a2da01162de6b0fd21077dff27218e1e7b"} Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.363170 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-lm8gn" event={"ID":"75351d44-1e40-47f5-b156-159b8ae57252","Type":"ContainerStarted","Data":"011ae1dfadb096ae0d609ff1423ba1f573bad19fc7158ed47d08de5374fb8c29"} Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.372835 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-4bccq"] Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.383725 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-xgblw" event={"ID":"b3854c73-a5eb-4db0-8f25-ecdf90993761","Type":"ContainerStarted","Data":"377086b49c983b259442fa1c81950fcac3fbb2aaae0a1c044e59d12e5ba25d56"} Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.384010 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-xgblw" event={"ID":"b3854c73-a5eb-4db0-8f25-ecdf90993761","Type":"ContainerStarted","Data":"412c3e3a46222e9ba6f3e1e20451d2635cc22ec389e0171cdd63ccd74ce25dc0"} Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.384975 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-xgblw" Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.386759 4982 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-xgblw container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.398168 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-xgblw" podUID="b3854c73-a5eb-4db0-8f25-ecdf90993761" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.400119 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-vzjlc"] Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.403888 
4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wm77c"] Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.419318 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l69z9"] Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.428922 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-lm8gn" podStartSLOduration=128.428896101 podStartE2EDuration="2m8.428896101s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:10.413376444 +0000 UTC m=+151.252014447" watchObservedRunningTime="2026-01-22 05:48:10.428896101 +0000 UTC m=+151.267534104" Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.436528 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-lmpt5" event={"ID":"4e394150-d9e9-43f0-99ae-036e0e8de268","Type":"ContainerStarted","Data":"240a462b99c51c4626b8ca64895ef96adfc4ad7fb7786782cdad6c87e4f38f91"} Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.436594 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-lmpt5" event={"ID":"4e394150-d9e9-43f0-99ae-036e0e8de268","Type":"ContainerStarted","Data":"4ec86e3edc6be26fb02f351d98f9e685f1e3a1f813ab5d74cdf5573a9bdc2f99"} Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.437805 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:48:10 crc kubenswrapper[4982]: E0122 05:48:10.438319 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:48:10.938290437 +0000 UTC m=+151.776928430 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.438441 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:10 crc kubenswrapper[4982]: E0122 05:48:10.440079 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2026-01-22 05:48:10.940065104 +0000 UTC m=+151.778703107 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-g89zh" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.446534 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-knbtr"] Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.449506 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-xrrnp"] Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.452294 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-xgblw" podStartSLOduration=128.452272123 podStartE2EDuration="2m8.452272123s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:10.451194035 +0000 UTC m=+151.289832038" watchObservedRunningTime="2026-01-22 05:48:10.452272123 +0000 UTC m=+151.290910126" Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.479908 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jtzgr" event={"ID":"c1c37351-6524-4846-82a5-5174a5254291","Type":"ContainerStarted","Data":"d2e6ff5a75290f9fc519e237fd98c03126cc691a4bec621cbf42cae0360691ad"} Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.479957 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jtzgr" event={"ID":"c1c37351-6524-4846-82a5-5174a5254291","Type":"ContainerStarted","Data":"e3602d8137f7414be1cb27d0b4baeea1996f1b10f5c1945225ebdcffc2162254"} Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.537017 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484345-fvh4j"] Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.549688 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-b7nkb" event={"ID":"c703594d-da70-4260-a7a8-dd6b0ebf270a","Type":"ContainerStarted","Data":"c7344da9a8d4de5f4e4c3ecfc6311dde02d277922ff73b27ad783fead5143178"} Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.549759 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-b7nkb" event={"ID":"c703594d-da70-4260-a7a8-dd6b0ebf270a","Type":"ContainerStarted","Data":"99d2a1b527ae9178051520c5e66d34c1ddf1d6149120a3a7d32cf895610ee1f2"} Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.554526 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-d9fnw"] Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.556409 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jtzgr" podStartSLOduration=128.556388699 podStartE2EDuration="2m8.556388699s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:10.537834474 +0000 UTC m=+151.376472497" watchObservedRunningTime="2026-01-22 05:48:10.556388699 +0000 UTC m=+151.395026702" Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.562467 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:48:10 crc kubenswrapper[4982]: E0122 05:48:10.570554 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:48:11.063883466 +0000 UTC m=+151.902521479 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.571723 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-q5lsn" event={"ID":"aa891b32-0d9c-4409-a53b-3d12d4e4edbc","Type":"ContainerStarted","Data":"c53b3acc87eeaed7d2a4972e724038b863b62694d5794603991b43e6523cebdf"} Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.575999 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-szxtb"] Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.621010 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-v2wz9"] Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.640749 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj7hd" event={"ID":"702c7842-d8b6-4b21-a805-e994ce070a62","Type":"ContainerStarted","Data":"0849811322c7cf8d35f53a6582beacae58a8fc64327906cd120ea56eea3f4674"} Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.640820 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj7hd" event={"ID":"702c7842-d8b6-4b21-a805-e994ce070a62","Type":"ContainerStarted","Data":"a85d78687e91ed4985dfed4010e8661c65bdf4a6bba88ca81574707015de0732"} Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.665518 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: 
\"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:10 crc kubenswrapper[4982]: E0122 05:48:10.665823 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 05:48:11.165812314 +0000 UTC m=+152.004450307 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-g89zh" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.688587 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vh8zk"] Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.694201 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-bchfc" event={"ID":"83eac823-fad9-4de9-8f32-97bb096128c7","Type":"ContainerStarted","Data":"796506cae7c09a8b0d98fdb81ad33488c817cdda2c439e24eddc92130fd4cc29"} Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.701430 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj7hd" podStartSLOduration=128.701398406 podStartE2EDuration="2m8.701398406s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:10.685601462 +0000 UTC m=+151.524239475" watchObservedRunningTime="2026-01-22 05:48:10.701398406 +0000 UTC m=+151.540036409" Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.720719 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-t8h5w"] Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.740224 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6zct7" event={"ID":"1001b154-4839-4c44-a79b-2be8fcbfb706","Type":"ContainerStarted","Data":"23260cfebccf3a5aacbd5a2ceec0c5dc0f5eada68d0fa19adffadc3fbb5cea7b"} Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.742762 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kn79s" event={"ID":"343b42fa-997d-498f-ade5-4fefffd26aba","Type":"ContainerStarted","Data":"5d2e561fbe7f8690712932371678fb58d97830db562acffd349f6d3c3ef22495"} Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.742790 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kn79s" event={"ID":"343b42fa-997d-498f-ade5-4fefffd26aba","Type":"ContainerStarted","Data":"7f4b1086c46ac6b1c9df36c6a168c27afa06294b1b052096663fb8d517225ab8"} Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.764127 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xg47r"] Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 
05:48:10.766555 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:48:10 crc kubenswrapper[4982]: E0122 05:48:10.767976 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:48:11.267958739 +0000 UTC m=+152.106596742 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.777370 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6d42n" event={"ID":"b89f2578-d195-48ea-aa15-009d36b7f6d9","Type":"ContainerStarted","Data":"48415f234e2f24c5efb2fc6b62c1eb2d3404f4e82456c5ddb28eb0ad4705fe26"} Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.784876 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-8mjxt" event={"ID":"0dd095d4-6799-49eb-ac92-d2b8c31b11dc","Type":"ContainerStarted","Data":"2ec3387534ef2dfb33f1a9e8be6dcab4d7c8865a1662dcf9aa7f4dd7f32678bc"} Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.791899 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kn79s" podStartSLOduration=128.791866735 podStartE2EDuration="2m8.791866735s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:10.782764277 +0000 UTC m=+151.621402300" watchObservedRunningTime="2026-01-22 05:48:10.791866735 +0000 UTC m=+151.630504738" Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.797050 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-sjh6j" Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.869826 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:10 crc kubenswrapper[4982]: E0122 05:48:10.877771 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 05:48:11.377752544 +0000 UTC m=+152.216390547 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-g89zh" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:10 crc kubenswrapper[4982]: I0122 05:48:10.971960 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:48:10 crc kubenswrapper[4982]: E0122 05:48:10.972541 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:48:11.472517786 +0000 UTC m=+152.311155799 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:11 crc kubenswrapper[4982]: I0122 05:48:11.075499 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:11 crc kubenswrapper[4982]: E0122 05:48:11.075914 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 05:48:11.575897803 +0000 UTC m=+152.414535806 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-g89zh" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:11 crc kubenswrapper[4982]: I0122 05:48:11.106180 4982 patch_prober.go:28] interesting pod/router-default-5444994796-q5lsn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 05:48:11 crc kubenswrapper[4982]: [-]has-synced failed: reason withheld Jan 22 05:48:11 crc kubenswrapper[4982]: [+]process-running ok Jan 22 05:48:11 crc kubenswrapper[4982]: healthz check failed Jan 22 05:48:11 crc kubenswrapper[4982]: I0122 05:48:11.107455 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q5lsn" podUID="aa891b32-0d9c-4409-a53b-3d12d4e4edbc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 05:48:11 crc kubenswrapper[4982]: E0122 05:48:11.179636 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:48:11.679608468 +0000 UTC m=+152.518246471 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:11 crc kubenswrapper[4982]: I0122 05:48:11.179513 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:48:11 crc kubenswrapper[4982]: I0122 05:48:11.181158 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:11 crc kubenswrapper[4982]: E0122 05:48:11.181740 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 05:48:11.681715513 +0000 UTC m=+152.520353516 (durationBeforeRetry 500ms). 
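The probe failures threaded through these entries are ordinary HTTP probes observed before the containers are serving: marketplace-operator's readiness probe gets a refused TCP connection, while router-default's startup probe gets a 500 from /healthz whose body carries the "[-]backend-http failed" / "[-]has-synced failed" detail. A minimal sketch of such a probe follows; the URL is the one logged for marketplace-operator, and the success rule (any 2xx/3xx status) matches Kubernetes HTTP probe semantics.

```go
// Minimal HTTP probe sketch: a transport error (e.g. "connect: connection
// refused") or a non-2xx/3xx status is a failure; the start of the response
// body is kept for the failure output, as in the log above.
package main

import (
	"fmt"
	"io"
	"net/http"
	"time"
)

func probe(url string) error {
	client := &http.Client{Timeout: time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return err // e.g. "connect: connection refused" before the server is up
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(io.LimitReader(resp.Body, 1024)) // start-of-body, as logged
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("HTTP probe failed with statuscode: %d (%s)", resp.StatusCode, body)
	}
	return nil
}

func main() {
	fmt.Println(probe("http://10.217.0.30:8080/healthz")) // address from the log
}
```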
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-g89zh" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
[... identical operationExecutor.UnmountVolume / operationExecutor.MountVolume retry pairs for this volume at roughly 100 ms intervals between 05:48:11.282931 and 05:48:11.699136 omitted; each fails with the same "driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers" error and is rescheduled with durationBeforeRetry 500ms ...]
Jan 22 05:48:11 crc kubenswrapper[4982]: I0122 05:48:11.800506 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh"
Jan 22 05:48:11 crc kubenswrapper[4982]: E0122 05:48:11.801577 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 05:48:12.301526623 +0000 UTC m=+153.140164626 (durationBeforeRetry 500ms).
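
Note: the entries above show the kubelet's volume manager rescheduling the same TearDown/MountDevice operations every 500 ms because the CSI driver has not yet registered with the node. A minimal Go sketch of that retry-until-registered pattern; this is an illustration only, not the kubelet's actual nestedpendingoperations code, and mountDevice/driverRegistered are invented names:

```go
package main

import (
	"fmt"
	"sync/atomic"
	"time"
)

var driverRegistered atomic.Bool // flipped once the plugin watcher sees the driver's socket

// mountDevice stands in for attacher.MountDevice, which fails while the
// driver is missing from the kubelet's CSI driver registry.
func mountDevice(driver string) error {
	if !driverRegistered.Load() {
		return fmt.Errorf("driver name %s not found in the list of registered CSI drivers", driver)
	}
	return nil
}

func main() {
	// Simulate registration arriving ~2s in, as it does at 05:48:12.80 in the log.
	go func() {
		time.Sleep(2 * time.Second)
		driverRegistered.Store(true)
	}()

	const durationBeforeRetry = 500 * time.Millisecond // matches the log entries
	for {
		if err := mountDevice("kubevirt.io.hostpath-provisioner"); err != nil {
			fmt.Printf("failed. No retries permitted until %s: %v\n",
				time.Now().Add(durationBeforeRetry).Format(time.RFC3339Nano), err)
			time.Sleep(durationBeforeRetry)
			continue
		}
		fmt.Println("MountVolume.MountDevice succeeded")
		return
	}
}
```
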
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-g89zh" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:11 crc kubenswrapper[4982]: I0122 05:48:11.839621 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6zct7" event={"ID":"1001b154-4839-4c44-a79b-2be8fcbfb706","Type":"ContainerStarted","Data":"8e130919eead2891b0fea0096c7881ab0a5b06e34350ddb6b8414a6972cb4f01"} Jan 22 05:48:11 crc kubenswrapper[4982]: I0122 05:48:11.842444 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4bccq" event={"ID":"641c7d26-8c8d-41a7-ad9c-66e9807c7f8e","Type":"ContainerStarted","Data":"f7bef58f67138597a89dcff2d3b50c4911e1d6b889593afe81bd6deb3d94f887"} Jan 22 05:48:11 crc kubenswrapper[4982]: I0122 05:48:11.842505 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4bccq" event={"ID":"641c7d26-8c8d-41a7-ad9c-66e9807c7f8e","Type":"ContainerStarted","Data":"14c14814156ad6674ce92dfeb6b967a58069e8504b1b2251a666d43d62cf9f7b"} Jan 22 05:48:11 crc kubenswrapper[4982]: I0122 05:48:11.855552 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dq4hk" event={"ID":"9e0822f5-3b74-4cf5-93f7-4baa345bb385","Type":"ContainerStarted","Data":"064a58631fbc437ff256f17609a6f7d004d5fcd31d9405bff10d2b0f83217f74"} Jan 22 05:48:11 crc kubenswrapper[4982]: I0122 05:48:11.880494 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xg47r" event={"ID":"efeb33e7-2fb0-4299-8759-4ce51405ce82","Type":"ContainerStarted","Data":"f3eedf87df7553da5e418e3daaf08d77184814cbf90ba327a0a2cddd588fafff"} Jan 22 05:48:11 crc kubenswrapper[4982]: I0122 05:48:11.880564 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xg47r" event={"ID":"efeb33e7-2fb0-4299-8759-4ce51405ce82","Type":"ContainerStarted","Data":"3f0419cebb76139a7e7625b90682d4ec82e1e259e4152c32e7d9c49209c4f53d"} Jan 22 05:48:11 crc kubenswrapper[4982]: I0122 05:48:11.881671 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xg47r" Jan 22 05:48:11 crc kubenswrapper[4982]: I0122 05:48:11.884312 4982 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-xg47r container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:5443/healthz\": dial tcp 10.217.0.29:5443: connect: connection refused" start-of-body= Jan 22 05:48:11 crc kubenswrapper[4982]: I0122 05:48:11.884381 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xg47r" podUID="efeb33e7-2fb0-4299-8759-4ce51405ce82" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.29:5443/healthz\": dial tcp 10.217.0.29:5443: connect: connection refused" Jan 22 05:48:11 crc kubenswrapper[4982]: I0122 
05:48:11.897180 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6zct7" podStartSLOduration=129.897156777 podStartE2EDuration="2m9.897156777s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:11.893384517 +0000 UTC m=+152.732022530" watchObservedRunningTime="2026-01-22 05:48:11.897156777 +0000 UTC m=+152.735794780" Jan 22 05:48:11 crc kubenswrapper[4982]: I0122 05:48:11.905063 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:48:11 crc kubenswrapper[4982]: E0122 05:48:11.906289 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:48:12.406269775 +0000 UTC m=+153.244907778 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:11 crc kubenswrapper[4982]: I0122 05:48:11.974050 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-vzjlc" event={"ID":"bf158f64-3928-408e-bd28-19eef4de1e71","Type":"ContainerStarted","Data":"4bfff489f69e30973ec4ca279718bcce8e02e17c8c85952c4e3bf32ef4a76c98"} Jan 22 05:48:11 crc kubenswrapper[4982]: I0122 05:48:11.974108 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-vzjlc" event={"ID":"bf158f64-3928-408e-bd28-19eef4de1e71","Type":"ContainerStarted","Data":"25169ac82273c63aa82f6f33fad5791c8b2d9300e6d66adb616abbe27d566121"} Jan 22 05:48:11 crc kubenswrapper[4982]: I0122 05:48:11.974725 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xg47r" podStartSLOduration=129.974679746 podStartE2EDuration="2m9.974679746s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:11.973460234 +0000 UTC m=+152.812098237" watchObservedRunningTime="2026-01-22 05:48:11.974679746 +0000 UTC m=+152.813317749" Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.006873 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:12 crc kubenswrapper[4982]: 
E0122 05:48:12.007377 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 05:48:12.507361032 +0000 UTC m=+153.345999035 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-g89zh" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.016453 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wm77c" event={"ID":"7baa2dd1-c798-4cc6-bd35-d969e2dd0872","Type":"ContainerStarted","Data":"8ea4b85eec621c0c1238c5b74d781a01558d68b1bde2b0b0b4a6e774b949404a"} Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.016624 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wm77c" event={"ID":"7baa2dd1-c798-4cc6-bd35-d969e2dd0872","Type":"ContainerStarted","Data":"c47b0269205edf84f70237a2c9a043f9babf6ad6e560ff0adc1d7f99c328e2ad"} Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.017868 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wm77c" Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.022003 4982 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-wm77c container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.34:8443/healthz\": dial tcp 10.217.0.34:8443: connect: connection refused" start-of-body= Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.022094 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wm77c" podUID="7baa2dd1-c798-4cc6-bd35-d969e2dd0872" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.34:8443/healthz\": dial tcp 10.217.0.34:8443: connect: connection refused" Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.024212 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l69z9" event={"ID":"ac6d4617-2a46-45b8-884f-e32e41ca5689","Type":"ContainerStarted","Data":"a9b480b99fb10ecda5c5fb441ff55c0550b28f8bc89a5b9dbd77ecc348fb2d31"} Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.024722 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l69z9" event={"ID":"ac6d4617-2a46-45b8-884f-e32e41ca5689","Type":"ContainerStarted","Data":"54851f09f42965cb37ef861a854215a51a363b166017fd5d3d60b4ef612b36e7"} Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.033004 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vh8zk" event={"ID":"dbc18849-6eed-4691-adf0-38acbf522afb","Type":"ContainerStarted","Data":"41faa7c49d3c357a1f4ec55fa1109505b02e9d661390bbdcc63eed1119d2f653"} Jan 22 05:48:12 crc kubenswrapper[4982]: 
I0122 05:48:12.033100 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vh8zk" event={"ID":"dbc18849-6eed-4691-adf0-38acbf522afb","Type":"ContainerStarted","Data":"f6e5779e9fa4c93989f584b4bbf982c578ddb30401d7e2b3fa8f5f847d74e262"} Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.034193 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vh8zk" Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.035153 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d9fnw" event={"ID":"352bc8b4-5f52-41c7-894e-42eef4f5b073","Type":"ContainerStarted","Data":"e87a2855addb75e1f684c87eb93a2a47115acdff9ee775aa61982ab28ad1e31f"} Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.035198 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d9fnw" event={"ID":"352bc8b4-5f52-41c7-894e-42eef4f5b073","Type":"ContainerStarted","Data":"25430cc07fa6644cab4b910921d232ff6dee2ebd71670254877711f0993a5a6d"} Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.036978 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-t8h5w" event={"ID":"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f","Type":"ContainerStarted","Data":"4060fd03f3f1f692d6d81ce29ea65b0324d9000ef73153738ae11766f9ebb2ce"} Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.041171 4982 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-vh8zk container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:8443/healthz\": dial tcp 10.217.0.39:8443: connect: connection refused" start-of-body= Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.041251 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vh8zk" podUID="dbc18849-6eed-4691-adf0-38acbf522afb" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.39:8443/healthz\": dial tcp 10.217.0.39:8443: connect: connection refused" Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.044104 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484345-fvh4j" event={"ID":"84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf","Type":"ContainerStarted","Data":"d6457f592a30939a148971496a64879ea48cdf141d58e9a64d5571c0020e1282"} Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.044140 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484345-fvh4j" event={"ID":"84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf","Type":"ContainerStarted","Data":"ef81488334181c61f6f67f8c2d59eb858dea46683c8da618558c1293e0188943"} Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.058307 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wm77c" podStartSLOduration=130.058283815 podStartE2EDuration="2m10.058283815s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:12.053960353 +0000 UTC m=+152.892598366" watchObservedRunningTime="2026-01-22 05:48:12.058283815 +0000 UTC m=+152.896921818" 
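
Note: the "connect: connection refused" readiness failures for packageserver, catalog-operator and olm-operator above are the normal window between a container starting and its HTTPS listener binding; each pod flips to ready moments later (see the status="ready" entries further down). A readiness probe boils down to a single timed GET; a simplified Go sketch (the kubelet's real prober lives in prober.go, treats HTTP status outside 200-399 as failure, and skips TLS verification for HTTPS probes):

```go
package main

import (
	"crypto/tls"
	"fmt"
	"net/http"
	"time"
)

// probe issues one GET the way a readiness probe does: any transport error
// (e.g. "connect: connection refused" while the server is still starting)
// or a status outside 200-399 counts as a failure.
func probe(url string) error {
	client := &http.Client{
		Timeout: time.Second,
		Transport: &http.Transport{
			// HTTPS probes do not verify the serving certificate.
			TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
		},
	}
	resp, err := client.Get(url)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("HTTP probe failed with statuscode: %d", resp.StatusCode)
	}
	return nil
}

func main() {
	// Endpoint taken from the packageserver probe entries above.
	fmt.Println(probe("https://10.217.0.29:5443/healthz"))
}
```
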
Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.095660 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vh8zk" podStartSLOduration=130.095639654 podStartE2EDuration="2m10.095639654s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:12.09359251 +0000 UTC m=+152.932230523" watchObservedRunningTime="2026-01-22 05:48:12.095639654 +0000 UTC m=+152.934277657" Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.097887 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-knbtr" event={"ID":"279ad2a6-b800-4838-a0d6-73ffaaa6f824","Type":"ContainerStarted","Data":"58f5b1e878bb44e1191e0148d68f2422adff7ed468cdf093da763300d7817e44"} Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.097950 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-knbtr" event={"ID":"279ad2a6-b800-4838-a0d6-73ffaaa6f824","Type":"ContainerStarted","Data":"5629071e5a5f2247b75de51caca37e8156c2eb395da4815b1943d19b9750fc76"} Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.104773 4982 patch_prober.go:28] interesting pod/router-default-5444994796-q5lsn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 05:48:12 crc kubenswrapper[4982]: [-]has-synced failed: reason withheld Jan 22 05:48:12 crc kubenswrapper[4982]: [+]process-running ok Jan 22 05:48:12 crc kubenswrapper[4982]: healthz check failed Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.105139 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q5lsn" podUID="aa891b32-0d9c-4409-a53b-3d12d4e4edbc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.107189 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-szxtb" event={"ID":"bfedb5ab-3262-41f1-967d-845bb0df711e","Type":"ContainerStarted","Data":"c6a18e79cef86b517d5db623a6a65fe0b056ca08c939b56f8ec3eebf51ac11b1"} Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.107255 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-szxtb" event={"ID":"bfedb5ab-3262-41f1-967d-845bb0df711e","Type":"ContainerStarted","Data":"3795eb84d7ab01b5cd76ea43c5c9c194778d2c808b32e1ef981a36606873d031"} Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.107872 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:48:12 crc kubenswrapper[4982]: E0122 05:48:12.108504 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-22 05:48:12.608467909 +0000 UTC m=+153.447105912 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.116377 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:12 crc kubenswrapper[4982]: E0122 05:48:12.117689 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 05:48:12.61766442 +0000 UTC m=+153.456302423 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-g89zh" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.135308 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-l69z9" podStartSLOduration=130.135278262 podStartE2EDuration="2m10.135278262s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:12.124544111 +0000 UTC m=+152.963182114" watchObservedRunningTime="2026-01-22 05:48:12.135278262 +0000 UTC m=+152.973916265" Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.164978 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-xrrnp" event={"ID":"0c85d6b4-ae35-4226-ae70-8ee0f059fb80","Type":"ContainerStarted","Data":"4831999ba744b7eb10ee2550c13daa329e549de061c30f8dc1ead301df7c3568"} Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.165053 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-xrrnp" event={"ID":"0c85d6b4-ae35-4226-ae70-8ee0f059fb80","Type":"ContainerStarted","Data":"1043183816d6ddc4913d529cce54d79171edcf332532656a04e3f07112ce1664"} Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.167540 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29484345-fvh4j" podStartSLOduration=131.167522406 podStartE2EDuration="2m11.167522406s" podCreationTimestamp="2026-01-22 05:46:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 
+0000 UTC" observedRunningTime="2026-01-22 05:48:12.164578209 +0000 UTC m=+153.003216212" watchObservedRunningTime="2026-01-22 05:48:12.167522406 +0000 UTC m=+153.006160409" Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.224499 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:48:12 crc kubenswrapper[4982]: E0122 05:48:12.225125 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:48:12.725078123 +0000 UTC m=+153.563716136 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.242264 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:12 crc kubenswrapper[4982]: E0122 05:48:12.242949 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 05:48:12.74293761 +0000 UTC m=+153.581575613 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-g89zh" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.260104 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-8mjxt" event={"ID":"0dd095d4-6799-49eb-ac92-d2b8c31b11dc","Type":"ContainerStarted","Data":"ac71b166f6d181519e11cd23187647991d550d4c7cccbd44e2ce938f7782295d"} Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.302912 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-knbtr" podStartSLOduration=8.302885411 podStartE2EDuration="8.302885411s" podCreationTimestamp="2026-01-22 05:48:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:12.206954198 +0000 UTC m=+153.045592221" watchObservedRunningTime="2026-01-22 05:48:12.302885411 +0000 UTC m=+153.141523414" Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.311586 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-8mjxt" podStartSLOduration=130.311557098 podStartE2EDuration="2m10.311557098s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:12.308709953 +0000 UTC m=+153.147347956" watchObservedRunningTime="2026-01-22 05:48:12.311557098 +0000 UTC m=+153.150195121" Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.348390 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:48:12 crc kubenswrapper[4982]: E0122 05:48:12.350127 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:48:12.850096246 +0000 UTC m=+153.688734239 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.359899 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-bchfc" event={"ID":"83eac823-fad9-4de9-8f32-97bb096128c7","Type":"ContainerStarted","Data":"a4813be31a821dfcac646c5cab3c8d4da0492acbb9c9ada1306ad4a9de42f12f"} Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.360262 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-bchfc" Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.392582 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-v2wz9" event={"ID":"a0a38dd0-b500-4109-a933-2e517ee78cc5","Type":"ContainerStarted","Data":"2e4c5d4ea2a63af22987e4dadccf15796a3c559ec44ff01bcc202287d57539e0"} Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.408534 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6d42n" event={"ID":"b89f2578-d195-48ea-aa15-009d36b7f6d9","Type":"ContainerStarted","Data":"b00970313271356ca2e35d5a207ef64257f81e4673d42bf71679535402b598eb"} Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.412485 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-b7nkb" event={"ID":"c703594d-da70-4260-a7a8-dd6b0ebf270a","Type":"ContainerStarted","Data":"eae34b6560b32bbe1243d2d44a51fd6c37b3e0692c9895cb0ab6aa7e785b8d6f"} Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.412678 4982 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-xgblw container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.412731 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-xgblw" podUID="b3854c73-a5eb-4db0-8f25-ecdf90993761" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.428623 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-bchfc" podStartSLOduration=8.428602982 podStartE2EDuration="8.428602982s" podCreationTimestamp="2026-01-22 05:48:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:12.42738405 +0000 UTC m=+153.266022053" watchObservedRunningTime="2026-01-22 05:48:12.428602982 +0000 UTC m=+153.267240985" Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.450942 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") 
pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:12 crc kubenswrapper[4982]: E0122 05:48:12.456472 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 05:48:12.956442751 +0000 UTC m=+153.795080924 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-g89zh" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.468249 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6d42n" podStartSLOduration=130.46822613 podStartE2EDuration="2m10.46822613s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:12.46554131 +0000 UTC m=+153.304179313" watchObservedRunningTime="2026-01-22 05:48:12.46822613 +0000 UTC m=+153.306864133" Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.530163 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-b7nkb" podStartSLOduration=130.530129671 podStartE2EDuration="2m10.530129671s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:12.519969744 +0000 UTC m=+153.358607747" watchObservedRunningTime="2026-01-22 05:48:12.530129671 +0000 UTC m=+153.368767674" Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.556401 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:48:12 crc kubenswrapper[4982]: E0122 05:48:12.556844 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:48:13.056824069 +0000 UTC m=+153.895462072 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.559351 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-v2wz9" podStartSLOduration=130.559332415 podStartE2EDuration="2m10.559332415s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:12.556673975 +0000 UTC m=+153.395311978" watchObservedRunningTime="2026-01-22 05:48:12.559332415 +0000 UTC m=+153.397970408" Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.658555 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:12 crc kubenswrapper[4982]: E0122 05:48:12.658993 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 05:48:13.158974675 +0000 UTC m=+153.997612668 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-g89zh" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.760115 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:48:12 crc kubenswrapper[4982]: E0122 05:48:12.760318 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:48:13.260287287 +0000 UTC m=+154.098925290 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.760510 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:12 crc kubenswrapper[4982]: E0122 05:48:12.761553 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 05:48:13.261535259 +0000 UTC m=+154.100173262 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-g89zh" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.804954 4982 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.862360 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:48:12 crc kubenswrapper[4982]: E0122 05:48:12.862521 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 05:48:13.362490583 +0000 UTC m=+154.201128586 (durationBeforeRetry 500ms). 
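
Note: the plugin_watcher entry above is the turning point: the hostpath provisioner's registration socket has appeared under /var/lib/kubelet/plugins_registry, so the kubelet can finally register the driver that all the pending mount/unmount retries have been waiting for. The kubelet watches that directory for new sockets (it uses fsnotify internally); a minimal sketch of such a watch loop, illustrative rather than the kubelet's actual pluginwatcher code:

```go
package main

import (
	"log"
	"path/filepath"

	"github.com/fsnotify/fsnotify"
)

func main() {
	// The kubelet's plugin watcher observes this directory for new
	// registration sockets; this sketch only mirrors the idea.
	const pluginsRegistry = "/var/lib/kubelet/plugins_registry"

	w, err := fsnotify.NewWatcher()
	if err != nil {
		log.Fatal(err)
	}
	defer w.Close()
	if err := w.Add(pluginsRegistry); err != nil {
		log.Fatal(err)
	}

	for event := range w.Events {
		if event.Op&fsnotify.Create != 0 && filepath.Ext(event.Name) == ".sock" {
			// Here the real kubelet records the path in its desired state
			// cache and later dials the socket to run plugin registration.
			log.Printf("Adding socket path to desired state cache path=%q", event.Name)
		}
	}
}
```
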
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.862764 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:12 crc kubenswrapper[4982]: E0122 05:48:12.863137 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 05:48:13.36312222 +0000 UTC m=+154.201760223 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-g89zh" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.904890 4982 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-01-22T05:48:12.804992048Z","Handler":null,"Name":""} Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.907429 4982 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.907479 4982 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.964414 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 05:48:12 crc kubenswrapper[4982]: I0122 05:48:12.974612 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
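
Note: the RegisterPlugin/csi_plugin entries above complete the handshake: the kubelet dials the registration socket, asks the plugin for its info, and records kubevirt.io.hostpath-provisioner with its CSI endpoint and supported version 1.0.0. Below is a sketch of the plugin side of that handshake, roughly what the node-driver-registrar sidecar does against the kubelet pluginregistration v1 API; the socket paths, driver name and version are taken from the log, the rest is an assumption-laden illustration:

```go
package main

import (
	"context"
	"log"
	"net"

	"google.golang.org/grpc"
	registerapi "k8s.io/kubelet/pkg/apis/pluginregistration/v1"
)

// server answers the kubelet's plugin-registration handshake the way the
// node-driver-registrar sidecar does for a CSI driver.
type server struct{}

func (server) GetInfo(ctx context.Context, r *registerapi.InfoRequest) (*registerapi.PluginInfo, error) {
	return &registerapi.PluginInfo{
		Type:              registerapi.CSIPlugin,
		Name:              "kubevirt.io.hostpath-provisioner",
		Endpoint:          "/var/lib/kubelet/plugins/csi-hostpath/csi.sock",
		SupportedVersions: []string{"1.0.0"},
	}, nil
}

func (server) NotifyRegistrationStatus(ctx context.Context, st *registerapi.RegistrationStatus) (*registerapi.RegistrationStatusResponse, error) {
	log.Printf("kubelet reports registered=%v err=%q", st.PluginRegistered, st.Error)
	return &registerapi.RegistrationStatusResponse{}, nil
}

func main() {
	lis, err := net.Listen("unix", "/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock")
	if err != nil {
		log.Fatal(err)
	}
	g := grpc.NewServer()
	registerapi.RegisterRegistrationServer(g, server{})
	log.Fatal(g.Serve(lis))
}
```

With the driver registered, the pending TearDown finally succeeds and the image-registry mount goes through; the later "attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice" entry means this driver does not advertise staging, so the kubelet skips NodeStageVolume and publishes the volume into the pod directly.
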
PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 22 05:48:13 crc kubenswrapper[4982]: I0122 05:48:13.071662 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:13 crc kubenswrapper[4982]: I0122 05:48:13.091500 4982 patch_prober.go:28] interesting pod/router-default-5444994796-q5lsn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 05:48:13 crc kubenswrapper[4982]: [-]has-synced failed: reason withheld Jan 22 05:48:13 crc kubenswrapper[4982]: [+]process-running ok Jan 22 05:48:13 crc kubenswrapper[4982]: healthz check failed Jan 22 05:48:13 crc kubenswrapper[4982]: I0122 05:48:13.091581 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q5lsn" podUID="aa891b32-0d9c-4409-a53b-3d12d4e4edbc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 05:48:13 crc kubenswrapper[4982]: I0122 05:48:13.119970 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 22 05:48:13 crc kubenswrapper[4982]: I0122 05:48:13.120031 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:13 crc kubenswrapper[4982]: I0122 05:48:13.345683 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-g89zh\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:13 crc kubenswrapper[4982]: I0122 05:48:13.421494 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-szxtb" event={"ID":"bfedb5ab-3262-41f1-967d-845bb0df711e","Type":"ContainerStarted","Data":"f48b1e573c3dbad7a5f3a37a326a607f33a83dde8b18a4fde0aed5008789b18c"} Jan 22 05:48:13 crc kubenswrapper[4982]: I0122 05:48:13.423426 4982 generic.go:334] "Generic (PLEG): container finished" podID="3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f" containerID="cf560f7fd9fbd36c826f814b50e29b21b2571e99d24bfff17dff62f03abfd106" exitCode=0 Jan 22 05:48:13 crc kubenswrapper[4982]: I0122 05:48:13.423489 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-t8h5w" event={"ID":"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f","Type":"ContainerDied","Data":"cf560f7fd9fbd36c826f814b50e29b21b2571e99d24bfff17dff62f03abfd106"} Jan 22 05:48:13 crc 
kubenswrapper[4982]: I0122 05:48:13.424797 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-v2wz9" event={"ID":"a0a38dd0-b500-4109-a933-2e517ee78cc5","Type":"ContainerStarted","Data":"d48cc869cf815bea7f66dbbd34593573f9fe28257ebda2b8f754ea96d3655319"} Jan 22 05:48:13 crc kubenswrapper[4982]: I0122 05:48:13.427049 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4bccq" event={"ID":"641c7d26-8c8d-41a7-ad9c-66e9807c7f8e","Type":"ContainerStarted","Data":"c3c88e3957635f6579ac9ba3d6d8595633fe7fd5e7b21ddb497218bec6792a45"} Jan 22 05:48:13 crc kubenswrapper[4982]: I0122 05:48:13.429740 4982 generic.go:334] "Generic (PLEG): container finished" podID="84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf" containerID="d6457f592a30939a148971496a64879ea48cdf141d58e9a64d5571c0020e1282" exitCode=0 Jan 22 05:48:13 crc kubenswrapper[4982]: I0122 05:48:13.429834 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484345-fvh4j" event={"ID":"84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf","Type":"ContainerDied","Data":"d6457f592a30939a148971496a64879ea48cdf141d58e9a64d5571c0020e1282"} Jan 22 05:48:13 crc kubenswrapper[4982]: I0122 05:48:13.432299 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d9fnw" event={"ID":"352bc8b4-5f52-41c7-894e-42eef4f5b073","Type":"ContainerStarted","Data":"0926bb28ec95a9c266f0f212a0934ba67fb48f0ea0b1e609f828c613edb8f0b4"} Jan 22 05:48:13 crc kubenswrapper[4982]: I0122 05:48:13.434599 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dq4hk" event={"ID":"9e0822f5-3b74-4cf5-93f7-4baa345bb385","Type":"ContainerStarted","Data":"cd24d9d91a6cf999aa87acd62b3b5e498f0233bfe36b2f59127fa990f0885a86"} Jan 22 05:48:13 crc kubenswrapper[4982]: I0122 05:48:13.434790 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dq4hk" Jan 22 05:48:13 crc kubenswrapper[4982]: I0122 05:48:13.437037 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-lmpt5" event={"ID":"4e394150-d9e9-43f0-99ae-036e0e8de268","Type":"ContainerStarted","Data":"1ab886fd5b451b75520621da4ecc3ed4f3ef4675b2549f25cbfaed1e36f3a2ec"} Jan 22 05:48:13 crc kubenswrapper[4982]: I0122 05:48:13.437103 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-lmpt5" event={"ID":"4e394150-d9e9-43f0-99ae-036e0e8de268","Type":"ContainerStarted","Data":"8c74d121d9edab2eb23e46ccc3d21bb8eaf24358c4a041388396dcb0b33bd5e5"} Jan 22 05:48:13 crc kubenswrapper[4982]: I0122 05:48:13.441220 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-xrrnp" event={"ID":"0c85d6b4-ae35-4226-ae70-8ee0f059fb80","Type":"ContainerStarted","Data":"88e51ed470b1cb7b7152e4cc1996f5179d4220b3800a404b4f89ef025cea2077"} Jan 22 05:48:13 crc kubenswrapper[4982]: I0122 05:48:13.442469 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-szxtb" podStartSLOduration=131.442455829 podStartE2EDuration="2m11.442455829s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:13.442077559 +0000 UTC m=+154.280715562" watchObservedRunningTime="2026-01-22 05:48:13.442455829 +0000 UTC m=+154.281093832" Jan 22 05:48:13 crc kubenswrapper[4982]: I0122 05:48:13.456222 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-vzjlc" event={"ID":"bf158f64-3928-408e-bd28-19eef4de1e71","Type":"ContainerStarted","Data":"05183b5e9b3ee46a11c9f74c1e00a777c0f2f933a7ac1231f4f7b2f3bdb7b84c"} Jan 22 05:48:13 crc kubenswrapper[4982]: I0122 05:48:13.456438 4982 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-xgblw container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Jan 22 05:48:13 crc kubenswrapper[4982]: I0122 05:48:13.456481 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-xgblw" podUID="b3854c73-a5eb-4db0-8f25-ecdf90993761" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" Jan 22 05:48:13 crc kubenswrapper[4982]: I0122 05:48:13.508025 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vh8zk" Jan 22 05:48:13 crc kubenswrapper[4982]: I0122 05:48:13.519075 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wm77c" Jan 22 05:48:13 crc kubenswrapper[4982]: I0122 05:48:13.547205 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-xrrnp" podStartSLOduration=131.547180942 podStartE2EDuration="2m11.547180942s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:13.546673579 +0000 UTC m=+154.385311582" watchObservedRunningTime="2026-01-22 05:48:13.547180942 +0000 UTC m=+154.385818945" Jan 22 05:48:13 crc kubenswrapper[4982]: I0122 05:48:13.570648 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:13 crc kubenswrapper[4982]: I0122 05:48:13.700883 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dq4hk" podStartSLOduration=131.700859476 podStartE2EDuration="2m11.700859476s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:13.672674338 +0000 UTC m=+154.511312341" watchObservedRunningTime="2026-01-22 05:48:13.700859476 +0000 UTC m=+154.539497479" Jan 22 05:48:13 crc kubenswrapper[4982]: I0122 05:48:13.739596 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Jan 22 05:48:13 crc kubenswrapper[4982]: I0122 05:48:13.752992 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4bccq" podStartSLOduration=131.75296962 podStartE2EDuration="2m11.75296962s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:13.702765615 +0000 UTC m=+154.541403618" watchObservedRunningTime="2026-01-22 05:48:13.75296962 +0000 UTC m=+154.591607623" Jan 22 05:48:13 crc kubenswrapper[4982]: I0122 05:48:13.753105 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d9fnw" podStartSLOduration=131.753101313 podStartE2EDuration="2m11.753101313s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:13.751992485 +0000 UTC m=+154.590630508" watchObservedRunningTime="2026-01-22 05:48:13.753101313 +0000 UTC m=+154.591739316" Jan 22 05:48:13 crc kubenswrapper[4982]: I0122 05:48:13.854928 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-vzjlc" podStartSLOduration=131.85490543 podStartE2EDuration="2m11.85490543s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:13.85150846 +0000 UTC m=+154.690146463" watchObservedRunningTime="2026-01-22 05:48:13.85490543 +0000 UTC m=+154.693543433" Jan 22 05:48:13 crc kubenswrapper[4982]: I0122 05:48:13.865516 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xg47r" Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.114150 4982 patch_prober.go:28] interesting pod/router-default-5444994796-q5lsn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 05:48:14 crc kubenswrapper[4982]: [-]has-synced failed: reason withheld Jan 22 05:48:14 crc kubenswrapper[4982]: [+]process-running ok Jan 22 05:48:14 crc kubenswrapper[4982]: healthz check failed Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.114657 4982 prober.go:107] "Probe 
failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q5lsn" podUID="aa891b32-0d9c-4409-a53b-3d12d4e4edbc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.376710 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-g89zh"] Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.468811 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" event={"ID":"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6","Type":"ContainerStarted","Data":"6b92ad6c28fdfc5ba440166f69d35f9724510eae7b7b7600d4eb86a93de7fb1a"} Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.485254 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-lmpt5" event={"ID":"4e394150-d9e9-43f0-99ae-036e0e8de268","Type":"ContainerStarted","Data":"e6c6ba0fac5cd980357704aa9b034f8dbce2ce2caca10d803b2378d26debb7f8"} Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.493365 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-t8h5w" event={"ID":"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f","Type":"ContainerStarted","Data":"d07bdee5c75e7eea6162fc362518d715ecbaf72c1b473416eaad0c62f8881c01"} Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.514338 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-lmpt5" podStartSLOduration=10.514315646 podStartE2EDuration="10.514315646s" podCreationTimestamp="2026-01-22 05:48:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:14.513623898 +0000 UTC m=+155.352261901" watchObservedRunningTime="2026-01-22 05:48:14.514315646 +0000 UTC m=+155.352953649" Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.761167 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4257l"] Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.764901 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4257l" Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.771327 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.781902 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4257l"] Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.813181 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rc8sn\" (UniqueName: \"kubernetes.io/projected/d8d13857-ec18-4e5c-aadf-61479ff0b6c2-kube-api-access-rc8sn\") pod \"certified-operators-4257l\" (UID: \"d8d13857-ec18-4e5c-aadf-61479ff0b6c2\") " pod="openshift-marketplace/certified-operators-4257l" Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.813548 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8d13857-ec18-4e5c-aadf-61479ff0b6c2-utilities\") pod \"certified-operators-4257l\" (UID: \"d8d13857-ec18-4e5c-aadf-61479ff0b6c2\") " pod="openshift-marketplace/certified-operators-4257l" Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.813649 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8d13857-ec18-4e5c-aadf-61479ff0b6c2-catalog-content\") pod \"certified-operators-4257l\" (UID: \"d8d13857-ec18-4e5c-aadf-61479ff0b6c2\") " pod="openshift-marketplace/certified-operators-4257l" Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.823617 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484345-fvh4j" Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.901430 4982 patch_prober.go:28] interesting pod/downloads-7954f5f757-j82cn container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.901523 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-j82cn" podUID="d07c1ac4-7087-47d2-b0d1-c3b840d4678a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.901427 4982 patch_prober.go:28] interesting pod/downloads-7954f5f757-j82cn container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.901602 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-j82cn" podUID="d07c1ac4-7087-47d2-b0d1-c3b840d4678a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.915908 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sr926\" (UniqueName: \"kubernetes.io/projected/84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf-kube-api-access-sr926\") pod \"84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf\" (UID: \"84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf\") " Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.916341 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf-secret-volume\") pod \"84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf\" (UID: \"84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf\") " Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.916482 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf-config-volume\") pod \"84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf\" (UID: \"84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf\") " Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.916670 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rc8sn\" (UniqueName: \"kubernetes.io/projected/d8d13857-ec18-4e5c-aadf-61479ff0b6c2-kube-api-access-rc8sn\") pod \"certified-operators-4257l\" (UID: \"d8d13857-ec18-4e5c-aadf-61479ff0b6c2\") " pod="openshift-marketplace/certified-operators-4257l" Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.916782 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8d13857-ec18-4e5c-aadf-61479ff0b6c2-utilities\") pod \"certified-operators-4257l\" (UID: \"d8d13857-ec18-4e5c-aadf-61479ff0b6c2\") " pod="openshift-marketplace/certified-operators-4257l" Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.916892 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/d8d13857-ec18-4e5c-aadf-61479ff0b6c2-catalog-content\") pod \"certified-operators-4257l\" (UID: \"d8d13857-ec18-4e5c-aadf-61479ff0b6c2\") " pod="openshift-marketplace/certified-operators-4257l" Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.917807 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8d13857-ec18-4e5c-aadf-61479ff0b6c2-catalog-content\") pod \"certified-operators-4257l\" (UID: \"d8d13857-ec18-4e5c-aadf-61479ff0b6c2\") " pod="openshift-marketplace/certified-operators-4257l" Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.917903 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8d13857-ec18-4e5c-aadf-61479ff0b6c2-utilities\") pod \"certified-operators-4257l\" (UID: \"d8d13857-ec18-4e5c-aadf-61479ff0b6c2\") " pod="openshift-marketplace/certified-operators-4257l" Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.917963 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf-config-volume" (OuterVolumeSpecName: "config-volume") pod "84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf" (UID: "84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.930334 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf" (UID: "84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.931620 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf-kube-api-access-sr926" (OuterVolumeSpecName: "kube-api-access-sr926") pod "84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf" (UID: "84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf"). InnerVolumeSpecName "kube-api-access-sr926". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.950959 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rc8sn\" (UniqueName: \"kubernetes.io/projected/d8d13857-ec18-4e5c-aadf-61479ff0b6c2-kube-api-access-rc8sn\") pod \"certified-operators-4257l\" (UID: \"d8d13857-ec18-4e5c-aadf-61479ff0b6c2\") " pod="openshift-marketplace/certified-operators-4257l" Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.954334 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dw5wf"] Jan 22 05:48:14 crc kubenswrapper[4982]: E0122 05:48:14.954555 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf" containerName="collect-profiles" Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.954573 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf" containerName="collect-profiles" Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.954684 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf" containerName="collect-profiles" Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.955460 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dw5wf" Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.960412 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 22 05:48:14 crc kubenswrapper[4982]: I0122 05:48:14.977331 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dw5wf"] Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.018021 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dcqf\" (UniqueName: \"kubernetes.io/projected/27d44766-c0c7-48dd-b00d-0fc5adeb8707-kube-api-access-6dcqf\") pod \"community-operators-dw5wf\" (UID: \"27d44766-c0c7-48dd-b00d-0fc5adeb8707\") " pod="openshift-marketplace/community-operators-dw5wf" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.018091 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27d44766-c0c7-48dd-b00d-0fc5adeb8707-utilities\") pod \"community-operators-dw5wf\" (UID: \"27d44766-c0c7-48dd-b00d-0fc5adeb8707\") " pod="openshift-marketplace/community-operators-dw5wf" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.018202 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27d44766-c0c7-48dd-b00d-0fc5adeb8707-catalog-content\") pod \"community-operators-dw5wf\" (UID: \"27d44766-c0c7-48dd-b00d-0fc5adeb8707\") " pod="openshift-marketplace/community-operators-dw5wf" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.018252 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sr926\" (UniqueName: \"kubernetes.io/projected/84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf-kube-api-access-sr926\") on node \"crc\" DevicePath \"\"" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.018266 4982 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf-secret-volume\") on 
node \"crc\" DevicePath \"\"" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.018278 4982 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.084347 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4257l" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.094440 4982 patch_prober.go:28] interesting pod/router-default-5444994796-q5lsn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 05:48:15 crc kubenswrapper[4982]: [-]has-synced failed: reason withheld Jan 22 05:48:15 crc kubenswrapper[4982]: [+]process-running ok Jan 22 05:48:15 crc kubenswrapper[4982]: healthz check failed Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.094550 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q5lsn" podUID="aa891b32-0d9c-4409-a53b-3d12d4e4edbc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.120132 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dcqf\" (UniqueName: \"kubernetes.io/projected/27d44766-c0c7-48dd-b00d-0fc5adeb8707-kube-api-access-6dcqf\") pod \"community-operators-dw5wf\" (UID: \"27d44766-c0c7-48dd-b00d-0fc5adeb8707\") " pod="openshift-marketplace/community-operators-dw5wf" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.120229 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27d44766-c0c7-48dd-b00d-0fc5adeb8707-utilities\") pod \"community-operators-dw5wf\" (UID: \"27d44766-c0c7-48dd-b00d-0fc5adeb8707\") " pod="openshift-marketplace/community-operators-dw5wf" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.120312 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27d44766-c0c7-48dd-b00d-0fc5adeb8707-catalog-content\") pod \"community-operators-dw5wf\" (UID: \"27d44766-c0c7-48dd-b00d-0fc5adeb8707\") " pod="openshift-marketplace/community-operators-dw5wf" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.121640 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27d44766-c0c7-48dd-b00d-0fc5adeb8707-catalog-content\") pod \"community-operators-dw5wf\" (UID: \"27d44766-c0c7-48dd-b00d-0fc5adeb8707\") " pod="openshift-marketplace/community-operators-dw5wf" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.121672 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27d44766-c0c7-48dd-b00d-0fc5adeb8707-utilities\") pod \"community-operators-dw5wf\" (UID: \"27d44766-c0c7-48dd-b00d-0fc5adeb8707\") " pod="openshift-marketplace/community-operators-dw5wf" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.142881 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-2rfct" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.144092 4982 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-2rfct" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.152041 4982 patch_prober.go:28] interesting pod/console-f9d7485db-2rfct container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body= Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.152113 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-2rfct" podUID="a9dca4eb-9076-4a32-a851-55d4649cdbf2" containerName="console" probeResult="failure" output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.167365 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dcqf\" (UniqueName: \"kubernetes.io/projected/27d44766-c0c7-48dd-b00d-0fc5adeb8707-kube-api-access-6dcqf\") pod \"community-operators-dw5wf\" (UID: \"27d44766-c0c7-48dd-b00d-0fc5adeb8707\") " pod="openshift-marketplace/community-operators-dw5wf" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.185818 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vprqb"] Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.186936 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vprqb" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.222418 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vprqb"] Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.270472 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dw5wf" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.325931 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8lw5\" (UniqueName: \"kubernetes.io/projected/09250e91-1a80-4d8c-86c4-0c3f1e84cfcf-kube-api-access-v8lw5\") pod \"certified-operators-vprqb\" (UID: \"09250e91-1a80-4d8c-86c4-0c3f1e84cfcf\") " pod="openshift-marketplace/certified-operators-vprqb" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.326026 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09250e91-1a80-4d8c-86c4-0c3f1e84cfcf-catalog-content\") pod \"certified-operators-vprqb\" (UID: \"09250e91-1a80-4d8c-86c4-0c3f1e84cfcf\") " pod="openshift-marketplace/certified-operators-vprqb" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.326095 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09250e91-1a80-4d8c-86c4-0c3f1e84cfcf-utilities\") pod \"certified-operators-vprqb\" (UID: \"09250e91-1a80-4d8c-86c4-0c3f1e84cfcf\") " pod="openshift-marketplace/certified-operators-vprqb" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.379658 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ns82k"] Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.380814 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ns82k" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.423243 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ns82k"] Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.427258 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09250e91-1a80-4d8c-86c4-0c3f1e84cfcf-utilities\") pod \"certified-operators-vprqb\" (UID: \"09250e91-1a80-4d8c-86c4-0c3f1e84cfcf\") " pod="openshift-marketplace/certified-operators-vprqb" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.427329 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctvts\" (UniqueName: \"kubernetes.io/projected/5da449d5-cc54-4f87-b50c-fd684f5aa487-kube-api-access-ctvts\") pod \"community-operators-ns82k\" (UID: \"5da449d5-cc54-4f87-b50c-fd684f5aa487\") " pod="openshift-marketplace/community-operators-ns82k" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.427378 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5da449d5-cc54-4f87-b50c-fd684f5aa487-catalog-content\") pod \"community-operators-ns82k\" (UID: \"5da449d5-cc54-4f87-b50c-fd684f5aa487\") " pod="openshift-marketplace/community-operators-ns82k" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.427401 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5da449d5-cc54-4f87-b50c-fd684f5aa487-utilities\") pod \"community-operators-ns82k\" (UID: \"5da449d5-cc54-4f87-b50c-fd684f5aa487\") " pod="openshift-marketplace/community-operators-ns82k" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.427445 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8lw5\" (UniqueName: \"kubernetes.io/projected/09250e91-1a80-4d8c-86c4-0c3f1e84cfcf-kube-api-access-v8lw5\") pod \"certified-operators-vprqb\" (UID: \"09250e91-1a80-4d8c-86c4-0c3f1e84cfcf\") " pod="openshift-marketplace/certified-operators-vprqb" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.427491 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09250e91-1a80-4d8c-86c4-0c3f1e84cfcf-catalog-content\") pod \"certified-operators-vprqb\" (UID: \"09250e91-1a80-4d8c-86c4-0c3f1e84cfcf\") " pod="openshift-marketplace/certified-operators-vprqb" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.428070 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09250e91-1a80-4d8c-86c4-0c3f1e84cfcf-catalog-content\") pod \"certified-operators-vprqb\" (UID: \"09250e91-1a80-4d8c-86c4-0c3f1e84cfcf\") " pod="openshift-marketplace/certified-operators-vprqb" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.428466 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09250e91-1a80-4d8c-86c4-0c3f1e84cfcf-utilities\") pod \"certified-operators-vprqb\" (UID: \"09250e91-1a80-4d8c-86c4-0c3f1e84cfcf\") " pod="openshift-marketplace/certified-operators-vprqb" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.461915 4982 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-v8lw5\" (UniqueName: \"kubernetes.io/projected/09250e91-1a80-4d8c-86c4-0c3f1e84cfcf-kube-api-access-v8lw5\") pod \"certified-operators-vprqb\" (UID: \"09250e91-1a80-4d8c-86c4-0c3f1e84cfcf\") " pod="openshift-marketplace/certified-operators-vprqb" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.507576 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-t8h5w" event={"ID":"3ef4a6b0-9952-42d7-ba4f-a92f6aa8505f","Type":"ContainerStarted","Data":"5f5c4a0d295d92622edea9c3a65c0e7e6c96a1a0a1c6be9ae94f685ee1b8c5a7"} Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.508992 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484345-fvh4j" event={"ID":"84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf","Type":"ContainerDied","Data":"ef81488334181c61f6f67f8c2d59eb858dea46683c8da618558c1293e0188943"} Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.509037 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ef81488334181c61f6f67f8c2d59eb858dea46683c8da618558c1293e0188943" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.509014 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484345-fvh4j" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.514305 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" event={"ID":"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6","Type":"ContainerStarted","Data":"b7684f0177f2e1840c31f1cce8ce77df0704e9acaf5c2138426dfae1aa59238f"} Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.521657 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vprqb" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.531084 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctvts\" (UniqueName: \"kubernetes.io/projected/5da449d5-cc54-4f87-b50c-fd684f5aa487-kube-api-access-ctvts\") pod \"community-operators-ns82k\" (UID: \"5da449d5-cc54-4f87-b50c-fd684f5aa487\") " pod="openshift-marketplace/community-operators-ns82k" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.531140 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5da449d5-cc54-4f87-b50c-fd684f5aa487-catalog-content\") pod \"community-operators-ns82k\" (UID: \"5da449d5-cc54-4f87-b50c-fd684f5aa487\") " pod="openshift-marketplace/community-operators-ns82k" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.531167 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5da449d5-cc54-4f87-b50c-fd684f5aa487-utilities\") pod \"community-operators-ns82k\" (UID: \"5da449d5-cc54-4f87-b50c-fd684f5aa487\") " pod="openshift-marketplace/community-operators-ns82k" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.531700 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5da449d5-cc54-4f87-b50c-fd684f5aa487-utilities\") pod \"community-operators-ns82k\" (UID: \"5da449d5-cc54-4f87-b50c-fd684f5aa487\") " pod="openshift-marketplace/community-operators-ns82k" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.532021 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5da449d5-cc54-4f87-b50c-fd684f5aa487-catalog-content\") pod \"community-operators-ns82k\" (UID: \"5da449d5-cc54-4f87-b50c-fd684f5aa487\") " pod="openshift-marketplace/community-operators-ns82k" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.581407 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctvts\" (UniqueName: \"kubernetes.io/projected/5da449d5-cc54-4f87-b50c-fd684f5aa487-kube-api-access-ctvts\") pod \"community-operators-ns82k\" (UID: \"5da449d5-cc54-4f87-b50c-fd684f5aa487\") " pod="openshift-marketplace/community-operators-ns82k" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.593003 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-t8h5w" podStartSLOduration=134.59297029 podStartE2EDuration="2m14.59297029s" podCreationTimestamp="2026-01-22 05:46:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:15.567543165 +0000 UTC m=+156.406181168" watchObservedRunningTime="2026-01-22 05:48:15.59297029 +0000 UTC m=+156.431608293" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.622904 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.646076 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" podStartSLOduration=133.64604328 podStartE2EDuration="2m13.64604328s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 
00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:48:15.59945069 +0000 UTC m=+156.438088683" watchObservedRunningTime="2026-01-22 05:48:15.64604328 +0000 UTC m=+156.484681283" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.665429 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.665602 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.681256 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.681550 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.693813 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4257l"] Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.723166 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ns82k" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.738595 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b255c9bd-cfe5-4b43-943e-308cc6b7f32b-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"b255c9bd-cfe5-4b43-943e-308cc6b7f32b\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.738699 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b255c9bd-cfe5-4b43-943e-308cc6b7f32b-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"b255c9bd-cfe5-4b43-943e-308cc6b7f32b\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.749439 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dw5wf"] Jan 22 05:48:15 crc kubenswrapper[4982]: W0122 05:48:15.753181 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod27d44766_c0c7_48dd_b00d_0fc5adeb8707.slice/crio-1d46a529df127b26000844db151734bd51d423df2d662dfae9248e8ca95f9715 WatchSource:0}: Error finding container 1d46a529df127b26000844db151734bd51d423df2d662dfae9248e8ca95f9715: Status 404 returned error can't find the container with id 1d46a529df127b26000844db151734bd51d423df2d662dfae9248e8ca95f9715 Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.839424 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b255c9bd-cfe5-4b43-943e-308cc6b7f32b-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"b255c9bd-cfe5-4b43-943e-308cc6b7f32b\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.839479 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/b255c9bd-cfe5-4b43-943e-308cc6b7f32b-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"b255c9bd-cfe5-4b43-943e-308cc6b7f32b\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.839780 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b255c9bd-cfe5-4b43-943e-308cc6b7f32b-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"b255c9bd-cfe5-4b43-943e-308cc6b7f32b\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 05:48:15 crc kubenswrapper[4982]: I0122 05:48:15.878525 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b255c9bd-cfe5-4b43-943e-308cc6b7f32b-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"b255c9bd-cfe5-4b43-943e-308cc6b7f32b\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.007781 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.095065 4982 patch_prober.go:28] interesting pod/router-default-5444994796-q5lsn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 05:48:16 crc kubenswrapper[4982]: [-]has-synced failed: reason withheld Jan 22 05:48:16 crc kubenswrapper[4982]: [+]process-running ok Jan 22 05:48:16 crc kubenswrapper[4982]: healthz check failed Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.095302 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q5lsn" podUID="aa891b32-0d9c-4409-a53b-3d12d4e4edbc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.100380 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vprqb"] Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.124220 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ns82k"] Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.448484 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.523454 4982 generic.go:334] "Generic (PLEG): container finished" podID="5da449d5-cc54-4f87-b50c-fd684f5aa487" containerID="4ccaee24d48678395ff1f13722bfda6a1f94b7df9ef8f113ebb49289cf63d88d" exitCode=0 Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.523529 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ns82k" event={"ID":"5da449d5-cc54-4f87-b50c-fd684f5aa487","Type":"ContainerDied","Data":"4ccaee24d48678395ff1f13722bfda6a1f94b7df9ef8f113ebb49289cf63d88d"} Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.523559 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ns82k" event={"ID":"5da449d5-cc54-4f87-b50c-fd684f5aa487","Type":"ContainerStarted","Data":"7c5139f4db3f7bb14b70492dfcbfa0c42302003d39d3d4f84763cb7c99d1bbe7"} Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.528349 4982 provider.go:102] 
Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.529203 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"b255c9bd-cfe5-4b43-943e-308cc6b7f32b","Type":"ContainerStarted","Data":"41054c0f1fc12269dec328a4940e82293d35e6623f150bf12945292a9282e11f"} Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.542431 4982 generic.go:334] "Generic (PLEG): container finished" podID="27d44766-c0c7-48dd-b00d-0fc5adeb8707" containerID="8897b74983b7660c70e61725afa3b551c7bc9f094161664415f1b6651e5aa312" exitCode=0 Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.543051 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dw5wf" event={"ID":"27d44766-c0c7-48dd-b00d-0fc5adeb8707","Type":"ContainerDied","Data":"8897b74983b7660c70e61725afa3b551c7bc9f094161664415f1b6651e5aa312"} Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.543099 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dw5wf" event={"ID":"27d44766-c0c7-48dd-b00d-0fc5adeb8707","Type":"ContainerStarted","Data":"1d46a529df127b26000844db151734bd51d423df2d662dfae9248e8ca95f9715"} Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.552281 4982 generic.go:334] "Generic (PLEG): container finished" podID="09250e91-1a80-4d8c-86c4-0c3f1e84cfcf" containerID="103ad0949d280662497f71b686e45b2d178070ac1ac747545165dc05c1d39c0e" exitCode=0 Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.552347 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vprqb" event={"ID":"09250e91-1a80-4d8c-86c4-0c3f1e84cfcf","Type":"ContainerDied","Data":"103ad0949d280662497f71b686e45b2d178070ac1ac747545165dc05c1d39c0e"} Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.552377 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vprqb" event={"ID":"09250e91-1a80-4d8c-86c4-0c3f1e84cfcf","Type":"ContainerStarted","Data":"8008c6624d5a996af18b5c6294d1d2dfbba6d9c902f3b60725b4f766393a94c2"} Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.566424 4982 generic.go:334] "Generic (PLEG): container finished" podID="d8d13857-ec18-4e5c-aadf-61479ff0b6c2" containerID="c3a636d63ba2e53d2d712f3848b411ebfb5dbc8bb7b78edf946402da149b87b2" exitCode=0 Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.567064 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4257l" event={"ID":"d8d13857-ec18-4e5c-aadf-61479ff0b6c2","Type":"ContainerDied","Data":"c3a636d63ba2e53d2d712f3848b411ebfb5dbc8bb7b78edf946402da149b87b2"} Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.567210 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4257l" event={"ID":"d8d13857-ec18-4e5c-aadf-61479ff0b6c2","Type":"ContainerStarted","Data":"01bb3939f5680bc556a62f4129fc4a74df62bbe29ff72ac2d94b6f467f26d780"} Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.567901 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.746476 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-zt96v"] Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.747651 4982 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zt96v" Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.751983 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.769558 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zt96v"] Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.856785 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42b9z\" (UniqueName: \"kubernetes.io/projected/15dc31d8-8456-4ad8-859d-b34c6a85522a-kube-api-access-42b9z\") pod \"redhat-marketplace-zt96v\" (UID: \"15dc31d8-8456-4ad8-859d-b34c6a85522a\") " pod="openshift-marketplace/redhat-marketplace-zt96v" Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.856885 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15dc31d8-8456-4ad8-859d-b34c6a85522a-utilities\") pod \"redhat-marketplace-zt96v\" (UID: \"15dc31d8-8456-4ad8-859d-b34c6a85522a\") " pod="openshift-marketplace/redhat-marketplace-zt96v" Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.856913 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15dc31d8-8456-4ad8-859d-b34c6a85522a-catalog-content\") pod \"redhat-marketplace-zt96v\" (UID: \"15dc31d8-8456-4ad8-859d-b34c6a85522a\") " pod="openshift-marketplace/redhat-marketplace-zt96v" Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.958026 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42b9z\" (UniqueName: \"kubernetes.io/projected/15dc31d8-8456-4ad8-859d-b34c6a85522a-kube-api-access-42b9z\") pod \"redhat-marketplace-zt96v\" (UID: \"15dc31d8-8456-4ad8-859d-b34c6a85522a\") " pod="openshift-marketplace/redhat-marketplace-zt96v" Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.958074 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15dc31d8-8456-4ad8-859d-b34c6a85522a-utilities\") pod \"redhat-marketplace-zt96v\" (UID: \"15dc31d8-8456-4ad8-859d-b34c6a85522a\") " pod="openshift-marketplace/redhat-marketplace-zt96v" Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.958097 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15dc31d8-8456-4ad8-859d-b34c6a85522a-catalog-content\") pod \"redhat-marketplace-zt96v\" (UID: \"15dc31d8-8456-4ad8-859d-b34c6a85522a\") " pod="openshift-marketplace/redhat-marketplace-zt96v" Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.958577 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15dc31d8-8456-4ad8-859d-b34c6a85522a-catalog-content\") pod \"redhat-marketplace-zt96v\" (UID: \"15dc31d8-8456-4ad8-859d-b34c6a85522a\") " pod="openshift-marketplace/redhat-marketplace-zt96v" Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.958681 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15dc31d8-8456-4ad8-859d-b34c6a85522a-utilities\") pod 
\"redhat-marketplace-zt96v\" (UID: \"15dc31d8-8456-4ad8-859d-b34c6a85522a\") " pod="openshift-marketplace/redhat-marketplace-zt96v" Jan 22 05:48:16 crc kubenswrapper[4982]: I0122 05:48:16.980627 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42b9z\" (UniqueName: \"kubernetes.io/projected/15dc31d8-8456-4ad8-859d-b34c6a85522a-kube-api-access-42b9z\") pod \"redhat-marketplace-zt96v\" (UID: \"15dc31d8-8456-4ad8-859d-b34c6a85522a\") " pod="openshift-marketplace/redhat-marketplace-zt96v" Jan 22 05:48:17 crc kubenswrapper[4982]: I0122 05:48:17.073572 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zt96v" Jan 22 05:48:17 crc kubenswrapper[4982]: I0122 05:48:17.089493 4982 patch_prober.go:28] interesting pod/router-default-5444994796-q5lsn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 05:48:17 crc kubenswrapper[4982]: [-]has-synced failed: reason withheld Jan 22 05:48:17 crc kubenswrapper[4982]: [+]process-running ok Jan 22 05:48:17 crc kubenswrapper[4982]: healthz check failed Jan 22 05:48:17 crc kubenswrapper[4982]: I0122 05:48:17.089570 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q5lsn" podUID="aa891b32-0d9c-4409-a53b-3d12d4e4edbc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 05:48:17 crc kubenswrapper[4982]: I0122 05:48:17.139351 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pjnnz"] Jan 22 05:48:17 crc kubenswrapper[4982]: I0122 05:48:17.141224 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pjnnz" Jan 22 05:48:17 crc kubenswrapper[4982]: I0122 05:48:17.168691 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4f84179-f77d-4daa-82e5-04398aa15339-utilities\") pod \"redhat-marketplace-pjnnz\" (UID: \"d4f84179-f77d-4daa-82e5-04398aa15339\") " pod="openshift-marketplace/redhat-marketplace-pjnnz" Jan 22 05:48:17 crc kubenswrapper[4982]: I0122 05:48:17.168788 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4f84179-f77d-4daa-82e5-04398aa15339-catalog-content\") pod \"redhat-marketplace-pjnnz\" (UID: \"d4f84179-f77d-4daa-82e5-04398aa15339\") " pod="openshift-marketplace/redhat-marketplace-pjnnz" Jan 22 05:48:17 crc kubenswrapper[4982]: I0122 05:48:17.168824 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7hmc\" (UniqueName: \"kubernetes.io/projected/d4f84179-f77d-4daa-82e5-04398aa15339-kube-api-access-l7hmc\") pod \"redhat-marketplace-pjnnz\" (UID: \"d4f84179-f77d-4daa-82e5-04398aa15339\") " pod="openshift-marketplace/redhat-marketplace-pjnnz" Jan 22 05:48:17 crc kubenswrapper[4982]: I0122 05:48:17.213012 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pjnnz"] Jan 22 05:48:17 crc kubenswrapper[4982]: I0122 05:48:17.271982 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4f84179-f77d-4daa-82e5-04398aa15339-utilities\") pod \"redhat-marketplace-pjnnz\" (UID: \"d4f84179-f77d-4daa-82e5-04398aa15339\") " pod="openshift-marketplace/redhat-marketplace-pjnnz" Jan 22 05:48:17 crc kubenswrapper[4982]: I0122 05:48:17.272055 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4f84179-f77d-4daa-82e5-04398aa15339-catalog-content\") pod \"redhat-marketplace-pjnnz\" (UID: \"d4f84179-f77d-4daa-82e5-04398aa15339\") " pod="openshift-marketplace/redhat-marketplace-pjnnz" Jan 22 05:48:17 crc kubenswrapper[4982]: I0122 05:48:17.272077 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7hmc\" (UniqueName: \"kubernetes.io/projected/d4f84179-f77d-4daa-82e5-04398aa15339-kube-api-access-l7hmc\") pod \"redhat-marketplace-pjnnz\" (UID: \"d4f84179-f77d-4daa-82e5-04398aa15339\") " pod="openshift-marketplace/redhat-marketplace-pjnnz" Jan 22 05:48:17 crc kubenswrapper[4982]: I0122 05:48:17.273591 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4f84179-f77d-4daa-82e5-04398aa15339-utilities\") pod \"redhat-marketplace-pjnnz\" (UID: \"d4f84179-f77d-4daa-82e5-04398aa15339\") " pod="openshift-marketplace/redhat-marketplace-pjnnz" Jan 22 05:48:17 crc kubenswrapper[4982]: I0122 05:48:17.273620 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4f84179-f77d-4daa-82e5-04398aa15339-catalog-content\") pod \"redhat-marketplace-pjnnz\" (UID: \"d4f84179-f77d-4daa-82e5-04398aa15339\") " pod="openshift-marketplace/redhat-marketplace-pjnnz" Jan 22 05:48:17 crc kubenswrapper[4982]: I0122 05:48:17.296396 4982 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-l7hmc\" (UniqueName: \"kubernetes.io/projected/d4f84179-f77d-4daa-82e5-04398aa15339-kube-api-access-l7hmc\") pod \"redhat-marketplace-pjnnz\" (UID: \"d4f84179-f77d-4daa-82e5-04398aa15339\") " pod="openshift-marketplace/redhat-marketplace-pjnnz" Jan 22 05:48:17 crc kubenswrapper[4982]: I0122 05:48:17.370446 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zt96v"] Jan 22 05:48:17 crc kubenswrapper[4982]: W0122 05:48:17.380384 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod15dc31d8_8456_4ad8_859d_b34c6a85522a.slice/crio-0059a7fe644502aac523ef49c849c9907238ef51b1976d2519a08c0f1e49ee79 WatchSource:0}: Error finding container 0059a7fe644502aac523ef49c849c9907238ef51b1976d2519a08c0f1e49ee79: Status 404 returned error can't find the container with id 0059a7fe644502aac523ef49c849c9907238ef51b1976d2519a08c0f1e49ee79 Jan 22 05:48:17 crc kubenswrapper[4982]: I0122 05:48:17.493417 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pjnnz" Jan 22 05:48:17 crc kubenswrapper[4982]: I0122 05:48:17.603724 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zt96v" event={"ID":"15dc31d8-8456-4ad8-859d-b34c6a85522a","Type":"ContainerStarted","Data":"0059a7fe644502aac523ef49c849c9907238ef51b1976d2519a08c0f1e49ee79"} Jan 22 05:48:17 crc kubenswrapper[4982]: I0122 05:48:17.756114 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pjnnz"] Jan 22 05:48:17 crc kubenswrapper[4982]: I0122 05:48:17.849294 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-xgblw" Jan 22 05:48:17 crc kubenswrapper[4982]: I0122 05:48:17.954225 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rvmwc"] Jan 22 05:48:17 crc kubenswrapper[4982]: I0122 05:48:17.955344 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rvmwc" Jan 22 05:48:17 crc kubenswrapper[4982]: I0122 05:48:17.958381 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 22 05:48:17 crc kubenswrapper[4982]: I0122 05:48:17.977152 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rvmwc"] Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.002656 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b4ebfeb-4674-45e8-952b-225d022d3392-utilities\") pod \"redhat-operators-rvmwc\" (UID: \"0b4ebfeb-4674-45e8-952b-225d022d3392\") " pod="openshift-marketplace/redhat-operators-rvmwc" Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.003036 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgccj\" (UniqueName: \"kubernetes.io/projected/0b4ebfeb-4674-45e8-952b-225d022d3392-kube-api-access-hgccj\") pod \"redhat-operators-rvmwc\" (UID: \"0b4ebfeb-4674-45e8-952b-225d022d3392\") " pod="openshift-marketplace/redhat-operators-rvmwc" Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.003201 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b4ebfeb-4674-45e8-952b-225d022d3392-catalog-content\") pod \"redhat-operators-rvmwc\" (UID: \"0b4ebfeb-4674-45e8-952b-225d022d3392\") " pod="openshift-marketplace/redhat-operators-rvmwc" Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.084365 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-q5lsn" Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.088188 4982 patch_prober.go:28] interesting pod/router-default-5444994796-q5lsn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 05:48:18 crc kubenswrapper[4982]: [-]has-synced failed: reason withheld Jan 22 05:48:18 crc kubenswrapper[4982]: [+]process-running ok Jan 22 05:48:18 crc kubenswrapper[4982]: healthz check failed Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.088250 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q5lsn" podUID="aa891b32-0d9c-4409-a53b-3d12d4e4edbc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.113330 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b4ebfeb-4674-45e8-952b-225d022d3392-catalog-content\") pod \"redhat-operators-rvmwc\" (UID: \"0b4ebfeb-4674-45e8-952b-225d022d3392\") " pod="openshift-marketplace/redhat-operators-rvmwc" Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.113513 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b4ebfeb-4674-45e8-952b-225d022d3392-utilities\") pod \"redhat-operators-rvmwc\" (UID: \"0b4ebfeb-4674-45e8-952b-225d022d3392\") " pod="openshift-marketplace/redhat-operators-rvmwc" Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.113563 4982 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-hgccj\" (UniqueName: \"kubernetes.io/projected/0b4ebfeb-4674-45e8-952b-225d022d3392-kube-api-access-hgccj\") pod \"redhat-operators-rvmwc\" (UID: \"0b4ebfeb-4674-45e8-952b-225d022d3392\") " pod="openshift-marketplace/redhat-operators-rvmwc" Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.115610 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b4ebfeb-4674-45e8-952b-225d022d3392-catalog-content\") pod \"redhat-operators-rvmwc\" (UID: \"0b4ebfeb-4674-45e8-952b-225d022d3392\") " pod="openshift-marketplace/redhat-operators-rvmwc" Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.116359 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b4ebfeb-4674-45e8-952b-225d022d3392-utilities\") pod \"redhat-operators-rvmwc\" (UID: \"0b4ebfeb-4674-45e8-952b-225d022d3392\") " pod="openshift-marketplace/redhat-operators-rvmwc" Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.163073 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgccj\" (UniqueName: \"kubernetes.io/projected/0b4ebfeb-4674-45e8-952b-225d022d3392-kube-api-access-hgccj\") pod \"redhat-operators-rvmwc\" (UID: \"0b4ebfeb-4674-45e8-952b-225d022d3392\") " pod="openshift-marketplace/redhat-operators-rvmwc" Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.274001 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rvmwc" Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.341329 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-f5wpb"] Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.342512 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-f5wpb" Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.356097 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-f5wpb"] Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.420323 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b466ca6-46a5-4fc3-91e4-9f433be5f12c-catalog-content\") pod \"redhat-operators-f5wpb\" (UID: \"4b466ca6-46a5-4fc3-91e4-9f433be5f12c\") " pod="openshift-marketplace/redhat-operators-f5wpb" Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.420693 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b466ca6-46a5-4fc3-91e4-9f433be5f12c-utilities\") pod \"redhat-operators-f5wpb\" (UID: \"4b466ca6-46a5-4fc3-91e4-9f433be5f12c\") " pod="openshift-marketplace/redhat-operators-f5wpb" Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.420756 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4pcm\" (UniqueName: \"kubernetes.io/projected/4b466ca6-46a5-4fc3-91e4-9f433be5f12c-kube-api-access-l4pcm\") pod \"redhat-operators-f5wpb\" (UID: \"4b466ca6-46a5-4fc3-91e4-9f433be5f12c\") " pod="openshift-marketplace/redhat-operators-f5wpb" Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.522468 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b466ca6-46a5-4fc3-91e4-9f433be5f12c-catalog-content\") pod \"redhat-operators-f5wpb\" (UID: \"4b466ca6-46a5-4fc3-91e4-9f433be5f12c\") " pod="openshift-marketplace/redhat-operators-f5wpb" Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.522540 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b466ca6-46a5-4fc3-91e4-9f433be5f12c-utilities\") pod \"redhat-operators-f5wpb\" (UID: \"4b466ca6-46a5-4fc3-91e4-9f433be5f12c\") " pod="openshift-marketplace/redhat-operators-f5wpb" Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.522568 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4pcm\" (UniqueName: \"kubernetes.io/projected/4b466ca6-46a5-4fc3-91e4-9f433be5f12c-kube-api-access-l4pcm\") pod \"redhat-operators-f5wpb\" (UID: \"4b466ca6-46a5-4fc3-91e4-9f433be5f12c\") " pod="openshift-marketplace/redhat-operators-f5wpb" Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.523333 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b466ca6-46a5-4fc3-91e4-9f433be5f12c-catalog-content\") pod \"redhat-operators-f5wpb\" (UID: \"4b466ca6-46a5-4fc3-91e4-9f433be5f12c\") " pod="openshift-marketplace/redhat-operators-f5wpb" Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.523541 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b466ca6-46a5-4fc3-91e4-9f433be5f12c-utilities\") pod \"redhat-operators-f5wpb\" (UID: \"4b466ca6-46a5-4fc3-91e4-9f433be5f12c\") " pod="openshift-marketplace/redhat-operators-f5wpb" Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.544328 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-l4pcm\" (UniqueName: \"kubernetes.io/projected/4b466ca6-46a5-4fc3-91e4-9f433be5f12c-kube-api-access-l4pcm\") pod \"redhat-operators-f5wpb\" (UID: \"4b466ca6-46a5-4fc3-91e4-9f433be5f12c\") " pod="openshift-marketplace/redhat-operators-f5wpb" Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.565424 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rvmwc"] Jan 22 05:48:18 crc kubenswrapper[4982]: W0122 05:48:18.576119 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0b4ebfeb_4674_45e8_952b_225d022d3392.slice/crio-f7fb456399c0ef23b40c8af5d804deed2bb05391bbe166103665c9df96b5ea21 WatchSource:0}: Error finding container f7fb456399c0ef23b40c8af5d804deed2bb05391bbe166103665c9df96b5ea21: Status 404 returned error can't find the container with id f7fb456399c0ef23b40c8af5d804deed2bb05391bbe166103665c9df96b5ea21 Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.650942 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rvmwc" event={"ID":"0b4ebfeb-4674-45e8-952b-225d022d3392","Type":"ContainerStarted","Data":"f7fb456399c0ef23b40c8af5d804deed2bb05391bbe166103665c9df96b5ea21"} Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.655944 4982 generic.go:334] "Generic (PLEG): container finished" podID="d4f84179-f77d-4daa-82e5-04398aa15339" containerID="0128b527a2239dea67785e5532c2b174421d2cc02966f8e131b468f3a38ed9b1" exitCode=0 Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.656141 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pjnnz" event={"ID":"d4f84179-f77d-4daa-82e5-04398aa15339","Type":"ContainerDied","Data":"0128b527a2239dea67785e5532c2b174421d2cc02966f8e131b468f3a38ed9b1"} Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.656246 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pjnnz" event={"ID":"d4f84179-f77d-4daa-82e5-04398aa15339","Type":"ContainerStarted","Data":"72e21f9abee13ca21a53b6da41420bdd7d781ff8fe01bd309dc9356f0ab93b7b"} Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.667153 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-f5wpb" Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.667522 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"b255c9bd-cfe5-4b43-943e-308cc6b7f32b","Type":"ContainerDied","Data":"bbf3d81b8f43929f9de8cec62e3ccb994c72e82b6f07e949dbbfdf3e4d2ded7f"} Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.667445 4982 generic.go:334] "Generic (PLEG): container finished" podID="b255c9bd-cfe5-4b43-943e-308cc6b7f32b" containerID="bbf3d81b8f43929f9de8cec62e3ccb994c72e82b6f07e949dbbfdf3e4d2ded7f" exitCode=0 Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.684764 4982 generic.go:334] "Generic (PLEG): container finished" podID="15dc31d8-8456-4ad8-859d-b34c6a85522a" containerID="862e2f861467faf0b91f90cfb545bd6be0d1d7b49130614056a627b63e47230b" exitCode=0 Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.684834 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zt96v" event={"ID":"15dc31d8-8456-4ad8-859d-b34c6a85522a","Type":"ContainerDied","Data":"862e2f861467faf0b91f90cfb545bd6be0d1d7b49130614056a627b63e47230b"} Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.689894 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-t8h5w" Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.690020 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-t8h5w" Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.695837 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-t8h5w" Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.974596 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 05:48:18 crc kubenswrapper[4982]: I0122 05:48:18.974685 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 05:48:19 crc kubenswrapper[4982]: I0122 05:48:19.012743 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-f5wpb"] Jan 22 05:48:19 crc kubenswrapper[4982]: W0122 05:48:19.019295 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4b466ca6_46a5_4fc3_91e4_9f433be5f12c.slice/crio-d9522ea0c40327144fc98f23c09384b750d4881ac2068f195433fa8649265885 WatchSource:0}: Error finding container d9522ea0c40327144fc98f23c09384b750d4881ac2068f195433fa8649265885: Status 404 returned error can't find the container with id d9522ea0c40327144fc98f23c09384b750d4881ac2068f195433fa8649265885 Jan 22 05:48:19 crc kubenswrapper[4982]: I0122 05:48:19.089889 4982 patch_prober.go:28] interesting pod/router-default-5444994796-q5lsn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http 
failed: reason withheld Jan 22 05:48:19 crc kubenswrapper[4982]: [-]has-synced failed: reason withheld Jan 22 05:48:19 crc kubenswrapper[4982]: [+]process-running ok Jan 22 05:48:19 crc kubenswrapper[4982]: healthz check failed Jan 22 05:48:19 crc kubenswrapper[4982]: I0122 05:48:19.089985 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q5lsn" podUID="aa891b32-0d9c-4409-a53b-3d12d4e4edbc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 05:48:19 crc kubenswrapper[4982]: I0122 05:48:19.250150 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-bchfc" Jan 22 05:48:19 crc kubenswrapper[4982]: I0122 05:48:19.692911 4982 generic.go:334] "Generic (PLEG): container finished" podID="0b4ebfeb-4674-45e8-952b-225d022d3392" containerID="2a47518ad377ec2433496e3b1dd0f465b5f1380c5a977154e7df8175ab54d8b4" exitCode=0 Jan 22 05:48:19 crc kubenswrapper[4982]: I0122 05:48:19.693003 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rvmwc" event={"ID":"0b4ebfeb-4674-45e8-952b-225d022d3392","Type":"ContainerDied","Data":"2a47518ad377ec2433496e3b1dd0f465b5f1380c5a977154e7df8175ab54d8b4"} Jan 22 05:48:19 crc kubenswrapper[4982]: I0122 05:48:19.694475 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f5wpb" event={"ID":"4b466ca6-46a5-4fc3-91e4-9f433be5f12c","Type":"ContainerStarted","Data":"d9522ea0c40327144fc98f23c09384b750d4881ac2068f195433fa8649265885"} Jan 22 05:48:19 crc kubenswrapper[4982]: I0122 05:48:19.699582 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-t8h5w" Jan 22 05:48:20 crc kubenswrapper[4982]: I0122 05:48:20.096366 4982 patch_prober.go:28] interesting pod/router-default-5444994796-q5lsn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 05:48:20 crc kubenswrapper[4982]: [-]has-synced failed: reason withheld Jan 22 05:48:20 crc kubenswrapper[4982]: [+]process-running ok Jan 22 05:48:20 crc kubenswrapper[4982]: healthz check failed Jan 22 05:48:20 crc kubenswrapper[4982]: I0122 05:48:20.096449 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q5lsn" podUID="aa891b32-0d9c-4409-a53b-3d12d4e4edbc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 05:48:20 crc kubenswrapper[4982]: I0122 05:48:20.181652 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 05:48:20 crc kubenswrapper[4982]: I0122 05:48:20.266493 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b255c9bd-cfe5-4b43-943e-308cc6b7f32b-kubelet-dir\") pod \"b255c9bd-cfe5-4b43-943e-308cc6b7f32b\" (UID: \"b255c9bd-cfe5-4b43-943e-308cc6b7f32b\") " Jan 22 05:48:20 crc kubenswrapper[4982]: I0122 05:48:20.266684 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b255c9bd-cfe5-4b43-943e-308cc6b7f32b-kube-api-access\") pod \"b255c9bd-cfe5-4b43-943e-308cc6b7f32b\" (UID: \"b255c9bd-cfe5-4b43-943e-308cc6b7f32b\") " Jan 22 05:48:20 crc kubenswrapper[4982]: I0122 05:48:20.269296 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b255c9bd-cfe5-4b43-943e-308cc6b7f32b-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "b255c9bd-cfe5-4b43-943e-308cc6b7f32b" (UID: "b255c9bd-cfe5-4b43-943e-308cc6b7f32b"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 05:48:20 crc kubenswrapper[4982]: I0122 05:48:20.297088 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b255c9bd-cfe5-4b43-943e-308cc6b7f32b-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "b255c9bd-cfe5-4b43-943e-308cc6b7f32b" (UID: "b255c9bd-cfe5-4b43-943e-308cc6b7f32b"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:48:20 crc kubenswrapper[4982]: I0122 05:48:20.328156 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 22 05:48:20 crc kubenswrapper[4982]: E0122 05:48:20.328401 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b255c9bd-cfe5-4b43-943e-308cc6b7f32b" containerName="pruner" Jan 22 05:48:20 crc kubenswrapper[4982]: I0122 05:48:20.328414 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b255c9bd-cfe5-4b43-943e-308cc6b7f32b" containerName="pruner" Jan 22 05:48:20 crc kubenswrapper[4982]: I0122 05:48:20.328526 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b255c9bd-cfe5-4b43-943e-308cc6b7f32b" containerName="pruner" Jan 22 05:48:20 crc kubenswrapper[4982]: I0122 05:48:20.329610 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 05:48:20 crc kubenswrapper[4982]: I0122 05:48:20.334792 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 22 05:48:20 crc kubenswrapper[4982]: I0122 05:48:20.334885 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 22 05:48:20 crc kubenswrapper[4982]: I0122 05:48:20.352324 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 22 05:48:20 crc kubenswrapper[4982]: I0122 05:48:20.369390 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f5577f04-5444-4228-90c9-31678e6a72df-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f5577f04-5444-4228-90c9-31678e6a72df\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 05:48:20 crc kubenswrapper[4982]: I0122 05:48:20.369488 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f5577f04-5444-4228-90c9-31678e6a72df-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f5577f04-5444-4228-90c9-31678e6a72df\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 05:48:20 crc kubenswrapper[4982]: I0122 05:48:20.369610 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b255c9bd-cfe5-4b43-943e-308cc6b7f32b-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 05:48:20 crc kubenswrapper[4982]: I0122 05:48:20.369624 4982 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b255c9bd-cfe5-4b43-943e-308cc6b7f32b-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 22 05:48:20 crc kubenswrapper[4982]: I0122 05:48:20.474227 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f5577f04-5444-4228-90c9-31678e6a72df-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f5577f04-5444-4228-90c9-31678e6a72df\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 05:48:20 crc kubenswrapper[4982]: I0122 05:48:20.474309 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f5577f04-5444-4228-90c9-31678e6a72df-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f5577f04-5444-4228-90c9-31678e6a72df\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 05:48:20 crc kubenswrapper[4982]: I0122 05:48:20.475642 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f5577f04-5444-4228-90c9-31678e6a72df-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f5577f04-5444-4228-90c9-31678e6a72df\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 05:48:20 crc kubenswrapper[4982]: I0122 05:48:20.646591 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f5577f04-5444-4228-90c9-31678e6a72df-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f5577f04-5444-4228-90c9-31678e6a72df\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 05:48:20 crc kubenswrapper[4982]: I0122 
05:48:20.662286 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 05:48:20 crc kubenswrapper[4982]: I0122 05:48:20.755519 4982 generic.go:334] "Generic (PLEG): container finished" podID="4b466ca6-46a5-4fc3-91e4-9f433be5f12c" containerID="74c493bf91ac4e573ca8e5fce3a19690233415a4f5583107b1ecdc7ae7e6c5a8" exitCode=0 Jan 22 05:48:20 crc kubenswrapper[4982]: I0122 05:48:20.755628 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f5wpb" event={"ID":"4b466ca6-46a5-4fc3-91e4-9f433be5f12c","Type":"ContainerDied","Data":"74c493bf91ac4e573ca8e5fce3a19690233415a4f5583107b1ecdc7ae7e6c5a8"} Jan 22 05:48:20 crc kubenswrapper[4982]: I0122 05:48:20.762950 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"b255c9bd-cfe5-4b43-943e-308cc6b7f32b","Type":"ContainerDied","Data":"41054c0f1fc12269dec328a4940e82293d35e6623f150bf12945292a9282e11f"} Jan 22 05:48:20 crc kubenswrapper[4982]: I0122 05:48:20.763025 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="41054c0f1fc12269dec328a4940e82293d35e6623f150bf12945292a9282e11f" Jan 22 05:48:20 crc kubenswrapper[4982]: I0122 05:48:20.763279 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 05:48:21 crc kubenswrapper[4982]: I0122 05:48:21.091188 4982 patch_prober.go:28] interesting pod/router-default-5444994796-q5lsn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 05:48:21 crc kubenswrapper[4982]: [-]has-synced failed: reason withheld Jan 22 05:48:21 crc kubenswrapper[4982]: [+]process-running ok Jan 22 05:48:21 crc kubenswrapper[4982]: healthz check failed Jan 22 05:48:21 crc kubenswrapper[4982]: I0122 05:48:21.091506 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q5lsn" podUID="aa891b32-0d9c-4409-a53b-3d12d4e4edbc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 05:48:21 crc kubenswrapper[4982]: I0122 05:48:21.221808 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 22 05:48:21 crc kubenswrapper[4982]: W0122 05:48:21.290300 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podf5577f04_5444_4228_90c9_31678e6a72df.slice/crio-5341c8286d66af5e21b179c62f7a45a0ce4894d460400a8e4a81e05a5cc8712d WatchSource:0}: Error finding container 5341c8286d66af5e21b179c62f7a45a0ce4894d460400a8e4a81e05a5cc8712d: Status 404 returned error can't find the container with id 5341c8286d66af5e21b179c62f7a45a0ce4894d460400a8e4a81e05a5cc8712d Jan 22 05:48:21 crc kubenswrapper[4982]: I0122 05:48:21.811803 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f5577f04-5444-4228-90c9-31678e6a72df","Type":"ContainerStarted","Data":"5341c8286d66af5e21b179c62f7a45a0ce4894d460400a8e4a81e05a5cc8712d"} Jan 22 05:48:22 crc kubenswrapper[4982]: I0122 05:48:22.088379 4982 patch_prober.go:28] interesting pod/router-default-5444994796-q5lsn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with 
statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 05:48:22 crc kubenswrapper[4982]: [-]has-synced failed: reason withheld Jan 22 05:48:22 crc kubenswrapper[4982]: [+]process-running ok Jan 22 05:48:22 crc kubenswrapper[4982]: healthz check failed Jan 22 05:48:22 crc kubenswrapper[4982]: I0122 05:48:22.088447 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q5lsn" podUID="aa891b32-0d9c-4409-a53b-3d12d4e4edbc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 05:48:22 crc kubenswrapper[4982]: I0122 05:48:22.842172 4982 generic.go:334] "Generic (PLEG): container finished" podID="f5577f04-5444-4228-90c9-31678e6a72df" containerID="a9c170fa32460e21c22d71d4c8f532a8d71a8e66bb45b21ef6a090717e0af39d" exitCode=0 Jan 22 05:48:22 crc kubenswrapper[4982]: I0122 05:48:22.842231 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f5577f04-5444-4228-90c9-31678e6a72df","Type":"ContainerDied","Data":"a9c170fa32460e21c22d71d4c8f532a8d71a8e66bb45b21ef6a090717e0af39d"} Jan 22 05:48:23 crc kubenswrapper[4982]: I0122 05:48:23.113201 4982 patch_prober.go:28] interesting pod/router-default-5444994796-q5lsn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 05:48:23 crc kubenswrapper[4982]: [-]has-synced failed: reason withheld Jan 22 05:48:23 crc kubenswrapper[4982]: [+]process-running ok Jan 22 05:48:23 crc kubenswrapper[4982]: healthz check failed Jan 22 05:48:23 crc kubenswrapper[4982]: I0122 05:48:23.113261 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q5lsn" podUID="aa891b32-0d9c-4409-a53b-3d12d4e4edbc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 05:48:24 crc kubenswrapper[4982]: I0122 05:48:24.100571 4982 patch_prober.go:28] interesting pod/router-default-5444994796-q5lsn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 05:48:24 crc kubenswrapper[4982]: [-]has-synced failed: reason withheld Jan 22 05:48:24 crc kubenswrapper[4982]: [+]process-running ok Jan 22 05:48:24 crc kubenswrapper[4982]: healthz check failed Jan 22 05:48:24 crc kubenswrapper[4982]: I0122 05:48:24.100635 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q5lsn" podUID="aa891b32-0d9c-4409-a53b-3d12d4e4edbc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 05:48:24 crc kubenswrapper[4982]: I0122 05:48:24.806831 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/377cd579-2ade-48ea-ad2a-44d1546fd5fb-metrics-certs\") pod \"network-metrics-daemon-gxwx2\" (UID: \"377cd579-2ade-48ea-ad2a-44d1546fd5fb\") " pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:48:24 crc kubenswrapper[4982]: I0122 05:48:24.815527 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/377cd579-2ade-48ea-ad2a-44d1546fd5fb-metrics-certs\") pod \"network-metrics-daemon-gxwx2\" (UID: \"377cd579-2ade-48ea-ad2a-44d1546fd5fb\") " 
pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:48:24 crc kubenswrapper[4982]: I0122 05:48:24.916320 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-j82cn" Jan 22 05:48:24 crc kubenswrapper[4982]: I0122 05:48:24.951062 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gxwx2" Jan 22 05:48:25 crc kubenswrapper[4982]: I0122 05:48:25.087710 4982 patch_prober.go:28] interesting pod/router-default-5444994796-q5lsn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 05:48:25 crc kubenswrapper[4982]: [-]has-synced failed: reason withheld Jan 22 05:48:25 crc kubenswrapper[4982]: [+]process-running ok Jan 22 05:48:25 crc kubenswrapper[4982]: healthz check failed Jan 22 05:48:25 crc kubenswrapper[4982]: I0122 05:48:25.087775 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q5lsn" podUID="aa891b32-0d9c-4409-a53b-3d12d4e4edbc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 05:48:25 crc kubenswrapper[4982]: I0122 05:48:25.145117 4982 patch_prober.go:28] interesting pod/console-f9d7485db-2rfct container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body= Jan 22 05:48:25 crc kubenswrapper[4982]: I0122 05:48:25.145191 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-2rfct" podUID="a9dca4eb-9076-4a32-a851-55d4649cdbf2" containerName="console" probeResult="failure" output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused" Jan 22 05:48:26 crc kubenswrapper[4982]: I0122 05:48:26.088442 4982 patch_prober.go:28] interesting pod/router-default-5444994796-q5lsn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 05:48:26 crc kubenswrapper[4982]: [-]has-synced failed: reason withheld Jan 22 05:48:26 crc kubenswrapper[4982]: [+]process-running ok Jan 22 05:48:26 crc kubenswrapper[4982]: healthz check failed Jan 22 05:48:26 crc kubenswrapper[4982]: I0122 05:48:26.088538 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q5lsn" podUID="aa891b32-0d9c-4409-a53b-3d12d4e4edbc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 05:48:27 crc kubenswrapper[4982]: I0122 05:48:27.088974 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-q5lsn" Jan 22 05:48:27 crc kubenswrapper[4982]: I0122 05:48:27.092404 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-q5lsn" Jan 22 05:48:33 crc kubenswrapper[4982]: I0122 05:48:33.581380 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" Jan 22 05:48:35 crc kubenswrapper[4982]: I0122 05:48:35.148626 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-2rfct" Jan 
22 05:48:35 crc kubenswrapper[4982]: I0122 05:48:35.152814 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-2rfct" Jan 22 05:48:47 crc kubenswrapper[4982]: I0122 05:48:47.897838 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dq4hk" Jan 22 05:48:48 crc kubenswrapper[4982]: I0122 05:48:48.802428 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 05:48:48 crc kubenswrapper[4982]: I0122 05:48:48.974316 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 05:48:48 crc kubenswrapper[4982]: I0122 05:48:48.975262 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 05:48:54 crc kubenswrapper[4982]: I0122 05:48:54.323756 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 22 05:48:54 crc kubenswrapper[4982]: I0122 05:48:54.328072 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 05:48:54 crc kubenswrapper[4982]: I0122 05:48:54.341339 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 22 05:48:54 crc kubenswrapper[4982]: I0122 05:48:54.354890 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cda29bd4-a01f-4c2a-b9e1-1e842578cb7c-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"cda29bd4-a01f-4c2a-b9e1-1e842578cb7c\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 05:48:54 crc kubenswrapper[4982]: I0122 05:48:54.355000 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/cda29bd4-a01f-4c2a-b9e1-1e842578cb7c-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"cda29bd4-a01f-4c2a-b9e1-1e842578cb7c\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 05:48:54 crc kubenswrapper[4982]: I0122 05:48:54.456362 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cda29bd4-a01f-4c2a-b9e1-1e842578cb7c-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"cda29bd4-a01f-4c2a-b9e1-1e842578cb7c\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 05:48:54 crc kubenswrapper[4982]: I0122 05:48:54.456961 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/cda29bd4-a01f-4c2a-b9e1-1e842578cb7c-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"cda29bd4-a01f-4c2a-b9e1-1e842578cb7c\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 05:48:54 crc kubenswrapper[4982]: I0122 05:48:54.457034 4982 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/cda29bd4-a01f-4c2a-b9e1-1e842578cb7c-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"cda29bd4-a01f-4c2a-b9e1-1e842578cb7c\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 05:48:54 crc kubenswrapper[4982]: I0122 05:48:54.477676 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cda29bd4-a01f-4c2a-b9e1-1e842578cb7c-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"cda29bd4-a01f-4c2a-b9e1-1e842578cb7c\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 05:48:54 crc kubenswrapper[4982]: I0122 05:48:54.655983 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 05:48:55 crc kubenswrapper[4982]: I0122 05:48:55.785571 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 05:48:55 crc kubenswrapper[4982]: I0122 05:48:55.894004 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f5577f04-5444-4228-90c9-31678e6a72df-kubelet-dir\") pod \"f5577f04-5444-4228-90c9-31678e6a72df\" (UID: \"f5577f04-5444-4228-90c9-31678e6a72df\") " Jan 22 05:48:55 crc kubenswrapper[4982]: I0122 05:48:55.894126 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f5577f04-5444-4228-90c9-31678e6a72df-kube-api-access\") pod \"f5577f04-5444-4228-90c9-31678e6a72df\" (UID: \"f5577f04-5444-4228-90c9-31678e6a72df\") " Jan 22 05:48:55 crc kubenswrapper[4982]: I0122 05:48:55.894169 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f5577f04-5444-4228-90c9-31678e6a72df-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "f5577f04-5444-4228-90c9-31678e6a72df" (UID: "f5577f04-5444-4228-90c9-31678e6a72df"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 05:48:55 crc kubenswrapper[4982]: I0122 05:48:55.894452 4982 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f5577f04-5444-4228-90c9-31678e6a72df-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 22 05:48:55 crc kubenswrapper[4982]: I0122 05:48:55.899971 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5577f04-5444-4228-90c9-31678e6a72df-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "f5577f04-5444-4228-90c9-31678e6a72df" (UID: "f5577f04-5444-4228-90c9-31678e6a72df"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:48:55 crc kubenswrapper[4982]: I0122 05:48:55.995998 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f5577f04-5444-4228-90c9-31678e6a72df-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 05:48:56 crc kubenswrapper[4982]: I0122 05:48:56.147495 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f5577f04-5444-4228-90c9-31678e6a72df","Type":"ContainerDied","Data":"5341c8286d66af5e21b179c62f7a45a0ce4894d460400a8e4a81e05a5cc8712d"} Jan 22 05:48:56 crc kubenswrapper[4982]: I0122 05:48:56.147539 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5341c8286d66af5e21b179c62f7a45a0ce4894d460400a8e4a81e05a5cc8712d" Jan 22 05:48:56 crc kubenswrapper[4982]: I0122 05:48:56.147603 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 05:48:59 crc kubenswrapper[4982]: I0122 05:48:59.112186 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 22 05:48:59 crc kubenswrapper[4982]: E0122 05:48:59.113202 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5577f04-5444-4228-90c9-31678e6a72df" containerName="pruner" Jan 22 05:48:59 crc kubenswrapper[4982]: I0122 05:48:59.113229 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5577f04-5444-4228-90c9-31678e6a72df" containerName="pruner" Jan 22 05:48:59 crc kubenswrapper[4982]: I0122 05:48:59.113387 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5577f04-5444-4228-90c9-31678e6a72df" containerName="pruner" Jan 22 05:48:59 crc kubenswrapper[4982]: I0122 05:48:59.114069 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 22 05:48:59 crc kubenswrapper[4982]: I0122 05:48:59.138213 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 22 05:48:59 crc kubenswrapper[4982]: I0122 05:48:59.147071 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f17dc565-d8f5-4b5e-8139-1a679d6c99ae-var-lock\") pod \"installer-9-crc\" (UID: \"f17dc565-d8f5-4b5e-8139-1a679d6c99ae\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 05:48:59 crc kubenswrapper[4982]: I0122 05:48:59.147175 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f17dc565-d8f5-4b5e-8139-1a679d6c99ae-kube-api-access\") pod \"installer-9-crc\" (UID: \"f17dc565-d8f5-4b5e-8139-1a679d6c99ae\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 05:48:59 crc kubenswrapper[4982]: I0122 05:48:59.147222 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f17dc565-d8f5-4b5e-8139-1a679d6c99ae-kubelet-dir\") pod \"installer-9-crc\" (UID: \"f17dc565-d8f5-4b5e-8139-1a679d6c99ae\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 05:48:59 crc kubenswrapper[4982]: I0122 05:48:59.248355 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f17dc565-d8f5-4b5e-8139-1a679d6c99ae-var-lock\") pod \"installer-9-crc\" (UID: \"f17dc565-d8f5-4b5e-8139-1a679d6c99ae\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 05:48:59 crc kubenswrapper[4982]: I0122 05:48:59.248491 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f17dc565-d8f5-4b5e-8139-1a679d6c99ae-kube-api-access\") pod \"installer-9-crc\" (UID: \"f17dc565-d8f5-4b5e-8139-1a679d6c99ae\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 05:48:59 crc kubenswrapper[4982]: I0122 05:48:59.248545 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f17dc565-d8f5-4b5e-8139-1a679d6c99ae-kubelet-dir\") pod \"installer-9-crc\" (UID: \"f17dc565-d8f5-4b5e-8139-1a679d6c99ae\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 05:48:59 crc kubenswrapper[4982]: I0122 05:48:59.248495 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f17dc565-d8f5-4b5e-8139-1a679d6c99ae-var-lock\") pod \"installer-9-crc\" (UID: \"f17dc565-d8f5-4b5e-8139-1a679d6c99ae\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 05:48:59 crc kubenswrapper[4982]: I0122 05:48:59.248649 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f17dc565-d8f5-4b5e-8139-1a679d6c99ae-kubelet-dir\") pod \"installer-9-crc\" (UID: \"f17dc565-d8f5-4b5e-8139-1a679d6c99ae\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 05:48:59 crc kubenswrapper[4982]: I0122 05:48:59.278931 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f17dc565-d8f5-4b5e-8139-1a679d6c99ae-kube-api-access\") pod \"installer-9-crc\" (UID: 
\"f17dc565-d8f5-4b5e-8139-1a679d6c99ae\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 05:48:59 crc kubenswrapper[4982]: I0122 05:48:59.453178 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 22 05:49:07 crc kubenswrapper[4982]: E0122 05:49:07.761490 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 22 05:49:07 crc kubenswrapper[4982]: E0122 05:49:07.762000 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-42b9z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-zt96v_openshift-marketplace(15dc31d8-8456-4ad8-859d-b34c6a85522a): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 05:49:07 crc kubenswrapper[4982]: E0122 05:49:07.763225 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-zt96v" podUID="15dc31d8-8456-4ad8-859d-b34c6a85522a" Jan 22 05:49:13 crc kubenswrapper[4982]: E0122 05:49:13.466746 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 22 05:49:13 crc kubenswrapper[4982]: E0122 05:49:13.467801 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-l4pcm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-f5wpb_openshift-marketplace(4b466ca6-46a5-4fc3-91e4-9f433be5f12c): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 05:49:13 crc kubenswrapper[4982]: E0122 05:49:13.469369 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-f5wpb" podUID="4b466ca6-46a5-4fc3-91e4-9f433be5f12c" Jan 22 05:49:15 crc kubenswrapper[4982]: E0122 05:49:15.005397 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-zt96v" podUID="15dc31d8-8456-4ad8-859d-b34c6a85522a" Jan 22 05:49:15 crc kubenswrapper[4982]: E0122 05:49:15.005902 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-f5wpb" podUID="4b466ca6-46a5-4fc3-91e4-9f433be5f12c" Jan 22 05:49:15 crc kubenswrapper[4982]: E0122 05:49:15.072945 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 22 05:49:15 crc kubenswrapper[4982]: E0122 05:49:15.073468 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rc8sn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-4257l_openshift-marketplace(d8d13857-ec18-4e5c-aadf-61479ff0b6c2): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 05:49:15 crc kubenswrapper[4982]: E0122 05:49:15.075226 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-4257l" podUID="d8d13857-ec18-4e5c-aadf-61479ff0b6c2" Jan 22 05:49:16 crc kubenswrapper[4982]: E0122 05:49:16.396616 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-4257l" podUID="d8d13857-ec18-4e5c-aadf-61479ff0b6c2" Jan 22 05:49:16 crc kubenswrapper[4982]: E0122 05:49:16.490062 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 22 05:49:16 crc kubenswrapper[4982]: E0122 05:49:16.490340 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ctvts,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-ns82k_openshift-marketplace(5da449d5-cc54-4f87-b50c-fd684f5aa487): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 05:49:16 crc kubenswrapper[4982]: E0122 05:49:16.491514 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-ns82k" podUID="5da449d5-cc54-4f87-b50c-fd684f5aa487" Jan 22 05:49:16 crc kubenswrapper[4982]: E0122 05:49:16.495495 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 22 05:49:16 crc kubenswrapper[4982]: E0122 05:49:16.495730 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6dcqf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-dw5wf_openshift-marketplace(27d44766-c0c7-48dd-b00d-0fc5adeb8707): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 05:49:16 crc kubenswrapper[4982]: E0122 05:49:16.498100 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-dw5wf" podUID="27d44766-c0c7-48dd-b00d-0fc5adeb8707" Jan 22 05:49:16 crc kubenswrapper[4982]: E0122 05:49:16.511735 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 22 05:49:16 crc kubenswrapper[4982]: E0122 05:49:16.512088 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-v8lw5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-vprqb_openshift-marketplace(09250e91-1a80-4d8c-86c4-0c3f1e84cfcf): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Jan 22 05:49:16 crc kubenswrapper[4982]: E0122 05:49:16.514048 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-vprqb" podUID="09250e91-1a80-4d8c-86c4-0c3f1e84cfcf"
Jan 22 05:49:16 crc kubenswrapper[4982]: E0122 05:49:16.516055 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18"
Jan 22 05:49:16 crc kubenswrapper[4982]: E0122 05:49:16.516172 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hgccj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-rvmwc_openshift-marketplace(0b4ebfeb-4674-45e8-952b-225d022d3392): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Jan 22 05:49:16 crc kubenswrapper[4982]: E0122 05:49:16.517869 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-rvmwc" podUID="0b4ebfeb-4674-45e8-952b-225d022d3392"
Jan 22 05:49:16 crc kubenswrapper[4982]: E0122 05:49:16.524749 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Jan 22 05:49:16 crc kubenswrapper[4982]: E0122 05:49:16.524987 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-l7hmc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-pjnnz_openshift-marketplace(d4f84179-f77d-4daa-82e5-04398aa15339): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Jan 22 05:49:16 crc kubenswrapper[4982]: E0122 05:49:16.526083 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-pjnnz" podUID="d4f84179-f77d-4daa-82e5-04398aa15339"
Jan 22 05:49:16 crc kubenswrapper[4982]: I0122 05:49:16.640900 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Jan 22 05:49:16 crc kubenswrapper[4982]: W0122 05:49:16.665839 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podcda29bd4_a01f_4c2a_b9e1_1e842578cb7c.slice/crio-55bd47b1629e5d434454d3c01c88ad3627b18c98a6d68f6b0f31b2065846bb64 WatchSource:0}: Error finding container 55bd47b1629e5d434454d3c01c88ad3627b18c98a6d68f6b0f31b2065846bb64: Status 404 returned error can't find the container with id 55bd47b1629e5d434454d3c01c88ad3627b18c98a6d68f6b0f31b2065846bb64
Jan 22 05:49:16 crc kubenswrapper[4982]: I0122 05:49:16.926223 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-gxwx2"]
Jan 22 05:49:16 crc kubenswrapper[4982]: I0122 05:49:16.929946 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Jan 22 05:49:17 crc kubenswrapper[4982]: I0122 05:49:17.276595 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-gxwx2" event={"ID":"377cd579-2ade-48ea-ad2a-44d1546fd5fb","Type":"ContainerStarted","Data":"7409a5bee69f40f1a703459a3a23b34b4c69cc264fa22c636eecaeddc4caef23"}
Jan 22 05:49:17 crc kubenswrapper[4982]: I0122 05:49:17.277053 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-gxwx2" event={"ID":"377cd579-2ade-48ea-ad2a-44d1546fd5fb","Type":"ContainerStarted","Data":"7b288036f720acf111156591debcdbdb5cdd7037e77bd19c2c53378c4a92a170"}
event={"ID":"377cd579-2ade-48ea-ad2a-44d1546fd5fb","Type":"ContainerStarted","Data":"7b288036f720acf111156591debcdbdb5cdd7037e77bd19c2c53378c4a92a170"} Jan 22 05:49:17 crc kubenswrapper[4982]: I0122 05:49:17.279480 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"cda29bd4-a01f-4c2a-b9e1-1e842578cb7c","Type":"ContainerStarted","Data":"8b8137785a2e484403601d03a133f97600a486202e093ff91982d38090bc7d3c"} Jan 22 05:49:17 crc kubenswrapper[4982]: I0122 05:49:17.279507 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"cda29bd4-a01f-4c2a-b9e1-1e842578cb7c","Type":"ContainerStarted","Data":"55bd47b1629e5d434454d3c01c88ad3627b18c98a6d68f6b0f31b2065846bb64"} Jan 22 05:49:17 crc kubenswrapper[4982]: I0122 05:49:17.282061 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"f17dc565-d8f5-4b5e-8139-1a679d6c99ae","Type":"ContainerStarted","Data":"e2dbd94f9630f1e513bfa43ed6436ae772d3b7fb1b6bdcaf294bbb19716c8fab"} Jan 22 05:49:17 crc kubenswrapper[4982]: I0122 05:49:17.282783 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"f17dc565-d8f5-4b5e-8139-1a679d6c99ae","Type":"ContainerStarted","Data":"9c160e4414ff7a30d8668f9e936b1c87c0e944014a2ac43f90b0bb3ce0c476a7"} Jan 22 05:49:17 crc kubenswrapper[4982]: E0122 05:49:17.284334 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-pjnnz" podUID="d4f84179-f77d-4daa-82e5-04398aa15339" Jan 22 05:49:17 crc kubenswrapper[4982]: E0122 05:49:17.284556 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-vprqb" podUID="09250e91-1a80-4d8c-86c4-0c3f1e84cfcf" Jan 22 05:49:17 crc kubenswrapper[4982]: E0122 05:49:17.284786 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-ns82k" podUID="5da449d5-cc54-4f87-b50c-fd684f5aa487" Jan 22 05:49:17 crc kubenswrapper[4982]: E0122 05:49:17.285223 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-dw5wf" podUID="27d44766-c0c7-48dd-b00d-0fc5adeb8707" Jan 22 05:49:17 crc kubenswrapper[4982]: I0122 05:49:17.304303 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=23.304251243 podStartE2EDuration="23.304251243s" podCreationTimestamp="2026-01-22 05:48:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:49:17.291548161 +0000 UTC m=+218.130186174" watchObservedRunningTime="2026-01-22 05:49:17.304251243 
+0000 UTC m=+218.142889246" Jan 22 05:49:17 crc kubenswrapper[4982]: I0122 05:49:17.321488 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=18.32145008 podStartE2EDuration="18.32145008s" podCreationTimestamp="2026-01-22 05:48:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:49:17.316003469 +0000 UTC m=+218.154641472" watchObservedRunningTime="2026-01-22 05:49:17.32145008 +0000 UTC m=+218.160088083" Jan 22 05:49:18 crc kubenswrapper[4982]: I0122 05:49:18.289882 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-gxwx2" event={"ID":"377cd579-2ade-48ea-ad2a-44d1546fd5fb","Type":"ContainerStarted","Data":"96065df6540b90e1c05bc63fe281b47d1d072dcf17e05229246521e85498013c"} Jan 22 05:49:18 crc kubenswrapper[4982]: I0122 05:49:18.292199 4982 generic.go:334] "Generic (PLEG): container finished" podID="cda29bd4-a01f-4c2a-b9e1-1e842578cb7c" containerID="8b8137785a2e484403601d03a133f97600a486202e093ff91982d38090bc7d3c" exitCode=0 Jan 22 05:49:18 crc kubenswrapper[4982]: I0122 05:49:18.292532 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"cda29bd4-a01f-4c2a-b9e1-1e842578cb7c","Type":"ContainerDied","Data":"8b8137785a2e484403601d03a133f97600a486202e093ff91982d38090bc7d3c"} Jan 22 05:49:18 crc kubenswrapper[4982]: I0122 05:49:18.312124 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-gxwx2" podStartSLOduration=196.31209694 podStartE2EDuration="3m16.31209694s" podCreationTimestamp="2026-01-22 05:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:49:18.31066459 +0000 UTC m=+219.149302603" watchObservedRunningTime="2026-01-22 05:49:18.31209694 +0000 UTC m=+219.150734953" Jan 22 05:49:18 crc kubenswrapper[4982]: I0122 05:49:18.974458 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 05:49:18 crc kubenswrapper[4982]: I0122 05:49:18.975267 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 05:49:18 crc kubenswrapper[4982]: I0122 05:49:18.976502 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 05:49:18 crc kubenswrapper[4982]: I0122 05:49:18.977731 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d"} pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 05:49:18 crc kubenswrapper[4982]: I0122 05:49:18.978092 4982 
Jan 22 05:49:19 crc kubenswrapper[4982]: I0122 05:49:19.302297 4982 generic.go:334] "Generic (PLEG): container finished" podID="2829369e-72ba-4637-853b-88f5cf242a0e" containerID="7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d" exitCode=0
Jan 22 05:49:19 crc kubenswrapper[4982]: I0122 05:49:19.302358 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerDied","Data":"7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d"}
Jan 22 05:49:19 crc kubenswrapper[4982]: I0122 05:49:19.620227 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Jan 22 05:49:19 crc kubenswrapper[4982]: I0122 05:49:19.717236 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cda29bd4-a01f-4c2a-b9e1-1e842578cb7c-kube-api-access\") pod \"cda29bd4-a01f-4c2a-b9e1-1e842578cb7c\" (UID: \"cda29bd4-a01f-4c2a-b9e1-1e842578cb7c\") "
Jan 22 05:49:19 crc kubenswrapper[4982]: I0122 05:49:19.717422 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/cda29bd4-a01f-4c2a-b9e1-1e842578cb7c-kubelet-dir\") pod \"cda29bd4-a01f-4c2a-b9e1-1e842578cb7c\" (UID: \"cda29bd4-a01f-4c2a-b9e1-1e842578cb7c\") "
Jan 22 05:49:19 crc kubenswrapper[4982]: I0122 05:49:19.717516 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cda29bd4-a01f-4c2a-b9e1-1e842578cb7c-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "cda29bd4-a01f-4c2a-b9e1-1e842578cb7c" (UID: "cda29bd4-a01f-4c2a-b9e1-1e842578cb7c"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 22 05:49:19 crc kubenswrapper[4982]: I0122 05:49:19.717882 4982 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/cda29bd4-a01f-4c2a-b9e1-1e842578cb7c-kubelet-dir\") on node \"crc\" DevicePath \"\""
Jan 22 05:49:19 crc kubenswrapper[4982]: I0122 05:49:19.726112 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cda29bd4-a01f-4c2a-b9e1-1e842578cb7c-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "cda29bd4-a01f-4c2a-b9e1-1e842578cb7c" (UID: "cda29bd4-a01f-4c2a-b9e1-1e842578cb7c"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 05:49:19 crc kubenswrapper[4982]: I0122 05:49:19.819753 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cda29bd4-a01f-4c2a-b9e1-1e842578cb7c-kube-api-access\") on node \"crc\" DevicePath \"\""
Jan 22 05:49:20 crc kubenswrapper[4982]: I0122 05:49:20.313426 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Jan 22 05:49:20 crc kubenswrapper[4982]: I0122 05:49:20.313438 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"cda29bd4-a01f-4c2a-b9e1-1e842578cb7c","Type":"ContainerDied","Data":"55bd47b1629e5d434454d3c01c88ad3627b18c98a6d68f6b0f31b2065846bb64"}
Jan 22 05:49:20 crc kubenswrapper[4982]: I0122 05:49:20.314054 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="55bd47b1629e5d434454d3c01c88ad3627b18c98a6d68f6b0f31b2065846bb64"
Jan 22 05:49:20 crc kubenswrapper[4982]: I0122 05:49:20.317679 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"0e0167ccad0044847a9f8ef5cc1dd186b4b6a77a06b333f87c761d1c1a524dd9"}
Jan 22 05:49:26 crc kubenswrapper[4982]: I0122 05:49:26.245603 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lbczg"]
Jan 22 05:49:28 crc kubenswrapper[4982]: I0122 05:49:28.369209 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f5wpb" event={"ID":"4b466ca6-46a5-4fc3-91e4-9f433be5f12c","Type":"ContainerStarted","Data":"f478aeee57c240f8dfbee340e08955be0cf8a7e20b7ffbf53abca210acecd208"}
Jan 22 05:49:29 crc kubenswrapper[4982]: I0122 05:49:29.380087 4982 generic.go:334] "Generic (PLEG): container finished" podID="0b4ebfeb-4674-45e8-952b-225d022d3392" containerID="e4f6c83adadfe84b22c7c76981cfa7ed1f482f65dfcad90d8132a5e5f9903cbd" exitCode=0
Jan 22 05:49:29 crc kubenswrapper[4982]: I0122 05:49:29.380158 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rvmwc" event={"ID":"0b4ebfeb-4674-45e8-952b-225d022d3392","Type":"ContainerDied","Data":"e4f6c83adadfe84b22c7c76981cfa7ed1f482f65dfcad90d8132a5e5f9903cbd"}
Jan 22 05:49:29 crc kubenswrapper[4982]: I0122 05:49:29.383126 4982 generic.go:334] "Generic (PLEG): container finished" podID="4b466ca6-46a5-4fc3-91e4-9f433be5f12c" containerID="f478aeee57c240f8dfbee340e08955be0cf8a7e20b7ffbf53abca210acecd208" exitCode=0
Jan 22 05:49:29 crc kubenswrapper[4982]: I0122 05:49:29.383195 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f5wpb" event={"ID":"4b466ca6-46a5-4fc3-91e4-9f433be5f12c","Type":"ContainerDied","Data":"f478aeee57c240f8dfbee340e08955be0cf8a7e20b7ffbf53abca210acecd208"}
Jan 22 05:49:30 crc kubenswrapper[4982]: I0122 05:49:30.393814 4982 generic.go:334] "Generic (PLEG): container finished" podID="15dc31d8-8456-4ad8-859d-b34c6a85522a" containerID="2aa3bfcc5df6f523ed982587a226037957b598ef73a754464ac3e80cf78ad19f" exitCode=0
Jan 22 05:49:30 crc kubenswrapper[4982]: I0122 05:49:30.393964 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zt96v" event={"ID":"15dc31d8-8456-4ad8-859d-b34c6a85522a","Type":"ContainerDied","Data":"2aa3bfcc5df6f523ed982587a226037957b598ef73a754464ac3e80cf78ad19f"}
Jan 22 05:49:30 crc kubenswrapper[4982]: I0122 05:49:30.397526 4982 generic.go:334] "Generic (PLEG): container finished" podID="09250e91-1a80-4d8c-86c4-0c3f1e84cfcf" containerID="c92d09ab0e8ae277342ef77ff28c001f9a42eb17ff8c632b767270b6281e5e8f" exitCode=0
Jan 22 05:49:30 crc kubenswrapper[4982]: I0122 05:49:30.397598 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vprqb" event={"ID":"09250e91-1a80-4d8c-86c4-0c3f1e84cfcf","Type":"ContainerDied","Data":"c92d09ab0e8ae277342ef77ff28c001f9a42eb17ff8c632b767270b6281e5e8f"}
pod="openshift-marketplace/certified-operators-vprqb" event={"ID":"09250e91-1a80-4d8c-86c4-0c3f1e84cfcf","Type":"ContainerDied","Data":"c92d09ab0e8ae277342ef77ff28c001f9a42eb17ff8c632b767270b6281e5e8f"} Jan 22 05:49:30 crc kubenswrapper[4982]: I0122 05:49:30.400367 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rvmwc" event={"ID":"0b4ebfeb-4674-45e8-952b-225d022d3392","Type":"ContainerStarted","Data":"8c6f09d2c0423107eaad3a151c98e934f5f2c8358e7216f7e0a9cc85bb5346fb"} Jan 22 05:49:30 crc kubenswrapper[4982]: I0122 05:49:30.402801 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f5wpb" event={"ID":"4b466ca6-46a5-4fc3-91e4-9f433be5f12c","Type":"ContainerStarted","Data":"b5899f015b1251f329e82c890ddccb8e822e1f4bda28620f46feefb4341c0c71"} Jan 22 05:49:30 crc kubenswrapper[4982]: I0122 05:49:30.462650 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rvmwc" podStartSLOduration=4.468809855 podStartE2EDuration="1m13.462622428s" podCreationTimestamp="2026-01-22 05:48:17 +0000 UTC" firstStartedPulling="2026-01-22 05:48:20.765279885 +0000 UTC m=+161.603917888" lastFinishedPulling="2026-01-22 05:49:29.759092458 +0000 UTC m=+230.597730461" observedRunningTime="2026-01-22 05:49:30.438187031 +0000 UTC m=+231.276825024" watchObservedRunningTime="2026-01-22 05:49:30.462622428 +0000 UTC m=+231.301260421" Jan 22 05:49:30 crc kubenswrapper[4982]: I0122 05:49:30.463199 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-f5wpb" podStartSLOduration=3.431725566 podStartE2EDuration="1m12.463194224s" podCreationTimestamp="2026-01-22 05:48:18 +0000 UTC" firstStartedPulling="2026-01-22 05:48:20.760590183 +0000 UTC m=+161.599228186" lastFinishedPulling="2026-01-22 05:49:29.792058841 +0000 UTC m=+230.630696844" observedRunningTime="2026-01-22 05:49:30.458683669 +0000 UTC m=+231.297321672" watchObservedRunningTime="2026-01-22 05:49:30.463194224 +0000 UTC m=+231.301832227" Jan 22 05:49:31 crc kubenswrapper[4982]: I0122 05:49:31.412167 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zt96v" event={"ID":"15dc31d8-8456-4ad8-859d-b34c6a85522a","Type":"ContainerStarted","Data":"c6de8c87a0951672661ca0ed5d0f5ef80a931cab2a4ee8eb9c036a4bb9f7f18a"} Jan 22 05:49:31 crc kubenswrapper[4982]: I0122 05:49:31.414705 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vprqb" event={"ID":"09250e91-1a80-4d8c-86c4-0c3f1e84cfcf","Type":"ContainerStarted","Data":"c254069b6e581f091fbeec766b15307d5a0bfbab6bd8dc24ea6be393233fe2c2"} Jan 22 05:49:31 crc kubenswrapper[4982]: I0122 05:49:31.437398 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-zt96v" podStartSLOduration=3.1577540920000002 podStartE2EDuration="1m15.437380188s" podCreationTimestamp="2026-01-22 05:48:16 +0000 UTC" firstStartedPulling="2026-01-22 05:48:18.725818823 +0000 UTC m=+159.564456826" lastFinishedPulling="2026-01-22 05:49:31.005444919 +0000 UTC m=+231.844082922" observedRunningTime="2026-01-22 05:49:31.431223048 +0000 UTC m=+232.269861101" watchObservedRunningTime="2026-01-22 05:49:31.437380188 +0000 UTC m=+232.276018191" Jan 22 05:49:31 crc kubenswrapper[4982]: I0122 05:49:31.460937 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/certified-operators-vprqb" podStartSLOduration=2.185117619 podStartE2EDuration="1m16.460899529s" podCreationTimestamp="2026-01-22 05:48:15 +0000 UTC" firstStartedPulling="2026-01-22 05:48:16.554090697 +0000 UTC m=+157.392728700" lastFinishedPulling="2026-01-22 05:49:30.829872597 +0000 UTC m=+231.668510610" observedRunningTime="2026-01-22 05:49:31.456596301 +0000 UTC m=+232.295234304" watchObservedRunningTime="2026-01-22 05:49:31.460899529 +0000 UTC m=+232.299537532" Jan 22 05:49:32 crc kubenswrapper[4982]: I0122 05:49:32.427128 4982 generic.go:334] "Generic (PLEG): container finished" podID="d8d13857-ec18-4e5c-aadf-61479ff0b6c2" containerID="b5e2c59cfead676be2c312949a0b60bd0679a19d6638baf2b2a6c84ca7b959e9" exitCode=0 Jan 22 05:49:32 crc kubenswrapper[4982]: I0122 05:49:32.427233 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4257l" event={"ID":"d8d13857-ec18-4e5c-aadf-61479ff0b6c2","Type":"ContainerDied","Data":"b5e2c59cfead676be2c312949a0b60bd0679a19d6638baf2b2a6c84ca7b959e9"} Jan 22 05:49:33 crc kubenswrapper[4982]: I0122 05:49:33.445896 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4257l" event={"ID":"d8d13857-ec18-4e5c-aadf-61479ff0b6c2","Type":"ContainerStarted","Data":"3e6d6115cfc6af5d9631d52b4babd689eac995dbf7a113e780c829997f619780"} Jan 22 05:49:33 crc kubenswrapper[4982]: I0122 05:49:33.451496 4982 generic.go:334] "Generic (PLEG): container finished" podID="5da449d5-cc54-4f87-b50c-fd684f5aa487" containerID="b0ffb65db5d1973bebb288347877f96ed7dedf8db5ef4ec23de3bb13a662fcfe" exitCode=0 Jan 22 05:49:33 crc kubenswrapper[4982]: I0122 05:49:33.451583 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ns82k" event={"ID":"5da449d5-cc54-4f87-b50c-fd684f5aa487","Type":"ContainerDied","Data":"b0ffb65db5d1973bebb288347877f96ed7dedf8db5ef4ec23de3bb13a662fcfe"} Jan 22 05:49:33 crc kubenswrapper[4982]: I0122 05:49:33.462231 4982 generic.go:334] "Generic (PLEG): container finished" podID="d4f84179-f77d-4daa-82e5-04398aa15339" containerID="127d2c0b41b75f270f5ff09b53ced980a0c7672e28ce2a9017d66ac9e82e131e" exitCode=0 Jan 22 05:49:33 crc kubenswrapper[4982]: I0122 05:49:33.462275 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pjnnz" event={"ID":"d4f84179-f77d-4daa-82e5-04398aa15339","Type":"ContainerDied","Data":"127d2c0b41b75f270f5ff09b53ced980a0c7672e28ce2a9017d66ac9e82e131e"} Jan 22 05:49:33 crc kubenswrapper[4982]: I0122 05:49:33.506248 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4257l" podStartSLOduration=3.160541395 podStartE2EDuration="1m19.506217242s" podCreationTimestamp="2026-01-22 05:48:14 +0000 UTC" firstStartedPulling="2026-01-22 05:48:16.570073856 +0000 UTC m=+157.408711859" lastFinishedPulling="2026-01-22 05:49:32.915749703 +0000 UTC m=+233.754387706" observedRunningTime="2026-01-22 05:49:33.479638327 +0000 UTC m=+234.318276330" watchObservedRunningTime="2026-01-22 05:49:33.506217242 +0000 UTC m=+234.344855255" Jan 22 05:49:34 crc kubenswrapper[4982]: I0122 05:49:34.469978 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dw5wf" event={"ID":"27d44766-c0c7-48dd-b00d-0fc5adeb8707","Type":"ContainerStarted","Data":"a1c7cd30af4325c653a237e1172906e316cb8202cbefce4387925366be3df966"} Jan 22 05:49:35 crc kubenswrapper[4982]: 
Jan 22 05:49:35 crc kubenswrapper[4982]: I0122 05:49:35.084837 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4257l"
Jan 22 05:49:35 crc kubenswrapper[4982]: I0122 05:49:35.364933 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4257l"
Jan 22 05:49:35 crc kubenswrapper[4982]: I0122 05:49:35.477830 4982 generic.go:334] "Generic (PLEG): container finished" podID="27d44766-c0c7-48dd-b00d-0fc5adeb8707" containerID="a1c7cd30af4325c653a237e1172906e316cb8202cbefce4387925366be3df966" exitCode=0
Jan 22 05:49:35 crc kubenswrapper[4982]: I0122 05:49:35.477955 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dw5wf" event={"ID":"27d44766-c0c7-48dd-b00d-0fc5adeb8707","Type":"ContainerDied","Data":"a1c7cd30af4325c653a237e1172906e316cb8202cbefce4387925366be3df966"}
Jan 22 05:49:35 crc kubenswrapper[4982]: I0122 05:49:35.522228 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vprqb"
Jan 22 05:49:35 crc kubenswrapper[4982]: I0122 05:49:35.522648 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vprqb"
Jan 22 05:49:35 crc kubenswrapper[4982]: I0122 05:49:35.566110 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vprqb"
Jan 22 05:49:36 crc kubenswrapper[4982]: I0122 05:49:36.528529 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vprqb"
Jan 22 05:49:37 crc kubenswrapper[4982]: I0122 05:49:37.074316 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-zt96v"
Jan 22 05:49:37 crc kubenswrapper[4982]: I0122 05:49:37.074385 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-zt96v"
Jan 22 05:49:37 crc kubenswrapper[4982]: I0122 05:49:37.119968 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-zt96v"
Jan 22 05:49:37 crc kubenswrapper[4982]: I0122 05:49:37.358469 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vprqb"]
Jan 22 05:49:37 crc kubenswrapper[4982]: I0122 05:49:37.536265 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-zt96v"
Jan 22 05:49:38 crc kubenswrapper[4982]: I0122 05:49:38.275172 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rvmwc"
Jan 22 05:49:38 crc kubenswrapper[4982]: I0122 05:49:38.275314 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rvmwc"
Jan 22 05:49:38 crc kubenswrapper[4982]: I0122 05:49:38.344432 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rvmwc"
Jan 22 05:49:38 crc kubenswrapper[4982]: I0122 05:49:38.496982 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vprqb" podUID="09250e91-1a80-4d8c-86c4-0c3f1e84cfcf" containerName="registry-server" containerID="cri-o://c254069b6e581f091fbeec766b15307d5a0bfbab6bd8dc24ea6be393233fe2c2" gracePeriod=2
Jan 22 05:49:38 crc kubenswrapper[4982]: I0122 05:49:38.552406 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rvmwc"
Jan 22 05:49:38 crc kubenswrapper[4982]: I0122 05:49:38.668363 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-f5wpb"
Jan 22 05:49:38 crc kubenswrapper[4982]: I0122 05:49:38.668762 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-f5wpb"
Jan 22 05:49:38 crc kubenswrapper[4982]: I0122 05:49:38.746161 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-f5wpb"
Jan 22 05:49:39 crc kubenswrapper[4982]: I0122 05:49:39.508562 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ns82k" event={"ID":"5da449d5-cc54-4f87-b50c-fd684f5aa487","Type":"ContainerStarted","Data":"94f779918720c22811ccc71afbb2c0dcd6f710ed97e16eac02021f4edc65a2ad"}
Jan 22 05:49:39 crc kubenswrapper[4982]: I0122 05:49:39.512194 4982 generic.go:334] "Generic (PLEG): container finished" podID="09250e91-1a80-4d8c-86c4-0c3f1e84cfcf" containerID="c254069b6e581f091fbeec766b15307d5a0bfbab6bd8dc24ea6be393233fe2c2" exitCode=0
Jan 22 05:49:39 crc kubenswrapper[4982]: I0122 05:49:39.512275 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vprqb" event={"ID":"09250e91-1a80-4d8c-86c4-0c3f1e84cfcf","Type":"ContainerDied","Data":"c254069b6e581f091fbeec766b15307d5a0bfbab6bd8dc24ea6be393233fe2c2"}
Jan 22 05:49:39 crc kubenswrapper[4982]: I0122 05:49:39.562206 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-f5wpb"
Jan 22 05:49:40 crc kubenswrapper[4982]: I0122 05:49:40.087018 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vprqb"
Jan 22 05:49:40 crc kubenswrapper[4982]: I0122 05:49:40.191383 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09250e91-1a80-4d8c-86c4-0c3f1e84cfcf-catalog-content\") pod \"09250e91-1a80-4d8c-86c4-0c3f1e84cfcf\" (UID: \"09250e91-1a80-4d8c-86c4-0c3f1e84cfcf\") "
Jan 22 05:49:40 crc kubenswrapper[4982]: I0122 05:49:40.191485 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v8lw5\" (UniqueName: \"kubernetes.io/projected/09250e91-1a80-4d8c-86c4-0c3f1e84cfcf-kube-api-access-v8lw5\") pod \"09250e91-1a80-4d8c-86c4-0c3f1e84cfcf\" (UID: \"09250e91-1a80-4d8c-86c4-0c3f1e84cfcf\") "
Jan 22 05:49:40 crc kubenswrapper[4982]: I0122 05:49:40.191648 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09250e91-1a80-4d8c-86c4-0c3f1e84cfcf-utilities\") pod \"09250e91-1a80-4d8c-86c4-0c3f1e84cfcf\" (UID: \"09250e91-1a80-4d8c-86c4-0c3f1e84cfcf\") "
Jan 22 05:49:40 crc kubenswrapper[4982]: I0122 05:49:40.192832 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09250e91-1a80-4d8c-86c4-0c3f1e84cfcf-utilities" (OuterVolumeSpecName: "utilities") pod "09250e91-1a80-4d8c-86c4-0c3f1e84cfcf" (UID: "09250e91-1a80-4d8c-86c4-0c3f1e84cfcf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 05:49:40 crc kubenswrapper[4982]: I0122 05:49:40.200374 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09250e91-1a80-4d8c-86c4-0c3f1e84cfcf-kube-api-access-v8lw5" (OuterVolumeSpecName: "kube-api-access-v8lw5") pod "09250e91-1a80-4d8c-86c4-0c3f1e84cfcf" (UID: "09250e91-1a80-4d8c-86c4-0c3f1e84cfcf"). InnerVolumeSpecName "kube-api-access-v8lw5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 05:49:40 crc kubenswrapper[4982]: I0122 05:49:40.244409 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09250e91-1a80-4d8c-86c4-0c3f1e84cfcf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "09250e91-1a80-4d8c-86c4-0c3f1e84cfcf" (UID: "09250e91-1a80-4d8c-86c4-0c3f1e84cfcf"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 05:49:40 crc kubenswrapper[4982]: I0122 05:49:40.293831 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09250e91-1a80-4d8c-86c4-0c3f1e84cfcf-utilities\") on node \"crc\" DevicePath \"\""
Jan 22 05:49:40 crc kubenswrapper[4982]: I0122 05:49:40.293992 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09250e91-1a80-4d8c-86c4-0c3f1e84cfcf-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 22 05:49:40 crc kubenswrapper[4982]: I0122 05:49:40.294031 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v8lw5\" (UniqueName: \"kubernetes.io/projected/09250e91-1a80-4d8c-86c4-0c3f1e84cfcf-kube-api-access-v8lw5\") on node \"crc\" DevicePath \"\""
Jan 22 05:49:40 crc kubenswrapper[4982]: I0122 05:49:40.523590 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vprqb"
Jan 22 05:49:40 crc kubenswrapper[4982]: I0122 05:49:40.532429 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vprqb" event={"ID":"09250e91-1a80-4d8c-86c4-0c3f1e84cfcf","Type":"ContainerDied","Data":"8008c6624d5a996af18b5c6294d1d2dfbba6d9c902f3b60725b4f766393a94c2"}
Jan 22 05:49:40 crc kubenswrapper[4982]: I0122 05:49:40.532522 4982 scope.go:117] "RemoveContainer" containerID="c254069b6e581f091fbeec766b15307d5a0bfbab6bd8dc24ea6be393233fe2c2"
Jan 22 05:49:40 crc kubenswrapper[4982]: I0122 05:49:40.567173 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ns82k" podStartSLOduration=3.515894817 podStartE2EDuration="1m25.567149944s" podCreationTimestamp="2026-01-22 05:48:15 +0000 UTC" firstStartedPulling="2026-01-22 05:48:16.528053365 +0000 UTC m=+157.366691368" lastFinishedPulling="2026-01-22 05:49:38.579308492 +0000 UTC m=+239.417946495" observedRunningTime="2026-01-22 05:49:40.566672911 +0000 UTC m=+241.405310964" watchObservedRunningTime="2026-01-22 05:49:40.567149944 +0000 UTC m=+241.405787947"
Jan 22 05:49:40 crc kubenswrapper[4982]: I0122 05:49:40.587488 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vprqb"]
Jan 22 05:49:40 crc kubenswrapper[4982]: I0122 05:49:40.591212 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vprqb"]
Jan 22 05:49:41 crc kubenswrapper[4982]: I0122 05:49:41.284721 4982 scope.go:117] "RemoveContainer" containerID="c92d09ab0e8ae277342ef77ff28c001f9a42eb17ff8c632b767270b6281e5e8f"
Jan 22 05:49:41 crc kubenswrapper[4982]: I0122 05:49:41.301731 4982 scope.go:117] "RemoveContainer" containerID="103ad0949d280662497f71b686e45b2d178070ac1ac747545165dc05c1d39c0e"
Jan 22 05:49:41 crc kubenswrapper[4982]: I0122 05:49:41.727205 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09250e91-1a80-4d8c-86c4-0c3f1e84cfcf" path="/var/lib/kubelet/pods/09250e91-1a80-4d8c-86c4-0c3f1e84cfcf/volumes"
Jan 22 05:49:41 crc kubenswrapper[4982]: I0122 05:49:41.762903 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-f5wpb"]
Jan 22 05:49:41 crc kubenswrapper[4982]: I0122 05:49:41.763394 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-f5wpb" podUID="4b466ca6-46a5-4fc3-91e4-9f433be5f12c" containerName="registry-server" containerID="cri-o://b5899f015b1251f329e82c890ddccb8e822e1f4bda28620f46feefb4341c0c71" gracePeriod=2
Jan 22 05:49:43 crc kubenswrapper[4982]: I0122 05:49:43.551015 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dw5wf" event={"ID":"27d44766-c0c7-48dd-b00d-0fc5adeb8707","Type":"ContainerStarted","Data":"9c0f9c2acf0cc5fbcd6047512e63463f3394e05ee96d61392f91bd6189f33d02"}
Jan 22 05:49:43 crc kubenswrapper[4982]: I0122 05:49:43.556558 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f5wpb" event={"ID":"4b466ca6-46a5-4fc3-91e4-9f433be5f12c","Type":"ContainerDied","Data":"b5899f015b1251f329e82c890ddccb8e822e1f4bda28620f46feefb4341c0c71"}
Jan 22 05:49:43 crc kubenswrapper[4982]: I0122 05:49:43.556636 4982 generic.go:334] "Generic (PLEG): container finished" podID="4b466ca6-46a5-4fc3-91e4-9f433be5f12c" containerID="b5899f015b1251f329e82c890ddccb8e822e1f4bda28620f46feefb4341c0c71" exitCode=0
containerID="b5899f015b1251f329e82c890ddccb8e822e1f4bda28620f46feefb4341c0c71" exitCode=0 Jan 22 05:49:44 crc kubenswrapper[4982]: I0122 05:49:44.531416 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-f5wpb" Jan 22 05:49:44 crc kubenswrapper[4982]: I0122 05:49:44.551789 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dw5wf" podStartSLOduration=5.81240726 podStartE2EDuration="1m30.551762995s" podCreationTimestamp="2026-01-22 05:48:14 +0000 UTC" firstStartedPulling="2026-01-22 05:48:16.545592154 +0000 UTC m=+157.384230157" lastFinishedPulling="2026-01-22 05:49:41.284947879 +0000 UTC m=+242.123585892" observedRunningTime="2026-01-22 05:49:43.58189818 +0000 UTC m=+244.420536213" watchObservedRunningTime="2026-01-22 05:49:44.551762995 +0000 UTC m=+245.390401008" Jan 22 05:49:44 crc kubenswrapper[4982]: I0122 05:49:44.564350 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f5wpb" event={"ID":"4b466ca6-46a5-4fc3-91e4-9f433be5f12c","Type":"ContainerDied","Data":"d9522ea0c40327144fc98f23c09384b750d4881ac2068f195433fa8649265885"} Jan 22 05:49:44 crc kubenswrapper[4982]: I0122 05:49:44.564869 4982 scope.go:117] "RemoveContainer" containerID="b5899f015b1251f329e82c890ddccb8e822e1f4bda28620f46feefb4341c0c71" Jan 22 05:49:44 crc kubenswrapper[4982]: I0122 05:49:44.564406 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-f5wpb" Jan 22 05:49:44 crc kubenswrapper[4982]: I0122 05:49:44.583429 4982 scope.go:117] "RemoveContainer" containerID="f478aeee57c240f8dfbee340e08955be0cf8a7e20b7ffbf53abca210acecd208" Jan 22 05:49:44 crc kubenswrapper[4982]: I0122 05:49:44.603417 4982 scope.go:117] "RemoveContainer" containerID="74c493bf91ac4e573ca8e5fce3a19690233415a4f5583107b1ecdc7ae7e6c5a8" Jan 22 05:49:44 crc kubenswrapper[4982]: I0122 05:49:44.662788 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l4pcm\" (UniqueName: \"kubernetes.io/projected/4b466ca6-46a5-4fc3-91e4-9f433be5f12c-kube-api-access-l4pcm\") pod \"4b466ca6-46a5-4fc3-91e4-9f433be5f12c\" (UID: \"4b466ca6-46a5-4fc3-91e4-9f433be5f12c\") " Jan 22 05:49:44 crc kubenswrapper[4982]: I0122 05:49:44.662897 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b466ca6-46a5-4fc3-91e4-9f433be5f12c-utilities\") pod \"4b466ca6-46a5-4fc3-91e4-9f433be5f12c\" (UID: \"4b466ca6-46a5-4fc3-91e4-9f433be5f12c\") " Jan 22 05:49:44 crc kubenswrapper[4982]: I0122 05:49:44.662944 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b466ca6-46a5-4fc3-91e4-9f433be5f12c-catalog-content\") pod \"4b466ca6-46a5-4fc3-91e4-9f433be5f12c\" (UID: \"4b466ca6-46a5-4fc3-91e4-9f433be5f12c\") " Jan 22 05:49:44 crc kubenswrapper[4982]: I0122 05:49:44.664629 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b466ca6-46a5-4fc3-91e4-9f433be5f12c-utilities" (OuterVolumeSpecName: "utilities") pod "4b466ca6-46a5-4fc3-91e4-9f433be5f12c" (UID: "4b466ca6-46a5-4fc3-91e4-9f433be5f12c"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 05:49:44 crc kubenswrapper[4982]: I0122 05:49:44.677178 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b466ca6-46a5-4fc3-91e4-9f433be5f12c-kube-api-access-l4pcm" (OuterVolumeSpecName: "kube-api-access-l4pcm") pod "4b466ca6-46a5-4fc3-91e4-9f433be5f12c" (UID: "4b466ca6-46a5-4fc3-91e4-9f433be5f12c"). InnerVolumeSpecName "kube-api-access-l4pcm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:49:44 crc kubenswrapper[4982]: I0122 05:49:44.764842 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l4pcm\" (UniqueName: \"kubernetes.io/projected/4b466ca6-46a5-4fc3-91e4-9f433be5f12c-kube-api-access-l4pcm\") on node \"crc\" DevicePath \"\"" Jan 22 05:49:44 crc kubenswrapper[4982]: I0122 05:49:44.764904 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b466ca6-46a5-4fc3-91e4-9f433be5f12c-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 05:49:44 crc kubenswrapper[4982]: I0122 05:49:44.823216 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b466ca6-46a5-4fc3-91e4-9f433be5f12c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4b466ca6-46a5-4fc3-91e4-9f433be5f12c" (UID: "4b466ca6-46a5-4fc3-91e4-9f433be5f12c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 05:49:44 crc kubenswrapper[4982]: I0122 05:49:44.867071 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b466ca6-46a5-4fc3-91e4-9f433be5f12c-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 05:49:44 crc kubenswrapper[4982]: I0122 05:49:44.895169 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-f5wpb"] Jan 22 05:49:44 crc kubenswrapper[4982]: I0122 05:49:44.903602 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-f5wpb"] Jan 22 05:49:45 crc kubenswrapper[4982]: I0122 05:49:45.141636 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4257l" Jan 22 05:49:45 crc kubenswrapper[4982]: I0122 05:49:45.271723 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dw5wf" Jan 22 05:49:45 crc kubenswrapper[4982]: I0122 05:49:45.272067 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dw5wf" Jan 22 05:49:45 crc kubenswrapper[4982]: I0122 05:49:45.329096 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dw5wf" Jan 22 05:49:45 crc kubenswrapper[4982]: I0122 05:49:45.577837 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pjnnz" event={"ID":"d4f84179-f77d-4daa-82e5-04398aa15339","Type":"ContainerStarted","Data":"ffa854479b114a01539fcaa4aad0306d0d39e3fe05d0da4317eb4e93135797c0"} Jan 22 05:49:45 crc kubenswrapper[4982]: I0122 05:49:45.602062 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pjnnz" podStartSLOduration=2.942412992 podStartE2EDuration="1m28.602033667s" podCreationTimestamp="2026-01-22 05:48:17 +0000 UTC" firstStartedPulling="2026-01-22 
Jan 22 05:49:45 crc kubenswrapper[4982]: I0122 05:49:45.730548 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b466ca6-46a5-4fc3-91e4-9f433be5f12c" path="/var/lib/kubelet/pods/4b466ca6-46a5-4fc3-91e4-9f433be5f12c/volumes"
Jan 22 05:49:45 crc kubenswrapper[4982]: I0122 05:49:45.731782 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ns82k"
Jan 22 05:49:45 crc kubenswrapper[4982]: I0122 05:49:45.731948 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ns82k"
Jan 22 05:49:45 crc kubenswrapper[4982]: I0122 05:49:45.768999 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ns82k"
Jan 22 05:49:46 crc kubenswrapper[4982]: I0122 05:49:46.629898 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ns82k"
Jan 22 05:49:47 crc kubenswrapper[4982]: I0122 05:49:47.494052 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pjnnz"
Jan 22 05:49:47 crc kubenswrapper[4982]: I0122 05:49:47.495068 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pjnnz"
Jan 22 05:49:47 crc kubenswrapper[4982]: I0122 05:49:47.542938 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pjnnz"
Jan 22 05:49:50 crc kubenswrapper[4982]: I0122 05:49:50.765623 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ns82k"]
Jan 22 05:49:50 crc kubenswrapper[4982]: I0122 05:49:50.766580 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ns82k" podUID="5da449d5-cc54-4f87-b50c-fd684f5aa487" containerName="registry-server" containerID="cri-o://94f779918720c22811ccc71afbb2c0dcd6f710ed97e16eac02021f4edc65a2ad" gracePeriod=2
Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.283706 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" podUID="3ab253fb-76c6-4f2c-b718-c37afd8540ff" containerName="oauth-openshift" containerID="cri-o://0faa21172f5ab02f904dcf3d83494afcbd7d3db2678ed5365781314150bfc3b5" gracePeriod=15
Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.631408 4982 generic.go:334] "Generic (PLEG): container finished" podID="5da449d5-cc54-4f87-b50c-fd684f5aa487" containerID="94f779918720c22811ccc71afbb2c0dcd6f710ed97e16eac02021f4edc65a2ad" exitCode=0
Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.631645 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ns82k" event={"ID":"5da449d5-cc54-4f87-b50c-fd684f5aa487","Type":"ContainerDied","Data":"94f779918720c22811ccc71afbb2c0dcd6f710ed97e16eac02021f4edc65a2ad"}
Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.635388 4982 generic.go:334] "Generic (PLEG): container finished" podID="3ab253fb-76c6-4f2c-b718-c37afd8540ff" containerID="0faa21172f5ab02f904dcf3d83494afcbd7d3db2678ed5365781314150bfc3b5" exitCode=0
Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.635439 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" event={"ID":"3ab253fb-76c6-4f2c-b718-c37afd8540ff","Type":"ContainerDied","Data":"0faa21172f5ab02f904dcf3d83494afcbd7d3db2678ed5365781314150bfc3b5"}
Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.792167 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-lbczg"
Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.798536 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ns82k"
Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.841676 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-594b4c596d-gmgc8"]
Jan 22 05:49:51 crc kubenswrapper[4982]: E0122 05:49:51.842178 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09250e91-1a80-4d8c-86c4-0c3f1e84cfcf" containerName="extract-content"
Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.842202 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="09250e91-1a80-4d8c-86c4-0c3f1e84cfcf" containerName="extract-content"
Jan 22 05:49:51 crc kubenswrapper[4982]: E0122 05:49:51.842230 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09250e91-1a80-4d8c-86c4-0c3f1e84cfcf" containerName="extract-utilities"
Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.842242 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="09250e91-1a80-4d8c-86c4-0c3f1e84cfcf" containerName="extract-utilities"
Jan 22 05:49:51 crc kubenswrapper[4982]: E0122 05:49:51.842264 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09250e91-1a80-4d8c-86c4-0c3f1e84cfcf" containerName="registry-server"
Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.842275 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="09250e91-1a80-4d8c-86c4-0c3f1e84cfcf" containerName="registry-server"
Jan 22 05:49:51 crc kubenswrapper[4982]: E0122 05:49:51.842287 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cda29bd4-a01f-4c2a-b9e1-1e842578cb7c" containerName="pruner"
Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.842298 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="cda29bd4-a01f-4c2a-b9e1-1e842578cb7c" containerName="pruner"
Jan 22 05:49:51 crc kubenswrapper[4982]: E0122 05:49:51.842311 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5da449d5-cc54-4f87-b50c-fd684f5aa487" containerName="registry-server"
Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.842321 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="5da449d5-cc54-4f87-b50c-fd684f5aa487" containerName="registry-server"
Jan 22 05:49:51 crc kubenswrapper[4982]: E0122 05:49:51.842340 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b466ca6-46a5-4fc3-91e4-9f433be5f12c" containerName="extract-content"
Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.842350 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b466ca6-46a5-4fc3-91e4-9f433be5f12c" containerName="extract-content"
Jan 22 05:49:51 crc kubenswrapper[4982]: E0122 05:49:51.842364 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5da449d5-cc54-4f87-b50c-fd684f5aa487" containerName="extract-content"
containerName="extract-content" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.842374 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="5da449d5-cc54-4f87-b50c-fd684f5aa487" containerName="extract-content" Jan 22 05:49:51 crc kubenswrapper[4982]: E0122 05:49:51.842392 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b466ca6-46a5-4fc3-91e4-9f433be5f12c" containerName="extract-utilities" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.842402 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b466ca6-46a5-4fc3-91e4-9f433be5f12c" containerName="extract-utilities" Jan 22 05:49:51 crc kubenswrapper[4982]: E0122 05:49:51.842415 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b466ca6-46a5-4fc3-91e4-9f433be5f12c" containerName="registry-server" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.842425 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b466ca6-46a5-4fc3-91e4-9f433be5f12c" containerName="registry-server" Jan 22 05:49:51 crc kubenswrapper[4982]: E0122 05:49:51.842517 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ab253fb-76c6-4f2c-b718-c37afd8540ff" containerName="oauth-openshift" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.842528 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ab253fb-76c6-4f2c-b718-c37afd8540ff" containerName="oauth-openshift" Jan 22 05:49:51 crc kubenswrapper[4982]: E0122 05:49:51.842542 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5da449d5-cc54-4f87-b50c-fd684f5aa487" containerName="extract-utilities" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.842552 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="5da449d5-cc54-4f87-b50c-fd684f5aa487" containerName="extract-utilities" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.842770 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="cda29bd4-a01f-4c2a-b9e1-1e842578cb7c" containerName="pruner" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.842796 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="5da449d5-cc54-4f87-b50c-fd684f5aa487" containerName="registry-server" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.842806 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ab253fb-76c6-4f2c-b718-c37afd8540ff" containerName="oauth-openshift" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.842820 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="09250e91-1a80-4d8c-86c4-0c3f1e84cfcf" containerName="registry-server" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.842836 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b466ca6-46a5-4fc3-91e4-9f433be5f12c" containerName="registry-server" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.843712 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.847597 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-594b4c596d-gmgc8"] Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.876284 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3ab253fb-76c6-4f2c-b718-c37afd8540ff-audit-dir\") pod \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.876363 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-user-template-login\") pod \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.876425 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-service-ca\") pod \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.876489 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-router-certs\") pod \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.876532 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5da449d5-cc54-4f87-b50c-fd684f5aa487-utilities\") pod \"5da449d5-cc54-4f87-b50c-fd684f5aa487\" (UID: \"5da449d5-cc54-4f87-b50c-fd684f5aa487\") " Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.876593 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5da449d5-cc54-4f87-b50c-fd684f5aa487-catalog-content\") pod \"5da449d5-cc54-4f87-b50c-fd684f5aa487\" (UID: \"5da449d5-cc54-4f87-b50c-fd684f5aa487\") " Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.876623 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-user-template-provider-selection\") pod \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.876650 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-cliconfig\") pod \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.876673 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-serving-cert\") 
pod \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.876709 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-user-idp-0-file-data\") pod \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.876751 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-user-template-error\") pod \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.876797 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ctvts\" (UniqueName: \"kubernetes.io/projected/5da449d5-cc54-4f87-b50c-fd684f5aa487-kube-api-access-ctvts\") pod \"5da449d5-cc54-4f87-b50c-fd684f5aa487\" (UID: \"5da449d5-cc54-4f87-b50c-fd684f5aa487\") " Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.876824 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-trusted-ca-bundle\") pod \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.876888 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-ocp-branding-template\") pod \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.876915 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pwv6m\" (UniqueName: \"kubernetes.io/projected/3ab253fb-76c6-4f2c-b718-c37afd8540ff-kube-api-access-pwv6m\") pod \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.876947 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-session\") pod \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.876978 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3ab253fb-76c6-4f2c-b718-c37afd8540ff-audit-policies\") pod \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\" (UID: \"3ab253fb-76c6-4f2c-b718-c37afd8540ff\") " Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.877231 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-user-template-error\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " 
pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.877267 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.877297 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-system-serving-cert\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.877322 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.877360 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/13f2ecc8-51d2-4022-9f90-fdf167ddc668-audit-policies\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.877391 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-system-cliconfig\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.877416 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-system-service-ca\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.877440 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.877471 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-system-router-certs\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.877505 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bglrg\" (UniqueName: \"kubernetes.io/projected/13f2ecc8-51d2-4022-9f90-fdf167ddc668-kube-api-access-bglrg\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.877531 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/13f2ecc8-51d2-4022-9f90-fdf167ddc668-audit-dir\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.877569 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-system-session\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.877589 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-user-template-login\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.877613 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.877749 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3ab253fb-76c6-4f2c-b718-c37afd8540ff-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "3ab253fb-76c6-4f2c-b718-c37afd8540ff" (UID: "3ab253fb-76c6-4f2c-b718-c37afd8540ff"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.880119 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ab253fb-76c6-4f2c-b718-c37afd8540ff-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "3ab253fb-76c6-4f2c-b718-c37afd8540ff" (UID: "3ab253fb-76c6-4f2c-b718-c37afd8540ff"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.880672 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "3ab253fb-76c6-4f2c-b718-c37afd8540ff" (UID: "3ab253fb-76c6-4f2c-b718-c37afd8540ff"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.880656 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5da449d5-cc54-4f87-b50c-fd684f5aa487-utilities" (OuterVolumeSpecName: "utilities") pod "5da449d5-cc54-4f87-b50c-fd684f5aa487" (UID: "5da449d5-cc54-4f87-b50c-fd684f5aa487"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.881386 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "3ab253fb-76c6-4f2c-b718-c37afd8540ff" (UID: "3ab253fb-76c6-4f2c-b718-c37afd8540ff"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.885051 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "3ab253fb-76c6-4f2c-b718-c37afd8540ff" (UID: "3ab253fb-76c6-4f2c-b718-c37afd8540ff"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.887901 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "3ab253fb-76c6-4f2c-b718-c37afd8540ff" (UID: "3ab253fb-76c6-4f2c-b718-c37afd8540ff"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.888450 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "3ab253fb-76c6-4f2c-b718-c37afd8540ff" (UID: "3ab253fb-76c6-4f2c-b718-c37afd8540ff"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.888533 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5da449d5-cc54-4f87-b50c-fd684f5aa487-kube-api-access-ctvts" (OuterVolumeSpecName: "kube-api-access-ctvts") pod "5da449d5-cc54-4f87-b50c-fd684f5aa487" (UID: "5da449d5-cc54-4f87-b50c-fd684f5aa487"). InnerVolumeSpecName "kube-api-access-ctvts". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.889225 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "3ab253fb-76c6-4f2c-b718-c37afd8540ff" (UID: "3ab253fb-76c6-4f2c-b718-c37afd8540ff"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.889519 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab253fb-76c6-4f2c-b718-c37afd8540ff-kube-api-access-pwv6m" (OuterVolumeSpecName: "kube-api-access-pwv6m") pod "3ab253fb-76c6-4f2c-b718-c37afd8540ff" (UID: "3ab253fb-76c6-4f2c-b718-c37afd8540ff"). InnerVolumeSpecName "kube-api-access-pwv6m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.891033 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "3ab253fb-76c6-4f2c-b718-c37afd8540ff" (UID: "3ab253fb-76c6-4f2c-b718-c37afd8540ff"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.891530 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "3ab253fb-76c6-4f2c-b718-c37afd8540ff" (UID: "3ab253fb-76c6-4f2c-b718-c37afd8540ff"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.892086 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "3ab253fb-76c6-4f2c-b718-c37afd8540ff" (UID: "3ab253fb-76c6-4f2c-b718-c37afd8540ff"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.892360 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "3ab253fb-76c6-4f2c-b718-c37afd8540ff" (UID: "3ab253fb-76c6-4f2c-b718-c37afd8540ff"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.893992 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "3ab253fb-76c6-4f2c-b718-c37afd8540ff" (UID: "3ab253fb-76c6-4f2c-b718-c37afd8540ff"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.942328 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5da449d5-cc54-4f87-b50c-fd684f5aa487-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5da449d5-cc54-4f87-b50c-fd684f5aa487" (UID: "5da449d5-cc54-4f87-b50c-fd684f5aa487"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.979641 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-system-serving-cert\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.979723 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.979767 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/13f2ecc8-51d2-4022-9f90-fdf167ddc668-audit-policies\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.979793 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-system-cliconfig\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.979814 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-system-service-ca\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.979833 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.979889 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-system-router-certs\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " 
pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.979922 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bglrg\" (UniqueName: \"kubernetes.io/projected/13f2ecc8-51d2-4022-9f90-fdf167ddc668-kube-api-access-bglrg\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.979943 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/13f2ecc8-51d2-4022-9f90-fdf167ddc668-audit-dir\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.979972 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-system-session\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.979998 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-user-template-login\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.980019 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.980060 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-user-template-error\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.980087 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.980149 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ctvts\" (UniqueName: \"kubernetes.io/projected/5da449d5-cc54-4f87-b50c-fd684f5aa487-kube-api-access-ctvts\") on node \"crc\" DevicePath \"\"" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.980165 4982 reconciler_common.go:293] "Volume detached for volume 
\"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.980180 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.980193 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pwv6m\" (UniqueName: \"kubernetes.io/projected/3ab253fb-76c6-4f2c-b718-c37afd8540ff-kube-api-access-pwv6m\") on node \"crc\" DevicePath \"\"" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.980205 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.980218 4982 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3ab253fb-76c6-4f2c-b718-c37afd8540ff-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.980229 4982 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3ab253fb-76c6-4f2c-b718-c37afd8540ff-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.980240 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.980252 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.980266 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.980281 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5da449d5-cc54-4f87-b50c-fd684f5aa487-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.980293 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5da449d5-cc54-4f87-b50c-fd684f5aa487-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.980307 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.980319 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
\"kubernetes.io/configmap/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.980334 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.980347 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.980361 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/3ab253fb-76c6-4f2c-b718-c37afd8540ff-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.981084 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/13f2ecc8-51d2-4022-9f90-fdf167ddc668-audit-dir\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.981500 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/13f2ecc8-51d2-4022-9f90-fdf167ddc668-audit-policies\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.984446 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-system-service-ca\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.985112 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-system-cliconfig\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.985914 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.986577 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-system-serving-cert\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " 
pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.987068 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.987098 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.989668 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-system-session\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.990279 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-user-template-login\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.991227 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.991345 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-user-template-error\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:51 crc kubenswrapper[4982]: I0122 05:49:51.992414 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/13f2ecc8-51d2-4022-9f90-fdf167ddc668-v4-0-config-system-router-certs\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:52 crc kubenswrapper[4982]: I0122 05:49:52.009763 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bglrg\" (UniqueName: \"kubernetes.io/projected/13f2ecc8-51d2-4022-9f90-fdf167ddc668-kube-api-access-bglrg\") pod \"oauth-openshift-594b4c596d-gmgc8\" (UID: \"13f2ecc8-51d2-4022-9f90-fdf167ddc668\") " 
pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:52 crc kubenswrapper[4982]: I0122 05:49:52.161678 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:52 crc kubenswrapper[4982]: I0122 05:49:52.421280 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-594b4c596d-gmgc8"] Jan 22 05:49:52 crc kubenswrapper[4982]: W0122 05:49:52.428402 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod13f2ecc8_51d2_4022_9f90_fdf167ddc668.slice/crio-67c9e9e552125d86fe3da8e3bc2c0bcd6c96e6b792d3e36695217016de2da936 WatchSource:0}: Error finding container 67c9e9e552125d86fe3da8e3bc2c0bcd6c96e6b792d3e36695217016de2da936: Status 404 returned error can't find the container with id 67c9e9e552125d86fe3da8e3bc2c0bcd6c96e6b792d3e36695217016de2da936 Jan 22 05:49:52 crc kubenswrapper[4982]: I0122 05:49:52.642899 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" event={"ID":"13f2ecc8-51d2-4022-9f90-fdf167ddc668","Type":"ContainerStarted","Data":"67c9e9e552125d86fe3da8e3bc2c0bcd6c96e6b792d3e36695217016de2da936"} Jan 22 05:49:52 crc kubenswrapper[4982]: I0122 05:49:52.646485 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" event={"ID":"3ab253fb-76c6-4f2c-b718-c37afd8540ff","Type":"ContainerDied","Data":"8a3eae5bdeed3145462499c8a5506d12f14331bea9a8564504dab22088019da4"} Jan 22 05:49:52 crc kubenswrapper[4982]: I0122 05:49:52.646580 4982 scope.go:117] "RemoveContainer" containerID="0faa21172f5ab02f904dcf3d83494afcbd7d3db2678ed5365781314150bfc3b5" Jan 22 05:49:52 crc kubenswrapper[4982]: I0122 05:49:52.646501 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-lbczg" Jan 22 05:49:52 crc kubenswrapper[4982]: I0122 05:49:52.650997 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ns82k" event={"ID":"5da449d5-cc54-4f87-b50c-fd684f5aa487","Type":"ContainerDied","Data":"7c5139f4db3f7bb14b70492dfcbfa0c42302003d39d3d4f84763cb7c99d1bbe7"} Jan 22 05:49:52 crc kubenswrapper[4982]: I0122 05:49:52.651052 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ns82k" Jan 22 05:49:52 crc kubenswrapper[4982]: I0122 05:49:52.677428 4982 scope.go:117] "RemoveContainer" containerID="94f779918720c22811ccc71afbb2c0dcd6f710ed97e16eac02021f4edc65a2ad" Jan 22 05:49:52 crc kubenswrapper[4982]: I0122 05:49:52.691262 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lbczg"] Jan 22 05:49:52 crc kubenswrapper[4982]: I0122 05:49:52.696651 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lbczg"] Jan 22 05:49:52 crc kubenswrapper[4982]: I0122 05:49:52.708033 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ns82k"] Jan 22 05:49:52 crc kubenswrapper[4982]: I0122 05:49:52.711205 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ns82k"] Jan 22 05:49:52 crc kubenswrapper[4982]: I0122 05:49:52.712242 4982 scope.go:117] "RemoveContainer" containerID="b0ffb65db5d1973bebb288347877f96ed7dedf8db5ef4ec23de3bb13a662fcfe" Jan 22 05:49:52 crc kubenswrapper[4982]: I0122 05:49:52.735720 4982 scope.go:117] "RemoveContainer" containerID="4ccaee24d48678395ff1f13722bfda6a1f94b7df9ef8f113ebb49289cf63d88d" Jan 22 05:49:53 crc kubenswrapper[4982]: I0122 05:49:53.674794 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" event={"ID":"13f2ecc8-51d2-4022-9f90-fdf167ddc668","Type":"ContainerStarted","Data":"7c268c6b74624922e6f382ed07f9043cee6e3e38d5ccc5d10e71cee0ede27058"} Jan 22 05:49:53 crc kubenswrapper[4982]: I0122 05:49:53.675533 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:53 crc kubenswrapper[4982]: I0122 05:49:53.687537 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" Jan 22 05:49:53 crc kubenswrapper[4982]: I0122 05:49:53.722282 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-594b4c596d-gmgc8" podStartSLOduration=27.722250802 podStartE2EDuration="27.722250802s" podCreationTimestamp="2026-01-22 05:49:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:49:53.715231897 +0000 UTC m=+254.553869990" watchObservedRunningTime="2026-01-22 05:49:53.722250802 +0000 UTC m=+254.560888845" Jan 22 05:49:53 crc kubenswrapper[4982]: I0122 05:49:53.737903 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab253fb-76c6-4f2c-b718-c37afd8540ff" path="/var/lib/kubelet/pods/3ab253fb-76c6-4f2c-b718-c37afd8540ff/volumes" Jan 22 05:49:53 crc kubenswrapper[4982]: I0122 05:49:53.739161 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5da449d5-cc54-4f87-b50c-fd684f5aa487" path="/var/lib/kubelet/pods/5da449d5-cc54-4f87-b50c-fd684f5aa487/volumes" Jan 22 05:49:55 crc kubenswrapper[4982]: E0122 05:49:55.246726 4982 file.go:109] "Unable to process watch event" err="can't process config file \"/etc/kubernetes/manifests/kube-apiserver-startup-monitor-pod.yaml\": /etc/kubernetes/manifests/kube-apiserver-startup-monitor-pod.yaml: couldn't parse as pod(Object 'Kind' is missing in 'null'), please check config file" Jan 22 05:49:55 crc kubenswrapper[4982]: 
I0122 05:49:55.250318 4982 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.252117 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.252195 4982 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.252805 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6" gracePeriod=15 Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.252912 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c" gracePeriod=15 Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.252845 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de" gracePeriod=15 Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.253120 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220" gracePeriod=15 Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.252926 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a" gracePeriod=15 Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.254747 4982 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 22 05:49:55 crc kubenswrapper[4982]: E0122 05:49:55.255277 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.255309 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 22 05:49:55 crc kubenswrapper[4982]: E0122 05:49:55.255342 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.255359 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 22 05:49:55 crc kubenswrapper[4982]: E0122 05:49:55.255391 4982 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.255407 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 22 05:49:55 crc kubenswrapper[4982]: E0122 05:49:55.255428 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.255444 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 22 05:49:55 crc kubenswrapper[4982]: E0122 05:49:55.255471 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.255488 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 22 05:49:55 crc kubenswrapper[4982]: E0122 05:49:55.255513 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.255532 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.255898 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.255931 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.255951 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.255990 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.256008 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.340610 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.341085 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.341256 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" 
(UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.341432 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.341586 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.341727 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.341900 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.342103 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.356755 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dw5wf" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.360325 4982 status_manager.go:851] "Failed to get status for pod" podUID="27d44766-c0c7-48dd-b00d-0fc5adeb8707" pod="openshift-marketplace/community-operators-dw5wf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-dw5wf\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.360782 4982 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.444058 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") 
" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.444493 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.444685 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.444833 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.444997 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.444588 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.444539 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.445227 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.445220 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.445360 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 05:49:55 crc kubenswrapper[4982]: 
I0122 05:49:55.445608 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.445934 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.447475 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.446179 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.445771 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.447646 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.692394 4982 generic.go:334] "Generic (PLEG): container finished" podID="f17dc565-d8f5-4b5e-8139-1a679d6c99ae" containerID="e2dbd94f9630f1e513bfa43ed6436ae772d3b7fb1b6bdcaf294bbb19716c8fab" exitCode=0 Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.692580 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"f17dc565-d8f5-4b5e-8139-1a679d6c99ae","Type":"ContainerDied","Data":"e2dbd94f9630f1e513bfa43ed6436ae772d3b7fb1b6bdcaf294bbb19716c8fab"} Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.694064 4982 status_manager.go:851] "Failed to get status for pod" podUID="27d44766-c0c7-48dd-b00d-0fc5adeb8707" pod="openshift-marketplace/community-operators-dw5wf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-dw5wf\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.694547 4982 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.695106 4982 status_manager.go:851] "Failed to get status for pod" podUID="f17dc565-d8f5-4b5e-8139-1a679d6c99ae" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.696492 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.697179 4982 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6" exitCode=0 Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.697203 4982 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a" exitCode=0 Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.697213 4982 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de" exitCode=0 Jan 22 05:49:55 crc kubenswrapper[4982]: I0122 05:49:55.697227 4982 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c" exitCode=2 Jan 22 05:49:56 crc kubenswrapper[4982]: E0122 05:49:56.428462 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:49:56Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:49:56Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:49:56Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:49:56Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:27cf3abbf8fd467e0024e29f4a1590ade73c4e616041027fc414be0d345fbddc\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:61565de83851ce1a60a7f5484dc89d16992896eb24005c0196eed44fc53d8e6a\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1671130350},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:aad5e438ec868272540a84dfc53b266c8a08267bec7a7617871dddeb1511dcb2\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:dd1e95af8b913ea8f010fa96cba36f2e7e5b1edfbf758c69b8c9eeb88c6911ea\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1202744046},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:2b72e40c5d5b36b681f40c16ebf3dcac6520ed0c79f174ba87f673ab7afd209a\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:d83ee77ad07e06451a84205ac4c85c69e912a1c975e1a8a95095d79218028dce\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1178956511},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1f907bbb9feda871911fe6104a05039ba4876ca82e26d41398008385f8a7656b\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:cffa3fc70ac997825130e3768ebb1a24956f427e18870f87ef0654513b3cd657\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1168433908},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\
\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792}],\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"}]}}\" for 
node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:49:56 crc kubenswrapper[4982]: E0122 05:49:56.429622 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:49:56 crc kubenswrapper[4982]: E0122 05:49:56.431423 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:49:56 crc kubenswrapper[4982]: E0122 05:49:56.433433 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:49:56 crc kubenswrapper[4982]: E0122 05:49:56.434348 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:49:56 crc kubenswrapper[4982]: E0122 05:49:56.434416 4982 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.041536 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.042772 4982 status_manager.go:851] "Failed to get status for pod" podUID="f17dc565-d8f5-4b5e-8139-1a679d6c99ae" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.043217 4982 status_manager.go:851] "Failed to get status for pod" podUID="27d44766-c0c7-48dd-b00d-0fc5adeb8707" pod="openshift-marketplace/community-operators-dw5wf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-dw5wf\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.087094 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f17dc565-d8f5-4b5e-8139-1a679d6c99ae-var-lock\") pod \"f17dc565-d8f5-4b5e-8139-1a679d6c99ae\" (UID: \"f17dc565-d8f5-4b5e-8139-1a679d6c99ae\") " Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.087179 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f17dc565-d8f5-4b5e-8139-1a679d6c99ae-kubelet-dir\") pod \"f17dc565-d8f5-4b5e-8139-1a679d6c99ae\" (UID: \"f17dc565-d8f5-4b5e-8139-1a679d6c99ae\") " Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.087268 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f17dc565-d8f5-4b5e-8139-1a679d6c99ae-kube-api-access\") pod \"f17dc565-d8f5-4b5e-8139-1a679d6c99ae\" (UID: \"f17dc565-d8f5-4b5e-8139-1a679d6c99ae\") " Jan 22 
05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.088011 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f17dc565-d8f5-4b5e-8139-1a679d6c99ae-var-lock" (OuterVolumeSpecName: "var-lock") pod "f17dc565-d8f5-4b5e-8139-1a679d6c99ae" (UID: "f17dc565-d8f5-4b5e-8139-1a679d6c99ae"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.088043 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f17dc565-d8f5-4b5e-8139-1a679d6c99ae-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "f17dc565-d8f5-4b5e-8139-1a679d6c99ae" (UID: "f17dc565-d8f5-4b5e-8139-1a679d6c99ae"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.092924 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f17dc565-d8f5-4b5e-8139-1a679d6c99ae-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "f17dc565-d8f5-4b5e-8139-1a679d6c99ae" (UID: "f17dc565-d8f5-4b5e-8139-1a679d6c99ae"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.189278 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f17dc565-d8f5-4b5e-8139-1a679d6c99ae-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.189334 4982 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f17dc565-d8f5-4b5e-8139-1a679d6c99ae-var-lock\") on node \"crc\" DevicePath \"\"" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.189355 4982 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f17dc565-d8f5-4b5e-8139-1a679d6c99ae-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.545633 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pjnnz" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.547865 4982 status_manager.go:851] "Failed to get status for pod" podUID="f17dc565-d8f5-4b5e-8139-1a679d6c99ae" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.548531 4982 status_manager.go:851] "Failed to get status for pod" podUID="27d44766-c0c7-48dd-b00d-0fc5adeb8707" pod="openshift-marketplace/community-operators-dw5wf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-dw5wf\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.549018 4982 status_manager.go:851] "Failed to get status for pod" podUID="d4f84179-f77d-4daa-82e5-04398aa15339" pod="openshift-marketplace/redhat-marketplace-pjnnz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pjnnz\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.673556 4982 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.675106 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.676725 4982 status_manager.go:851] "Failed to get status for pod" podUID="27d44766-c0c7-48dd-b00d-0fc5adeb8707" pod="openshift-marketplace/community-operators-dw5wf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-dw5wf\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.677759 4982 status_manager.go:851] "Failed to get status for pod" podUID="d4f84179-f77d-4daa-82e5-04398aa15339" pod="openshift-marketplace/redhat-marketplace-pjnnz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pjnnz\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.678534 4982 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.679088 4982 status_manager.go:851] "Failed to get status for pod" podUID="f17dc565-d8f5-4b5e-8139-1a679d6c99ae" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.710550 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.710564 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"f17dc565-d8f5-4b5e-8139-1a679d6c99ae","Type":"ContainerDied","Data":"9c160e4414ff7a30d8668f9e936b1c87c0e944014a2ac43f90b0bb3ce0c476a7"} Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.711223 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9c160e4414ff7a30d8668f9e936b1c87c0e944014a2ac43f90b0bb3ce0c476a7" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.715819 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.717110 4982 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220" exitCode=0 Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.717200 4982 scope.go:117] "RemoveContainer" containerID="b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.717475 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.743399 4982 status_manager.go:851] "Failed to get status for pod" podUID="27d44766-c0c7-48dd-b00d-0fc5adeb8707" pod="openshift-marketplace/community-operators-dw5wf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-dw5wf\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.743732 4982 status_manager.go:851] "Failed to get status for pod" podUID="d4f84179-f77d-4daa-82e5-04398aa15339" pod="openshift-marketplace/redhat-marketplace-pjnnz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pjnnz\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.744265 4982 status_manager.go:851] "Failed to get status for pod" podUID="f17dc565-d8f5-4b5e-8139-1a679d6c99ae" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.746324 4982 scope.go:117] "RemoveContainer" containerID="a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.770543 4982 scope.go:117] "RemoveContainer" containerID="c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.799153 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.799366 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.799403 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.799784 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.799830 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.799882 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.800294 4982 scope.go:117] "RemoveContainer" containerID="c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.839884 4982 scope.go:117] "RemoveContainer" containerID="08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.873530 4982 scope.go:117] "RemoveContainer" containerID="b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.901236 4982 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.901303 4982 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.901321 4982 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.917640 4982 scope.go:117] "RemoveContainer" containerID="b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6" Jan 22 05:49:57 crc kubenswrapper[4982]: E0122 05:49:57.919261 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\": container with ID starting with b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6 not found: ID does not exist" containerID="b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.919336 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6"} err="failed to get container status \"b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\": rpc error: code = NotFound desc = could not find container \"b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6\": container with ID starting with b6f314beef16a329e7c76436deaaa430c58e3d811f4f6a1608224ea2a7e81fb6 not found: ID does not exist" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.919404 4982 scope.go:117] "RemoveContainer" containerID="a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a" Jan 22 05:49:57 crc kubenswrapper[4982]: E0122 05:49:57.920178 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\": container with ID starting with a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a not 
found: ID does not exist" containerID="a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.920225 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a"} err="failed to get container status \"a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\": rpc error: code = NotFound desc = could not find container \"a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a\": container with ID starting with a8b7cd4646984d93ee478b5d4c9e61c1c98754d850f93ccdb81341bfb8a93e9a not found: ID does not exist" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.920256 4982 scope.go:117] "RemoveContainer" containerID="c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de" Jan 22 05:49:57 crc kubenswrapper[4982]: E0122 05:49:57.920605 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\": container with ID starting with c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de not found: ID does not exist" containerID="c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.920640 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de"} err="failed to get container status \"c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\": rpc error: code = NotFound desc = could not find container \"c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de\": container with ID starting with c783955e4c0c4abee351d27e7cd1267efd64a228df5cb9148cc362ea9134c5de not found: ID does not exist" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.920663 4982 scope.go:117] "RemoveContainer" containerID="c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c" Jan 22 05:49:57 crc kubenswrapper[4982]: E0122 05:49:57.921001 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\": container with ID starting with c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c not found: ID does not exist" containerID="c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.921029 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c"} err="failed to get container status \"c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\": rpc error: code = NotFound desc = could not find container \"c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c\": container with ID starting with c134e4955f32117a0a14e723b802c32b36c415e9723ac86763258ae8204ec75c not found: ID does not exist" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.921048 4982 scope.go:117] "RemoveContainer" containerID="08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220" Jan 22 05:49:57 crc kubenswrapper[4982]: E0122 05:49:57.921370 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\": container with ID starting with 08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220 not found: ID does not exist" containerID="08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.921398 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220"} err="failed to get container status \"08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\": rpc error: code = NotFound desc = could not find container \"08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220\": container with ID starting with 08353e6556f77ca944f6ce948786594256bc83213214abd00d79ec2424366220 not found: ID does not exist" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.921425 4982 scope.go:117] "RemoveContainer" containerID="b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3" Jan 22 05:49:57 crc kubenswrapper[4982]: E0122 05:49:57.921701 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\": container with ID starting with b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3 not found: ID does not exist" containerID="b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3" Jan 22 05:49:57 crc kubenswrapper[4982]: I0122 05:49:57.921732 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3"} err="failed to get container status \"b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\": rpc error: code = NotFound desc = could not find container \"b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3\": container with ID starting with b65d63842c9a2cec2ce664fc44b1c364e19cefa78d236f9f56a4d6f4bfb877c3 not found: ID does not exist" Jan 22 05:49:58 crc kubenswrapper[4982]: I0122 05:49:58.050352 4982 status_manager.go:851] "Failed to get status for pod" podUID="27d44766-c0c7-48dd-b00d-0fc5adeb8707" pod="openshift-marketplace/community-operators-dw5wf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-dw5wf\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:49:58 crc kubenswrapper[4982]: I0122 05:49:58.051201 4982 status_manager.go:851] "Failed to get status for pod" podUID="d4f84179-f77d-4daa-82e5-04398aa15339" pod="openshift-marketplace/redhat-marketplace-pjnnz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pjnnz\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:49:58 crc kubenswrapper[4982]: I0122 05:49:58.052020 4982 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:49:58 crc kubenswrapper[4982]: I0122 05:49:58.052413 4982 status_manager.go:851] "Failed to get status for pod" podUID="f17dc565-d8f5-4b5e-8139-1a679d6c99ae" pod="openshift-kube-apiserver/installer-9-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:49:59 crc kubenswrapper[4982]: I0122 05:49:59.724575 4982 status_manager.go:851] "Failed to get status for pod" podUID="27d44766-c0c7-48dd-b00d-0fc5adeb8707" pod="openshift-marketplace/community-operators-dw5wf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-dw5wf\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:49:59 crc kubenswrapper[4982]: I0122 05:49:59.725488 4982 status_manager.go:851] "Failed to get status for pod" podUID="d4f84179-f77d-4daa-82e5-04398aa15339" pod="openshift-marketplace/redhat-marketplace-pjnnz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pjnnz\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:49:59 crc kubenswrapper[4982]: I0122 05:49:59.725749 4982 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:49:59 crc kubenswrapper[4982]: I0122 05:49:59.726008 4982 status_manager.go:851] "Failed to get status for pod" podUID="f17dc565-d8f5-4b5e-8139-1a679d6c99ae" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:49:59 crc kubenswrapper[4982]: I0122 05:49:59.733810 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Jan 22 05:50:00 crc kubenswrapper[4982]: E0122 05:50:00.325771 4982 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.158:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 05:50:00 crc kubenswrapper[4982]: I0122 05:50:00.326329 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 05:50:00 crc kubenswrapper[4982]: W0122 05:50:00.357899 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-5bfdb71340103ea1f73270cc77b6f4ba3a0861b5863ff41a0e36629d80a24b65 WatchSource:0}: Error finding container 5bfdb71340103ea1f73270cc77b6f4ba3a0861b5863ff41a0e36629d80a24b65: Status 404 returned error can't find the container with id 5bfdb71340103ea1f73270cc77b6f4ba3a0861b5863ff41a0e36629d80a24b65 Jan 22 05:50:00 crc kubenswrapper[4982]: E0122 05:50:00.363447 4982 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.158:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188cf79b609f6552 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-22 05:50:00.362812754 +0000 UTC m=+261.201450767,LastTimestamp:2026-01-22 05:50:00.362812754 +0000 UTC m=+261.201450767,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 22 05:50:00 crc kubenswrapper[4982]: I0122 05:50:00.755579 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"5bfdb71340103ea1f73270cc77b6f4ba3a0861b5863ff41a0e36629d80a24b65"} Jan 22 05:50:01 crc kubenswrapper[4982]: I0122 05:50:01.764108 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"ccd51868b30212296595ccfd99522c4e8f926f9ad1a9e32cfc6083b78b48f8ad"} Jan 22 05:50:01 crc kubenswrapper[4982]: E0122 05:50:01.764714 4982 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.158:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 05:50:01 crc kubenswrapper[4982]: I0122 05:50:01.765283 4982 status_manager.go:851] "Failed to get status for pod" podUID="d4f84179-f77d-4daa-82e5-04398aa15339" pod="openshift-marketplace/redhat-marketplace-pjnnz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pjnnz\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:50:01 crc kubenswrapper[4982]: I0122 05:50:01.765803 4982 status_manager.go:851] "Failed to get status for pod" podUID="f17dc565-d8f5-4b5e-8139-1a679d6c99ae" pod="openshift-kube-apiserver/installer-9-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:50:01 crc kubenswrapper[4982]: I0122 05:50:01.766169 4982 status_manager.go:851] "Failed to get status for pod" podUID="27d44766-c0c7-48dd-b00d-0fc5adeb8707" pod="openshift-marketplace/community-operators-dw5wf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-dw5wf\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:50:02 crc kubenswrapper[4982]: E0122 05:50:02.775352 4982 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.158:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 05:50:03 crc kubenswrapper[4982]: E0122 05:50:03.275224 4982 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:50:03 crc kubenswrapper[4982]: E0122 05:50:03.275809 4982 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:50:03 crc kubenswrapper[4982]: E0122 05:50:03.276344 4982 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:50:03 crc kubenswrapper[4982]: E0122 05:50:03.276804 4982 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:50:03 crc kubenswrapper[4982]: E0122 05:50:03.277380 4982 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.158:6443: connect: connection refused" Jan 22 05:50:03 crc kubenswrapper[4982]: I0122 05:50:03.277451 4982 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Jan 22 05:50:03 crc kubenswrapper[4982]: E0122 05:50:03.277913 4982 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.158:6443: connect: connection refused" interval="200ms" Jan 22 05:50:03 crc kubenswrapper[4982]: E0122 05:50:03.479206 4982 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.158:6443: connect: connection refused" interval="400ms" Jan 22 05:50:03 crc kubenswrapper[4982]: E0122 05:50:03.882105 4982 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.158:6443: connect: connection refused" 
interval="800ms" Jan 22 05:50:04 crc kubenswrapper[4982]: E0122 05:50:04.683440 4982 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.158:6443: connect: connection refused" interval="1.6s" Jan 22 05:50:06 crc kubenswrapper[4982]: E0122 05:50:06.285527 4982 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.158:6443: connect: connection refused" interval="3.2s" Jan 22 05:50:06 crc kubenswrapper[4982]: E0122 05:50:06.659006 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:50:06Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:50:06Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:50:06Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T05:50:06Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:27cf3abbf8fd467e0024e29f4a1590ade73c4e616041027fc414be0d345fbddc\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:61565de83851ce1a60a7f5484dc89d16992896eb24005c0196eed44fc53d8e6a\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1671130350},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:aad5e438ec868272540a84dfc53b266c8a08267bec7a7617871dddeb1511dcb2\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:dd1e95af8b913ea8f010fa96cba36f2e7e5b1edfbf758c69b8c9eeb88c6911ea\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1202744046},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:2b72e40c5d5b36b681f40c16ebf3dcac6520ed0c79f174ba87f673ab7afd209a\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:d83ee77ad07e06451a84205ac4c85c69e912a1c975e1a8a95095d79218028dce\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1178956511},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1f907bbb9feda871911fe6104a05039ba4876ca82e26d41398008385f8a7656b\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:cffa3fc70ac997825130e3768ebb1a24956f427e18870f87ef0654513b3cd657\\\",\\\"registry.redhat.io/redhat/certi
fied-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1168433908},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\
"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792}],\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.158:6443: connect: connection refused"
Jan 22 05:50:06 crc kubenswrapper[4982]: E0122 05:50:06.659950 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.158:6443: connect: connection refused"
Jan 22 05:50:06 crc kubenswrapper[4982]: E0122 05:50:06.660781 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.158:6443: connect: connection refused"
Jan 22 05:50:06 crc kubenswrapper[4982]: E0122 05:50:06.661644 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.158:6443: connect: connection refused"
Jan 22 05:50:06 crc kubenswrapper[4982]: E0122 05:50:06.662168 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.158:6443: connect: connection refused"
Jan 22 05:50:06 crc kubenswrapper[4982]: E0122 05:50:06.662203 4982 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
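The repeated "Error updating node status, will retry" failures above, ending in "update node status exceeds retry count", match the kubelet's bounded-retry pattern for node status sync: it attempts the update a fixed number of times per sync (nodeStatusUpdateRetry, five by default) and then gives up until the next interval. A minimal Go sketch of that pattern, assuming a simplified tryUpdate callback rather than the kubelet's real types:

// Bounded retry, mirroring the kubelet's nodeStatusUpdateRetry behavior.
// tryUpdate is a hypothetical stand-in for the real status PATCH/GET call.
package main

import (
	"errors"
	"fmt"
)

const nodeStatusUpdateRetry = 5 // kubelet default

func updateNodeStatus(tryUpdate func(attempt int) error) error {
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		if err := tryUpdate(i); err != nil {
			fmt.Printf("Error updating node status, will retry: %v\n", err)
			continue
		}
		return nil
	}
	return errors.New("update node status exceeds retry count")
}

func main() {
	// Simulate an unreachable apiserver: connection refused on every attempt,
	// as in the log lines above.
	err := updateNodeStatus(func(int) error {
		return errors.New("dial tcp 38.102.83.158:6443: connect: connection refused")
	})
	fmt.Println(err)
}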
Jan 22 05:50:06 crc kubenswrapper[4982]: I0122 05:50:06.718401 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 22 05:50:06 crc kubenswrapper[4982]: I0122 05:50:06.719511 4982 status_manager.go:851] "Failed to get status for pod" podUID="27d44766-c0c7-48dd-b00d-0fc5adeb8707" pod="openshift-marketplace/community-operators-dw5wf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-dw5wf\": dial tcp 38.102.83.158:6443: connect: connection refused"
Jan 22 05:50:06 crc kubenswrapper[4982]: I0122 05:50:06.719961 4982 status_manager.go:851] "Failed to get status for pod" podUID="d4f84179-f77d-4daa-82e5-04398aa15339" pod="openshift-marketplace/redhat-marketplace-pjnnz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pjnnz\": dial tcp 38.102.83.158:6443: connect: connection refused"
Jan 22 05:50:06 crc kubenswrapper[4982]: I0122 05:50:06.720659 4982 status_manager.go:851] "Failed to get status for pod" podUID="f17dc565-d8f5-4b5e-8139-1a679d6c99ae" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.158:6443: connect: connection refused"
Jan 22 05:50:06 crc kubenswrapper[4982]: I0122 05:50:06.739815 4982 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d271a46c-ce9f-453b-93fe-a489ea56d1e8"
Jan 22 05:50:06 crc kubenswrapper[4982]: I0122 05:50:06.739877 4982 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d271a46c-ce9f-453b-93fe-a489ea56d1e8"
Jan 22 05:50:06 crc kubenswrapper[4982]: E0122 05:50:06.740632 4982 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.158:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
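kube-apiserver-crc is a static pod: the kubelet runs it directly from an on-disk manifest and represents it upstream through a "mirror pod" object. While the apiserver is down, the attempt to delete the stale mirror pod (podUID d271a46c-...) can only fail with connection refused; the kubelet keeps retrying, and the deletion finally lands once the new apiserver answers ("Deleted mirror pod because it is outdated" at 05:50:14 below). The delete itself is an ordinary pod DELETE; a minimal client-go sketch, with the kubeconfig path as a placeholder assumption, and a UID precondition so only the exact stale object is removed:

// Deleting a mirror pod object via client-go, roughly as the kubelet's
// mirror client does. The kubeconfig path is a hypothetical placeholder.
package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/types"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // assumption
	if err != nil {
		panic(err)
	}
	cs := kubernetes.NewForConfigOrDie(cfg)

	// Target only the stale object seen in the log above.
	uid := types.UID("d271a46c-ce9f-453b-93fe-a489ea56d1e8")
	err = cs.CoreV1().Pods("openshift-kube-apiserver").Delete(
		context.TODO(),
		"kube-apiserver-crc",
		metav1.DeleteOptions{Preconditions: &metav1.Preconditions{UID: &uid}},
	)
	fmt.Println(err) // "connection refused" while the apiserver is still down
}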
Jan 22 05:50:06 crc kubenswrapper[4982]: I0122 05:50:06.741553 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 22 05:50:06 crc kubenswrapper[4982]: W0122 05:50:06.776928 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-5ef348b8a0ce4225fd3f297b0cf924913b421612e87016e6d8f42586cfcd591f WatchSource:0}: Error finding container 5ef348b8a0ce4225fd3f297b0cf924913b421612e87016e6d8f42586cfcd591f: Status 404 returned error can't find the container with id 5ef348b8a0ce4225fd3f297b0cf924913b421612e87016e6d8f42586cfcd591f
Jan 22 05:50:06 crc kubenswrapper[4982]: I0122 05:50:06.809317 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"5ef348b8a0ce4225fd3f297b0cf924913b421612e87016e6d8f42586cfcd591f"}
Jan 22 05:50:07 crc kubenswrapper[4982]: I0122 05:50:07.827388 4982 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="8278bb3b7f3f83a3cb45b4c0ab042046efaf4e83a90930a2294b9784b6faa703" exitCode=0
Jan 22 05:50:07 crc kubenswrapper[4982]: I0122 05:50:07.828032 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"8278bb3b7f3f83a3cb45b4c0ab042046efaf4e83a90930a2294b9784b6faa703"}
Jan 22 05:50:07 crc kubenswrapper[4982]: I0122 05:50:07.828286 4982 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d271a46c-ce9f-453b-93fe-a489ea56d1e8"
Jan 22 05:50:07 crc kubenswrapper[4982]: I0122 05:50:07.833508 4982 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d271a46c-ce9f-453b-93fe-a489ea56d1e8"
Jan 22 05:50:07 crc kubenswrapper[4982]: I0122 05:50:07.829352 4982 status_manager.go:851] "Failed to get status for pod" podUID="27d44766-c0c7-48dd-b00d-0fc5adeb8707" pod="openshift-marketplace/community-operators-dw5wf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-dw5wf\": dial tcp 38.102.83.158:6443: connect: connection refused"
Jan 22 05:50:07 crc kubenswrapper[4982]: E0122 05:50:07.834986 4982 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.158:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 22 05:50:07 crc kubenswrapper[4982]: I0122 05:50:07.835048 4982 status_manager.go:851] "Failed to get status for pod" podUID="d4f84179-f77d-4daa-82e5-04398aa15339" pod="openshift-marketplace/redhat-marketplace-pjnnz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pjnnz\": dial tcp 38.102.83.158:6443: connect: connection refused"
Jan 22 05:50:07 crc kubenswrapper[4982]: I0122 05:50:07.836001 4982 status_manager.go:851] "Failed to get status for pod" podUID="f17dc565-d8f5-4b5e-8139-1a679d6c99ae" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.158:6443: connect: connection refused"
Jan 22 05:50:08 crc kubenswrapper[4982]: I0122 05:50:08.309506 4982 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Readiness probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body=
Jan 22 05:50:08 crc kubenswrapper[4982]: I0122 05:50:08.310088 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused"
Jan 22 05:50:08 crc kubenswrapper[4982]: I0122 05:50:08.848540 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"eeb1b6b1b6ccfe6e5ecc419cd8e52d5601a414fc7789c16afa2d46705299e45b"}
Jan 22 05:50:08 crc kubenswrapper[4982]: I0122 05:50:08.848605 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"a82e4dad7e70fe1a8e3c672bb20b7be61b83ddbccbcb326f184239ba57845a1a"}
Jan 22 05:50:08 crc kubenswrapper[4982]: I0122 05:50:08.848618 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"d97b2176b2c8a463033df59f08d9fab6b5a840e9d8e9811ccd40966818e47d91"}
Jan 22 05:50:08 crc kubenswrapper[4982]: I0122 05:50:08.860232 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Jan 22 05:50:08 crc kubenswrapper[4982]: I0122 05:50:08.860307 4982 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114" exitCode=1
Jan 22 05:50:08 crc kubenswrapper[4982]: I0122 05:50:08.860348 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114"}
Jan 22 05:50:08 crc kubenswrapper[4982]: I0122 05:50:08.860990 4982 scope.go:117] "RemoveContainer" containerID="c695436bd739c38cd72df822e4202be4c4516eb6ed03a8685e8ef161a33a3114"
Jan 22 05:50:09 crc kubenswrapper[4982]: I0122 05:50:09.395834 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 22 05:50:09 crc kubenswrapper[4982]: I0122 05:50:09.880270 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"348a1b92b580da7cd2df3e4a2715a7037869f511c885828963f5d7a9e88508ae"}
Jan 22 05:50:09 crc kubenswrapper[4982]: I0122 05:50:09.880336 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"6afc68c1b87501a428d07fa914dedbf1a988cc896c037c008b3b9167445681fa"}
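The Readiness and Startup failures around these entries are the kubelet's HTTPS probe against kube-controller-manager's /healthz endpoint on 10257 while the process restarts. A minimal sketch of such a probe, assuming certificate verification is skipped the way kubelet HTTPS probes skip it:

// One HTTPS GET against /healthz; success only on a 2xx response,
// roughly what a kubelet httpGet probe does.
package main

import (
	"crypto/tls"
	"fmt"
	"net/http"
	"time"
)

func probeHealthz(url string) error {
	client := &http.Client{
		Timeout: time.Second,
		Transport: &http.Transport{
			// Kubelet HTTPS probes do not verify the serving certificate.
			TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
		},
	}
	resp, err := client.Get(url)
	if err != nil {
		return err // e.g. "connect: connection refused" while the target restarts
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 300 {
		return fmt.Errorf("unhealthy: HTTP %d", resp.StatusCode)
	}
	return nil
}

func main() {
	if err := probeHealthz("https://192.168.126.11:10257/healthz"); err != nil {
		fmt.Println("Probe failed:", err)
	} else {
		fmt.Println("ready")
	}
}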
Jan 22 05:50:09 crc kubenswrapper[4982]: I0122 05:50:09.880445 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 22 05:50:09 crc kubenswrapper[4982]: I0122 05:50:09.880485 4982 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d271a46c-ce9f-453b-93fe-a489ea56d1e8"
Jan 22 05:50:09 crc kubenswrapper[4982]: I0122 05:50:09.880514 4982 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d271a46c-ce9f-453b-93fe-a489ea56d1e8"
Jan 22 05:50:09 crc kubenswrapper[4982]: I0122 05:50:09.883882 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Jan 22 05:50:09 crc kubenswrapper[4982]: I0122 05:50:09.883951 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"241f9b149aaf8228bb734d857d477ed05d7eb6a415c2386e3a65e1a2115091c8"}
Jan 22 05:50:11 crc kubenswrapper[4982]: I0122 05:50:11.750932 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 22 05:50:11 crc kubenswrapper[4982]: I0122 05:50:11.751003 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 22 05:50:11 crc kubenswrapper[4982]: I0122 05:50:11.757201 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 22 05:50:14 crc kubenswrapper[4982]: I0122 05:50:14.889792 4982 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 22 05:50:14 crc kubenswrapper[4982]: I0122 05:50:14.926992 4982 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d271a46c-ce9f-453b-93fe-a489ea56d1e8"
Jan 22 05:50:14 crc kubenswrapper[4982]: I0122 05:50:14.927039 4982 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d271a46c-ce9f-453b-93fe-a489ea56d1e8"
Jan 22 05:50:14 crc kubenswrapper[4982]: I0122 05:50:14.930958 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 22 05:50:14 crc kubenswrapper[4982]: I0122 05:50:14.933987 4982 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="ffe3af52-baa7-4467-8b06-f64e055d1e4a"
Jan 22 05:50:15 crc kubenswrapper[4982]: I0122 05:50:15.934673 4982 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d271a46c-ce9f-453b-93fe-a489ea56d1e8"
Jan 22 05:50:15 crc kubenswrapper[4982]: I0122 05:50:15.935310 4982 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d271a46c-ce9f-453b-93fe-a489ea56d1e8"
Jan 22 05:50:18 crc kubenswrapper[4982]: I0122 05:50:18.308931 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 22 05:50:19 crc kubenswrapper[4982]: I0122 05:50:19.395644 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 22 05:50:19 crc kubenswrapper[4982]: I0122 05:50:19.396257 4982 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body=
Jan 22 05:50:19 crc kubenswrapper[4982]: I0122 05:50:19.396357 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused"
Jan 22 05:50:19 crc kubenswrapper[4982]: I0122 05:50:19.755825 4982 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="ffe3af52-baa7-4467-8b06-f64e055d1e4a"
Jan 22 05:50:25 crc kubenswrapper[4982]: I0122 05:50:25.470380 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt"
Jan 22 05:50:26 crc kubenswrapper[4982]: I0122 05:50:26.062955 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7"
Jan 22 05:50:26 crc kubenswrapper[4982]: I0122 05:50:26.399843 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Jan 22 05:50:26 crc kubenswrapper[4982]: I0122 05:50:26.788718 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images"
Jan 22 05:50:27 crc kubenswrapper[4982]: I0122 05:50:27.016484 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Jan 22 05:50:27 crc kubenswrapper[4982]: I0122 05:50:27.234417 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert"
Jan 22 05:50:27 crc kubenswrapper[4982]: I0122 05:50:27.355342 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Jan 22 05:50:27 crc kubenswrapper[4982]: I0122 05:50:27.465296 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls"
Jan 22 05:50:27 crc kubenswrapper[4982]: I0122 05:50:27.646947 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Jan 22 05:50:27 crc kubenswrapper[4982]: I0122 05:50:27.775213 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Jan 22 05:50:27 crc kubenswrapper[4982]: I0122 05:50:27.824835 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Jan 22 05:50:27 crc kubenswrapper[4982]: I0122 05:50:27.863394 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls"
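The long run of reflector.go "Caches populated" lines here and below is the kubelet, through client-go reflectors, re-listing every Secret and ConfigMap referenced by pods on the node now that the apiserver answers again; each line marks one informer cache becoming warm. A minimal client-go sketch of the same mechanism for one namespace's ConfigMaps, with the kubeconfig path again a placeholder assumption:

// List+watch ConfigMaps and report once the local cache is synced,
// the moment the kubelet logs "Caches populated for *v1.ConfigMap ...".
package main

import (
	"context"
	"fmt"
	"time"

	"k8s.io/client-go/informers"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/cache"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // assumption
	if err != nil {
		panic(err)
	}
	cs := kubernetes.NewForConfigOrDie(cfg)

	factory := informers.NewSharedInformerFactoryWithOptions(
		cs, 10*time.Minute, informers.WithNamespace("openshift-apiserver"))
	inf := factory.Core().V1().ConfigMaps().Informer()

	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()
	factory.Start(ctx.Done())

	// Blocks until the initial LIST has populated the cache.
	if !cache.WaitForCacheSync(ctx.Done(), inf.HasSynced) {
		panic("cache never synced")
	}
	fmt.Println("Caches populated for *v1.ConfigMap")
}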
Jan 22 05:50:27 crc kubenswrapper[4982]: I0122 05:50:27.942274 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt"
Jan 22 05:50:28 crc kubenswrapper[4982]: I0122 05:50:28.003364 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Jan 22 05:50:28 crc kubenswrapper[4982]: I0122 05:50:28.067807 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Jan 22 05:50:28 crc kubenswrapper[4982]: I0122 05:50:28.311757 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config"
Jan 22 05:50:28 crc kubenswrapper[4982]: I0122 05:50:28.333890 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Jan 22 05:50:28 crc kubenswrapper[4982]: I0122 05:50:28.345241 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Jan 22 05:50:28 crc kubenswrapper[4982]: I0122 05:50:28.355293 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Jan 22 05:50:28 crc kubenswrapper[4982]: I0122 05:50:28.383164 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
Jan 22 05:50:28 crc kubenswrapper[4982]: I0122 05:50:28.394212 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Jan 22 05:50:28 crc kubenswrapper[4982]: I0122 05:50:28.561186 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Jan 22 05:50:28 crc kubenswrapper[4982]: I0122 05:50:28.580356 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Jan 22 05:50:28 crc kubenswrapper[4982]: I0122 05:50:28.580618 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Jan 22 05:50:28 crc kubenswrapper[4982]: I0122 05:50:28.729560 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Jan 22 05:50:28 crc kubenswrapper[4982]: I0122 05:50:28.964239 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Jan 22 05:50:29 crc kubenswrapper[4982]: I0122 05:50:29.065489 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Jan 22 05:50:29 crc kubenswrapper[4982]: I0122 05:50:29.080595 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Jan 22 05:50:29 crc kubenswrapper[4982]: I0122 05:50:29.082425 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Jan 22 05:50:29 crc kubenswrapper[4982]: I0122 05:50:29.373535 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Jan 22 05:50:29 crc kubenswrapper[4982]: I0122 05:50:29.400749 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 22 05:50:29 crc kubenswrapper[4982]: I0122 05:50:29.405390 4982 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 05:50:29 crc kubenswrapper[4982]: I0122 05:50:29.485533 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 22 05:50:29 crc kubenswrapper[4982]: I0122 05:50:29.582764 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 22 05:50:29 crc kubenswrapper[4982]: I0122 05:50:29.848758 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 22 05:50:29 crc kubenswrapper[4982]: I0122 05:50:29.895911 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 22 05:50:29 crc kubenswrapper[4982]: I0122 05:50:29.902554 4982 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 22 05:50:29 crc kubenswrapper[4982]: I0122 05:50:29.987051 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 22 05:50:30 crc kubenswrapper[4982]: I0122 05:50:30.063610 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 22 05:50:30 crc kubenswrapper[4982]: I0122 05:50:30.078909 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 22 05:50:30 crc kubenswrapper[4982]: I0122 05:50:30.107123 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 22 05:50:30 crc kubenswrapper[4982]: I0122 05:50:30.210701 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 22 05:50:30 crc kubenswrapper[4982]: I0122 05:50:30.290720 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 22 05:50:30 crc kubenswrapper[4982]: I0122 05:50:30.343478 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 22 05:50:30 crc kubenswrapper[4982]: I0122 05:50:30.372926 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 22 05:50:30 crc kubenswrapper[4982]: I0122 05:50:30.405315 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 22 05:50:30 crc kubenswrapper[4982]: I0122 05:50:30.419941 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 22 05:50:30 crc kubenswrapper[4982]: I0122 05:50:30.531324 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 22 05:50:30 crc kubenswrapper[4982]: I0122 05:50:30.542733 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 22 05:50:30 crc kubenswrapper[4982]: I0122 05:50:30.708958 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 22 05:50:30 crc kubenswrapper[4982]: I0122 05:50:30.945164 4982 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 22 05:50:31 crc kubenswrapper[4982]: I0122 05:50:31.010333 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 22 05:50:31 crc kubenswrapper[4982]: I0122 05:50:31.053308 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 22 05:50:31 crc kubenswrapper[4982]: I0122 05:50:31.089284 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 22 05:50:31 crc kubenswrapper[4982]: I0122 05:50:31.120157 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 22 05:50:31 crc kubenswrapper[4982]: I0122 05:50:31.207600 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 22 05:50:31 crc kubenswrapper[4982]: I0122 05:50:31.237064 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 22 05:50:31 crc kubenswrapper[4982]: I0122 05:50:31.325074 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 22 05:50:31 crc kubenswrapper[4982]: I0122 05:50:31.327473 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 22 05:50:31 crc kubenswrapper[4982]: I0122 05:50:31.352469 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 22 05:50:31 crc kubenswrapper[4982]: I0122 05:50:31.364989 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 22 05:50:31 crc kubenswrapper[4982]: I0122 05:50:31.469263 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 22 05:50:31 crc kubenswrapper[4982]: I0122 05:50:31.533650 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 22 05:50:31 crc kubenswrapper[4982]: I0122 05:50:31.657565 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 22 05:50:31 crc kubenswrapper[4982]: I0122 05:50:31.658807 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 22 05:50:31 crc kubenswrapper[4982]: I0122 05:50:31.666055 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 22 05:50:31 crc kubenswrapper[4982]: I0122 05:50:31.668834 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 22 05:50:31 crc kubenswrapper[4982]: I0122 05:50:31.675376 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 22 05:50:31 crc kubenswrapper[4982]: I0122 05:50:31.700779 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 22 05:50:31 crc kubenswrapper[4982]: I0122 05:50:31.717321 4982 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-ingress"/"kube-root-ca.crt" Jan 22 05:50:31 crc kubenswrapper[4982]: I0122 05:50:31.794836 4982 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Jan 22 05:50:31 crc kubenswrapper[4982]: I0122 05:50:31.804102 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 22 05:50:31 crc kubenswrapper[4982]: I0122 05:50:31.804197 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 22 05:50:31 crc kubenswrapper[4982]: I0122 05:50:31.810131 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 05:50:31 crc kubenswrapper[4982]: I0122 05:50:31.828405 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=17.828383497 podStartE2EDuration="17.828383497s" podCreationTimestamp="2026-01-22 05:50:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:50:31.826504125 +0000 UTC m=+292.665142128" watchObservedRunningTime="2026-01-22 05:50:31.828383497 +0000 UTC m=+292.667021490" Jan 22 05:50:31 crc kubenswrapper[4982]: I0122 05:50:31.973259 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 22 05:50:32 crc kubenswrapper[4982]: I0122 05:50:32.048488 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 22 05:50:32 crc kubenswrapper[4982]: I0122 05:50:32.098088 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 22 05:50:32 crc kubenswrapper[4982]: I0122 05:50:32.099299 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 22 05:50:32 crc kubenswrapper[4982]: I0122 05:50:32.215125 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 22 05:50:32 crc kubenswrapper[4982]: I0122 05:50:32.215167 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 22 05:50:32 crc kubenswrapper[4982]: I0122 05:50:32.233545 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 22 05:50:32 crc kubenswrapper[4982]: I0122 05:50:32.260980 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 22 05:50:32 crc kubenswrapper[4982]: I0122 05:50:32.341134 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 22 05:50:32 crc kubenswrapper[4982]: I0122 05:50:32.361504 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 22 05:50:32 crc kubenswrapper[4982]: I0122 05:50:32.405562 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 22 05:50:32 crc kubenswrapper[4982]: I0122 05:50:32.529103 4982 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 22 05:50:32 crc kubenswrapper[4982]: I0122 05:50:32.557325 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 22 05:50:32 crc kubenswrapper[4982]: I0122 05:50:32.570397 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 22 05:50:32 crc kubenswrapper[4982]: I0122 05:50:32.592698 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 22 05:50:32 crc kubenswrapper[4982]: I0122 05:50:32.641588 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 22 05:50:32 crc kubenswrapper[4982]: I0122 05:50:32.664398 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 22 05:50:32 crc kubenswrapper[4982]: I0122 05:50:32.669921 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 22 05:50:32 crc kubenswrapper[4982]: I0122 05:50:32.705920 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 22 05:50:32 crc kubenswrapper[4982]: I0122 05:50:32.736022 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 22 05:50:32 crc kubenswrapper[4982]: I0122 05:50:32.783295 4982 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 22 05:50:32 crc kubenswrapper[4982]: I0122 05:50:32.843837 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 22 05:50:32 crc kubenswrapper[4982]: I0122 05:50:32.895503 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 22 05:50:32 crc kubenswrapper[4982]: I0122 05:50:32.965341 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 22 05:50:32 crc kubenswrapper[4982]: I0122 05:50:32.973974 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 22 05:50:33 crc kubenswrapper[4982]: I0122 05:50:33.020218 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 22 05:50:33 crc kubenswrapper[4982]: I0122 05:50:33.024132 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 22 05:50:33 crc kubenswrapper[4982]: I0122 05:50:33.025660 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 22 05:50:33 crc kubenswrapper[4982]: I0122 05:50:33.048197 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 22 05:50:33 crc kubenswrapper[4982]: I0122 05:50:33.088064 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 22 05:50:33 crc kubenswrapper[4982]: I0122 05:50:33.124660 4982 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 22 05:50:33 crc kubenswrapper[4982]: I0122 05:50:33.161616 4982 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 22 05:50:33 crc kubenswrapper[4982]: I0122 05:50:33.302734 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 22 05:50:33 crc kubenswrapper[4982]: I0122 05:50:33.334132 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 22 05:50:33 crc kubenswrapper[4982]: I0122 05:50:33.404453 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 22 05:50:33 crc kubenswrapper[4982]: I0122 05:50:33.435721 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 22 05:50:33 crc kubenswrapper[4982]: I0122 05:50:33.455938 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 22 05:50:33 crc kubenswrapper[4982]: I0122 05:50:33.466200 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 22 05:50:33 crc kubenswrapper[4982]: I0122 05:50:33.626988 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 22 05:50:33 crc kubenswrapper[4982]: I0122 05:50:33.644245 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 22 05:50:33 crc kubenswrapper[4982]: I0122 05:50:33.648804 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 22 05:50:33 crc kubenswrapper[4982]: I0122 05:50:33.764200 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 22 05:50:33 crc kubenswrapper[4982]: I0122 05:50:33.817365 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 22 05:50:33 crc kubenswrapper[4982]: I0122 05:50:33.846409 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 22 05:50:33 crc kubenswrapper[4982]: I0122 05:50:33.862833 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 22 05:50:33 crc kubenswrapper[4982]: I0122 05:50:33.930641 4982 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Jan 22 05:50:33 crc kubenswrapper[4982]: I0122 05:50:33.942287 4982 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 22 05:50:33 crc kubenswrapper[4982]: I0122 05:50:33.961675 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 22 05:50:34 crc kubenswrapper[4982]: I0122 05:50:34.175557 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 22 05:50:34 crc kubenswrapper[4982]: I0122 05:50:34.222134 4982 reflector.go:368] Caches populated for *v1.Secret 
from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 22 05:50:34 crc kubenswrapper[4982]: I0122 05:50:34.222446 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 22 05:50:34 crc kubenswrapper[4982]: I0122 05:50:34.262438 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 22 05:50:34 crc kubenswrapper[4982]: I0122 05:50:34.307084 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 22 05:50:34 crc kubenswrapper[4982]: I0122 05:50:34.472281 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 22 05:50:34 crc kubenswrapper[4982]: I0122 05:50:34.478386 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 22 05:50:34 crc kubenswrapper[4982]: I0122 05:50:34.489450 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 22 05:50:34 crc kubenswrapper[4982]: I0122 05:50:34.547092 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 22 05:50:34 crc kubenswrapper[4982]: I0122 05:50:34.608286 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 22 05:50:34 crc kubenswrapper[4982]: I0122 05:50:34.630412 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 22 05:50:34 crc kubenswrapper[4982]: I0122 05:50:34.640028 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 22 05:50:34 crc kubenswrapper[4982]: I0122 05:50:34.656714 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 22 05:50:34 crc kubenswrapper[4982]: I0122 05:50:34.719991 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 22 05:50:34 crc kubenswrapper[4982]: I0122 05:50:34.721589 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 22 05:50:34 crc kubenswrapper[4982]: I0122 05:50:34.863037 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 22 05:50:34 crc kubenswrapper[4982]: I0122 05:50:34.870079 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 22 05:50:34 crc kubenswrapper[4982]: I0122 05:50:34.879646 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 22 05:50:35 crc kubenswrapper[4982]: I0122 05:50:35.036071 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 22 05:50:35 crc kubenswrapper[4982]: I0122 05:50:35.094257 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 22 05:50:35 crc kubenswrapper[4982]: I0122 05:50:35.158497 4982 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 22 05:50:35 crc kubenswrapper[4982]: I0122 05:50:35.184925 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 22 05:50:35 crc kubenswrapper[4982]: I0122 05:50:35.258824 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 22 05:50:35 crc kubenswrapper[4982]: I0122 05:50:35.273232 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 22 05:50:35 crc kubenswrapper[4982]: I0122 05:50:35.352006 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 22 05:50:35 crc kubenswrapper[4982]: I0122 05:50:35.358370 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 22 05:50:35 crc kubenswrapper[4982]: I0122 05:50:35.390373 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 22 05:50:35 crc kubenswrapper[4982]: I0122 05:50:35.432305 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 22 05:50:35 crc kubenswrapper[4982]: I0122 05:50:35.434253 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 22 05:50:35 crc kubenswrapper[4982]: I0122 05:50:35.490188 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 22 05:50:35 crc kubenswrapper[4982]: I0122 05:50:35.500893 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 22 05:50:35 crc kubenswrapper[4982]: I0122 05:50:35.501264 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 22 05:50:35 crc kubenswrapper[4982]: I0122 05:50:35.519260 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 22 05:50:35 crc kubenswrapper[4982]: I0122 05:50:35.621447 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 22 05:50:35 crc kubenswrapper[4982]: I0122 05:50:35.734973 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 22 05:50:35 crc kubenswrapper[4982]: I0122 05:50:35.736804 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 22 05:50:35 crc kubenswrapper[4982]: I0122 05:50:35.743635 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 22 05:50:35 crc kubenswrapper[4982]: I0122 05:50:35.777787 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 22 05:50:35 crc kubenswrapper[4982]: I0122 05:50:35.891914 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 22 05:50:35 crc kubenswrapper[4982]: I0122 05:50:35.993697 4982 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 22 05:50:36 crc kubenswrapper[4982]: I0122 05:50:36.046560 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 22 05:50:36 crc kubenswrapper[4982]: I0122 05:50:36.046981 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 22 05:50:36 crc kubenswrapper[4982]: I0122 05:50:36.055665 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 22 05:50:36 crc kubenswrapper[4982]: I0122 05:50:36.092426 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 22 05:50:36 crc kubenswrapper[4982]: I0122 05:50:36.192695 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 22 05:50:36 crc kubenswrapper[4982]: I0122 05:50:36.200190 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 22 05:50:36 crc kubenswrapper[4982]: I0122 05:50:36.233711 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 22 05:50:36 crc kubenswrapper[4982]: I0122 05:50:36.305250 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 22 05:50:36 crc kubenswrapper[4982]: I0122 05:50:36.368925 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 22 05:50:36 crc kubenswrapper[4982]: I0122 05:50:36.470598 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 22 05:50:36 crc kubenswrapper[4982]: I0122 05:50:36.482504 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 22 05:50:36 crc kubenswrapper[4982]: I0122 05:50:36.493548 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 22 05:50:36 crc kubenswrapper[4982]: I0122 05:50:36.558603 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 22 05:50:36 crc kubenswrapper[4982]: I0122 05:50:36.569430 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 22 05:50:36 crc kubenswrapper[4982]: I0122 05:50:36.579829 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 22 05:50:36 crc kubenswrapper[4982]: I0122 05:50:36.609022 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 22 05:50:36 crc kubenswrapper[4982]: I0122 05:50:36.628710 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 22 05:50:36 crc kubenswrapper[4982]: I0122 05:50:36.652033 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 22 05:50:36 crc 
kubenswrapper[4982]: I0122 05:50:36.759592 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Jan 22 05:50:36 crc kubenswrapper[4982]: I0122 05:50:36.825817 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt"
Jan 22 05:50:36 crc kubenswrapper[4982]: I0122 05:50:36.854525 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Jan 22 05:50:36 crc kubenswrapper[4982]: I0122 05:50:36.909837 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Jan 22 05:50:36 crc kubenswrapper[4982]: I0122 05:50:36.927985 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Jan 22 05:50:37 crc kubenswrapper[4982]: I0122 05:50:37.043003 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
Jan 22 05:50:37 crc kubenswrapper[4982]: I0122 05:50:37.179657 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Jan 22 05:50:37 crc kubenswrapper[4982]: I0122 05:50:37.216905 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt"
Jan 22 05:50:37 crc kubenswrapper[4982]: I0122 05:50:37.295816 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Jan 22 05:50:37 crc kubenswrapper[4982]: I0122 05:50:37.309073 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl"
Jan 22 05:50:37 crc kubenswrapper[4982]: I0122 05:50:37.361213 4982 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Jan 22 05:50:37 crc kubenswrapper[4982]: I0122 05:50:37.361452 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://ccd51868b30212296595ccfd99522c4e8f926f9ad1a9e32cfc6083b78b48f8ad" gracePeriod=5
Jan 22 05:50:37 crc kubenswrapper[4982]: I0122 05:50:37.399454 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Jan 22 05:50:37 crc kubenswrapper[4982]: I0122 05:50:37.410614 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Jan 22 05:50:37 crc kubenswrapper[4982]: I0122 05:50:37.439279 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt"
Jan 22 05:50:37 crc kubenswrapper[4982]: I0122 05:50:37.516504 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist"
Jan 22 05:50:37 crc kubenswrapper[4982]: I0122 05:50:37.578529 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Jan 22 05:50:37 crc kubenswrapper[4982]: I0122 05:50:37.601257 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt"
Jan 22 05:50:37 crc kubenswrapper[4982]: I0122 05:50:37.639888 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config"
Jan 22 05:50:37 crc kubenswrapper[4982]: I0122 05:50:37.693394 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Jan 22 05:50:37 crc kubenswrapper[4982]: I0122 05:50:37.782645 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Jan 22 05:50:37 crc kubenswrapper[4982]: I0122 05:50:37.812790 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Jan 22 05:50:37 crc kubenswrapper[4982]: I0122 05:50:37.930213 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Jan 22 05:50:37 crc kubenswrapper[4982]: I0122 05:50:37.984021 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt"
Jan 22 05:50:38 crc kubenswrapper[4982]: I0122 05:50:38.078032 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd"
Jan 22 05:50:38 crc kubenswrapper[4982]: I0122 05:50:38.079978 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Jan 22 05:50:38 crc kubenswrapper[4982]: I0122 05:50:38.104048 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Jan 22 05:50:38 crc kubenswrapper[4982]: I0122 05:50:38.145951 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Jan 22 05:50:38 crc kubenswrapper[4982]: I0122 05:50:38.168291 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Jan 22 05:50:38 crc kubenswrapper[4982]: I0122 05:50:38.342595 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Jan 22 05:50:38 crc kubenswrapper[4982]: I0122 05:50:38.388083 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Jan 22 05:50:38 crc kubenswrapper[4982]: I0122 05:50:38.404734 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Jan 22 05:50:38 crc kubenswrapper[4982]: I0122 05:50:38.489896 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config"
Jan 22 05:50:38 crc kubenswrapper[4982]: I0122 05:50:38.544812 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d"
Jan 22 05:50:38 crc kubenswrapper[4982]: I0122 05:50:38.738224 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Jan 22 05:50:38 crc kubenswrapper[4982]: I0122 05:50:38.786553 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd"
Jan 22 05:50:38 crc kubenswrapper[4982]: I0122 05:50:38.886949 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Jan 22 05:50:38 crc kubenswrapper[4982]: I0122 05:50:38.927715 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert"
Jan 22 05:50:38 crc kubenswrapper[4982]: I0122 05:50:38.940583 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert"
Jan 22 05:50:38 crc kubenswrapper[4982]: I0122 05:50:38.951602 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt"
Jan 22 05:50:38 crc kubenswrapper[4982]: I0122 05:50:38.964750 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Jan 22 05:50:39 crc kubenswrapper[4982]: I0122 05:50:39.046662 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Jan 22 05:50:39 crc kubenswrapper[4982]: I0122 05:50:39.056889 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Jan 22 05:50:39 crc kubenswrapper[4982]: I0122 05:50:39.256696 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Jan 22 05:50:39 crc kubenswrapper[4982]: I0122 05:50:39.353051 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Jan 22 05:50:39 crc kubenswrapper[4982]: I0122 05:50:39.394752 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz"
Jan 22 05:50:39 crc kubenswrapper[4982]: I0122 05:50:39.427873 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Jan 22 05:50:39 crc kubenswrapper[4982]: I0122 05:50:39.483591 4982 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials
Jan 22 05:50:39 crc kubenswrapper[4982]: I0122 05:50:39.531683 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Jan 22 05:50:39 crc kubenswrapper[4982]: I0122 05:50:39.692488 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt"
Jan 22 05:50:39 crc kubenswrapper[4982]: I0122 05:50:39.722054 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Jan 22 05:50:40 crc kubenswrapper[4982]: I0122 05:50:40.055312 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Jan 22 05:50:40 crc kubenswrapper[4982]: I0122 05:50:40.111942 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Jan 22 05:50:40 crc kubenswrapper[4982]: I0122 05:50:40.176663 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Jan 22 05:50:40 crc kubenswrapper[4982]: I0122 05:50:40.297450 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Jan 22 05:50:40 crc kubenswrapper[4982]: I0122 05:50:40.303411 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Jan 22 05:50:40 crc kubenswrapper[4982]: I0122 05:50:40.573777 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert"
Jan 22 05:50:40 crc kubenswrapper[4982]: I0122 05:50:40.579718 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Jan 22 05:50:40 crc kubenswrapper[4982]: I0122 05:50:40.839183 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Jan 22 05:50:40 crc kubenswrapper[4982]: I0122 05:50:40.870787 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Jan 22 05:50:41 crc kubenswrapper[4982]: I0122 05:50:41.000340 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6"
Jan 22 05:50:41 crc kubenswrapper[4982]: I0122 05:50:41.009644 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Jan 22 05:50:41 crc kubenswrapper[4982]: I0122 05:50:41.033899 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Jan 22 05:50:41 crc kubenswrapper[4982]: I0122 05:50:41.128827 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Jan 22 05:50:41 crc kubenswrapper[4982]: I0122 05:50:41.181100 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Jan 22 05:50:41 crc kubenswrapper[4982]: I0122 05:50:41.224403 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Jan 22 05:50:41 crc kubenswrapper[4982]: I0122 05:50:41.268141 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Jan 22 05:50:41 crc kubenswrapper[4982]: I0122 05:50:41.276643 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Jan 22 05:50:41 crc kubenswrapper[4982]: I0122 05:50:41.780770 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca"
Jan 22 05:50:41 crc kubenswrapper[4982]: I0122 05:50:41.980652 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Jan 22 05:50:42 crc kubenswrapper[4982]: I0122 05:50:42.004918 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Jan 22 05:50:42 crc kubenswrapper[4982]: I0122 05:50:42.048182 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Jan 22 05:50:42 crc kubenswrapper[4982]: I0122 05:50:42.058555 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates"
Jan 22 05:50:42 crc kubenswrapper[4982]: I0122 05:50:42.187245 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Jan 22 05:50:42 crc kubenswrapper[4982]: I0122 05:50:42.591356 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Jan 22 05:50:42 crc kubenswrapper[4982]: I0122 05:50:42.797979 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Jan 22 05:50:42 crc kubenswrapper[4982]: I0122 05:50:42.970954 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Jan 22 05:50:42 crc kubenswrapper[4982]: I0122 05:50:42.971544 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 22 05:50:43 crc kubenswrapper[4982]: I0122 05:50:43.106411 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Jan 22 05:50:43 crc kubenswrapper[4982]: I0122 05:50:43.106509 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Jan 22 05:50:43 crc kubenswrapper[4982]: I0122 05:50:43.106587 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Jan 22 05:50:43 crc kubenswrapper[4982]: I0122 05:50:43.106580 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 22 05:50:43 crc kubenswrapper[4982]: I0122 05:50:43.106619 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Jan 22 05:50:43 crc kubenswrapper[4982]: I0122 05:50:43.106975 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Jan 22 05:50:43 crc kubenswrapper[4982]: I0122 05:50:43.108033 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 22 05:50:43 crc kubenswrapper[4982]: I0122 05:50:43.108105 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 22 05:50:43 crc kubenswrapper[4982]: I0122 05:50:43.108219 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 22 05:50:43 crc kubenswrapper[4982]: I0122 05:50:43.108753 4982 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\""
Jan 22 05:50:43 crc kubenswrapper[4982]: I0122 05:50:43.108796 4982 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\""
Jan 22 05:50:43 crc kubenswrapper[4982]: I0122 05:50:43.108813 4982 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\""
Jan 22 05:50:43 crc kubenswrapper[4982]: I0122 05:50:43.108827 4982 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\""
Jan 22 05:50:43 crc kubenswrapper[4982]: I0122 05:50:43.117839 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 22 05:50:43 crc kubenswrapper[4982]: I0122 05:50:43.144600 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Jan 22 05:50:43 crc kubenswrapper[4982]: I0122 05:50:43.144662 4982 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="ccd51868b30212296595ccfd99522c4e8f926f9ad1a9e32cfc6083b78b48f8ad" exitCode=137
Jan 22 05:50:43 crc kubenswrapper[4982]: I0122 05:50:43.144718 4982 scope.go:117] "RemoveContainer" containerID="ccd51868b30212296595ccfd99522c4e8f926f9ad1a9e32cfc6083b78b48f8ad"
Jan 22 05:50:43 crc kubenswrapper[4982]: I0122 05:50:43.144779 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 22 05:50:43 crc kubenswrapper[4982]: I0122 05:50:43.167329 4982 scope.go:117] "RemoveContainer" containerID="ccd51868b30212296595ccfd99522c4e8f926f9ad1a9e32cfc6083b78b48f8ad"
Jan 22 05:50:43 crc kubenswrapper[4982]: E0122 05:50:43.168000 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ccd51868b30212296595ccfd99522c4e8f926f9ad1a9e32cfc6083b78b48f8ad\": container with ID starting with ccd51868b30212296595ccfd99522c4e8f926f9ad1a9e32cfc6083b78b48f8ad not found: ID does not exist" containerID="ccd51868b30212296595ccfd99522c4e8f926f9ad1a9e32cfc6083b78b48f8ad"
Jan 22 05:50:43 crc kubenswrapper[4982]: I0122 05:50:43.168056 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ccd51868b30212296595ccfd99522c4e8f926f9ad1a9e32cfc6083b78b48f8ad"} err="failed to get container status \"ccd51868b30212296595ccfd99522c4e8f926f9ad1a9e32cfc6083b78b48f8ad\": rpc error: code = NotFound desc = could not find container \"ccd51868b30212296595ccfd99522c4e8f926f9ad1a9e32cfc6083b78b48f8ad\": container with ID starting with ccd51868b30212296595ccfd99522c4e8f926f9ad1a9e32cfc6083b78b48f8ad not found: ID does not exist"
Jan 22 05:50:43 crc kubenswrapper[4982]: I0122 05:50:43.211032 4982 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\""
Jan 22 05:50:43 crc kubenswrapper[4982]: I0122 05:50:43.268544 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret"
Jan 22 05:50:43 crc kubenswrapper[4982]: I0122 05:50:43.681734 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Jan 22 05:50:43 crc kubenswrapper[4982]: I0122 05:50:43.727051 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes"
Jan 22 05:50:44 crc kubenswrapper[4982]: I0122 05:50:44.217428 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Jan 22 05:50:44 crc kubenswrapper[4982]: I0122 05:50:44.337660 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Jan 22 05:51:11 crc kubenswrapper[4982]: I0122 05:51:11.736261 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gsktj"]
Jan 22 05:51:11 crc kubenswrapper[4982]: I0122 05:51:11.737244 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gsktj" podUID="1b5ce2a2-8a71-4fbe-a860-c89c1be82f08" containerName="route-controller-manager" containerID="cri-o://1c1974234c0af15e8d2e7bb332674736c37b62b6672fefa131a5544137fe5e65" gracePeriod=30
Jan 22 05:51:11 crc kubenswrapper[4982]: I0122 05:51:11.741784 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-jbn6z"]
Jan 22 05:51:11 crc kubenswrapper[4982]: I0122 05:51:11.742075 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-jbn6z" podUID="41db98f4-c026-42e8-8486-412643968146" containerName="controller-manager" containerID="cri-o://468b2a3ed1c4952ab29a9ddf38e5b5adfadec70a225a3884eb0372f064ef8bbe" gracePeriod=30
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.145962 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gsktj"
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.151206 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-jbn6z"
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.251283 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/41db98f4-c026-42e8-8486-412643968146-client-ca\") pod \"41db98f4-c026-42e8-8486-412643968146\" (UID: \"41db98f4-c026-42e8-8486-412643968146\") "
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.251355 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1b5ce2a2-8a71-4fbe-a860-c89c1be82f08-serving-cert\") pod \"1b5ce2a2-8a71-4fbe-a860-c89c1be82f08\" (UID: \"1b5ce2a2-8a71-4fbe-a860-c89c1be82f08\") "
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.251450 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kplwh\" (UniqueName: \"kubernetes.io/projected/1b5ce2a2-8a71-4fbe-a860-c89c1be82f08-kube-api-access-kplwh\") pod \"1b5ce2a2-8a71-4fbe-a860-c89c1be82f08\" (UID: \"1b5ce2a2-8a71-4fbe-a860-c89c1be82f08\") "
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.251488 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1b5ce2a2-8a71-4fbe-a860-c89c1be82f08-client-ca\") pod \"1b5ce2a2-8a71-4fbe-a860-c89c1be82f08\" (UID: \"1b5ce2a2-8a71-4fbe-a860-c89c1be82f08\") "
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.251512 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/41db98f4-c026-42e8-8486-412643968146-proxy-ca-bundles\") pod \"41db98f4-c026-42e8-8486-412643968146\" (UID: \"41db98f4-c026-42e8-8486-412643968146\") "
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.251548 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/41db98f4-c026-42e8-8486-412643968146-serving-cert\") pod \"41db98f4-c026-42e8-8486-412643968146\" (UID: \"41db98f4-c026-42e8-8486-412643968146\") "
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.251578 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41db98f4-c026-42e8-8486-412643968146-config\") pod \"41db98f4-c026-42e8-8486-412643968146\" (UID: \"41db98f4-c026-42e8-8486-412643968146\") "
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.251599 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b5ce2a2-8a71-4fbe-a860-c89c1be82f08-config\") pod \"1b5ce2a2-8a71-4fbe-a860-c89c1be82f08\" (UID: \"1b5ce2a2-8a71-4fbe-a860-c89c1be82f08\") "
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.251628 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v45w4\" (UniqueName: \"kubernetes.io/projected/41db98f4-c026-42e8-8486-412643968146-kube-api-access-v45w4\") pod \"41db98f4-c026-42e8-8486-412643968146\" (UID: \"41db98f4-c026-42e8-8486-412643968146\") "
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.252492 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41db98f4-c026-42e8-8486-412643968146-client-ca" (OuterVolumeSpecName: "client-ca") pod "41db98f4-c026-42e8-8486-412643968146" (UID: "41db98f4-c026-42e8-8486-412643968146"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.252573 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41db98f4-c026-42e8-8486-412643968146-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "41db98f4-c026-42e8-8486-412643968146" (UID: "41db98f4-c026-42e8-8486-412643968146"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.252705 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1b5ce2a2-8a71-4fbe-a860-c89c1be82f08-client-ca" (OuterVolumeSpecName: "client-ca") pod "1b5ce2a2-8a71-4fbe-a860-c89c1be82f08" (UID: "1b5ce2a2-8a71-4fbe-a860-c89c1be82f08"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.252772 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1b5ce2a2-8a71-4fbe-a860-c89c1be82f08-config" (OuterVolumeSpecName: "config") pod "1b5ce2a2-8a71-4fbe-a860-c89c1be82f08" (UID: "1b5ce2a2-8a71-4fbe-a860-c89c1be82f08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.252782 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41db98f4-c026-42e8-8486-412643968146-config" (OuterVolumeSpecName: "config") pod "41db98f4-c026-42e8-8486-412643968146" (UID: "41db98f4-c026-42e8-8486-412643968146"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.258768 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41db98f4-c026-42e8-8486-412643968146-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "41db98f4-c026-42e8-8486-412643968146" (UID: "41db98f4-c026-42e8-8486-412643968146"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.260133 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41db98f4-c026-42e8-8486-412643968146-kube-api-access-v45w4" (OuterVolumeSpecName: "kube-api-access-v45w4") pod "41db98f4-c026-42e8-8486-412643968146" (UID: "41db98f4-c026-42e8-8486-412643968146"). InnerVolumeSpecName "kube-api-access-v45w4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.260895 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b5ce2a2-8a71-4fbe-a860-c89c1be82f08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1b5ce2a2-8a71-4fbe-a860-c89c1be82f08" (UID: "1b5ce2a2-8a71-4fbe-a860-c89c1be82f08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.261202 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b5ce2a2-8a71-4fbe-a860-c89c1be82f08-kube-api-access-kplwh" (OuterVolumeSpecName: "kube-api-access-kplwh") pod "1b5ce2a2-8a71-4fbe-a860-c89c1be82f08" (UID: "1b5ce2a2-8a71-4fbe-a860-c89c1be82f08"). InnerVolumeSpecName "kube-api-access-kplwh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.351810 4982 generic.go:334] "Generic (PLEG): container finished" podID="41db98f4-c026-42e8-8486-412643968146" containerID="468b2a3ed1c4952ab29a9ddf38e5b5adfadec70a225a3884eb0372f064ef8bbe" exitCode=0
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.351925 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-jbn6z"
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.351927 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-jbn6z" event={"ID":"41db98f4-c026-42e8-8486-412643968146","Type":"ContainerDied","Data":"468b2a3ed1c4952ab29a9ddf38e5b5adfadec70a225a3884eb0372f064ef8bbe"}
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.351981 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-jbn6z" event={"ID":"41db98f4-c026-42e8-8486-412643968146","Type":"ContainerDied","Data":"49a923596a64ca150a237f656779dfa92830e5675fb4fe9c71ac7b47ed4e8162"}
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.351999 4982 scope.go:117] "RemoveContainer" containerID="468b2a3ed1c4952ab29a9ddf38e5b5adfadec70a225a3884eb0372f064ef8bbe"
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.353451 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v45w4\" (UniqueName: \"kubernetes.io/projected/41db98f4-c026-42e8-8486-412643968146-kube-api-access-v45w4\") on node \"crc\" DevicePath \"\""
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.353471 4982 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/41db98f4-c026-42e8-8486-412643968146-client-ca\") on node \"crc\" DevicePath \"\""
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.353507 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1b5ce2a2-8a71-4fbe-a860-c89c1be82f08-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.353520 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kplwh\" (UniqueName: \"kubernetes.io/projected/1b5ce2a2-8a71-4fbe-a860-c89c1be82f08-kube-api-access-kplwh\") on node \"crc\" DevicePath \"\""
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.353530 4982 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1b5ce2a2-8a71-4fbe-a860-c89c1be82f08-client-ca\") on node \"crc\" DevicePath \"\""
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.353540 4982 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/41db98f4-c026-42e8-8486-412643968146-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.353585 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/41db98f4-c026-42e8-8486-412643968146-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.353597 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41db98f4-c026-42e8-8486-412643968146-config\") on node \"crc\" DevicePath \"\""
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.353607 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b5ce2a2-8a71-4fbe-a860-c89c1be82f08-config\") on node \"crc\" DevicePath \"\""
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.354322 4982 generic.go:334] "Generic (PLEG): container finished" podID="1b5ce2a2-8a71-4fbe-a860-c89c1be82f08" containerID="1c1974234c0af15e8d2e7bb332674736c37b62b6672fefa131a5544137fe5e65" exitCode=0
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.354357 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gsktj" event={"ID":"1b5ce2a2-8a71-4fbe-a860-c89c1be82f08","Type":"ContainerDied","Data":"1c1974234c0af15e8d2e7bb332674736c37b62b6672fefa131a5544137fe5e65"}
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.354383 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gsktj" event={"ID":"1b5ce2a2-8a71-4fbe-a860-c89c1be82f08","Type":"ContainerDied","Data":"40dbedf114b92a318a8846a2dd75f4e81dd08f33aaed2d8d1b0913e3b4231e29"}
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.354471 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gsktj"
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.381750 4982 scope.go:117] "RemoveContainer" containerID="468b2a3ed1c4952ab29a9ddf38e5b5adfadec70a225a3884eb0372f064ef8bbe"
Jan 22 05:51:12 crc kubenswrapper[4982]: E0122 05:51:12.382380 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"468b2a3ed1c4952ab29a9ddf38e5b5adfadec70a225a3884eb0372f064ef8bbe\": container with ID starting with 468b2a3ed1c4952ab29a9ddf38e5b5adfadec70a225a3884eb0372f064ef8bbe not found: ID does not exist" containerID="468b2a3ed1c4952ab29a9ddf38e5b5adfadec70a225a3884eb0372f064ef8bbe"
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.382490 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"468b2a3ed1c4952ab29a9ddf38e5b5adfadec70a225a3884eb0372f064ef8bbe"} err="failed to get container status \"468b2a3ed1c4952ab29a9ddf38e5b5adfadec70a225a3884eb0372f064ef8bbe\": rpc error: code = NotFound desc = could not find container \"468b2a3ed1c4952ab29a9ddf38e5b5adfadec70a225a3884eb0372f064ef8bbe\": container with ID starting with 468b2a3ed1c4952ab29a9ddf38e5b5adfadec70a225a3884eb0372f064ef8bbe not found: ID does not exist"
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.382539 4982 scope.go:117] "RemoveContainer" containerID="1c1974234c0af15e8d2e7bb332674736c37b62b6672fefa131a5544137fe5e65"
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.391363 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gsktj"]
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.394609 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gsktj"]
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.407324 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-jbn6z"]
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.412715 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-jbn6z"]
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.412828 4982 scope.go:117] "RemoveContainer" containerID="1c1974234c0af15e8d2e7bb332674736c37b62b6672fefa131a5544137fe5e65"
Jan 22 05:51:12 crc kubenswrapper[4982]: E0122 05:51:12.413573 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c1974234c0af15e8d2e7bb332674736c37b62b6672fefa131a5544137fe5e65\": container with ID starting with 1c1974234c0af15e8d2e7bb332674736c37b62b6672fefa131a5544137fe5e65 not found: ID does not exist" containerID="1c1974234c0af15e8d2e7bb332674736c37b62b6672fefa131a5544137fe5e65"
Jan 22 05:51:12 crc kubenswrapper[4982]: I0122 05:51:12.413609 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c1974234c0af15e8d2e7bb332674736c37b62b6672fefa131a5544137fe5e65"} err="failed to get container status \"1c1974234c0af15e8d2e7bb332674736c37b62b6672fefa131a5544137fe5e65\": rpc error: code = NotFound desc = could not find container \"1c1974234c0af15e8d2e7bb332674736c37b62b6672fefa131a5544137fe5e65\": container with ID starting with 1c1974234c0af15e8d2e7bb332674736c37b62b6672fefa131a5544137fe5e65 not found: ID does not exist"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.393431 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr"]
Jan 22 05:51:13 crc kubenswrapper[4982]: E0122 05:51:13.394357 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f17dc565-d8f5-4b5e-8139-1a679d6c99ae" containerName="installer"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.394381 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f17dc565-d8f5-4b5e-8139-1a679d6c99ae" containerName="installer"
Jan 22 05:51:13 crc kubenswrapper[4982]: E0122 05:51:13.394401 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b5ce2a2-8a71-4fbe-a860-c89c1be82f08" containerName="route-controller-manager"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.394414 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b5ce2a2-8a71-4fbe-a860-c89c1be82f08" containerName="route-controller-manager"
Jan 22 05:51:13 crc kubenswrapper[4982]: E0122 05:51:13.394429 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.394439 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Jan 22 05:51:13 crc kubenswrapper[4982]: E0122 05:51:13.394464 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41db98f4-c026-42e8-8486-412643968146" containerName="controller-manager"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.394473 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="41db98f4-c026-42e8-8486-412643968146" containerName="controller-manager"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.394621 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="41db98f4-c026-42e8-8486-412643968146" containerName="controller-manager"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.394693 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b5ce2a2-8a71-4fbe-a860-c89c1be82f08" containerName="route-controller-manager"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.394712 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f17dc565-d8f5-4b5e-8139-1a679d6c99ae" containerName="installer"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.394725 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.395280 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.400062 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.401995 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.402367 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.402404 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.402374 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.404084 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.404372 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-795bc96cd6-k5hp2"]
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.406301 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-795bc96cd6-k5hp2"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.412131 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr"]
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.412926 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.413151 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.413221 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.413883 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.414160 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.415110 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.417316 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.467987 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8-config\") pod \"route-controller-manager-795bc96cd6-k5hp2\" (UID: \"b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8\") " pod="openshift-route-controller-manager/route-controller-manager-795bc96cd6-k5hp2"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.468076 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8-serving-cert\") pod \"route-controller-manager-795bc96cd6-k5hp2\" (UID: \"b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8\") " pod="openshift-route-controller-manager/route-controller-manager-795bc96cd6-k5hp2"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.468124 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcxn7\" (UniqueName: \"kubernetes.io/projected/2c95e9fe-40c5-4896-9488-e85e4562dd34-kube-api-access-fcxn7\") pod \"controller-manager-6d77b65dc4-gsxzr\" (UID: \"2c95e9fe-40c5-4896-9488-e85e4562dd34\") " pod="openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.468217 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xn8b\" (UniqueName: \"kubernetes.io/projected/b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8-kube-api-access-4xn8b\") pod \"route-controller-manager-795bc96cd6-k5hp2\" (UID: \"b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8\") " pod="openshift-route-controller-manager/route-controller-manager-795bc96cd6-k5hp2"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.468269 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2c95e9fe-40c5-4896-9488-e85e4562dd34-serving-cert\") pod \"controller-manager-6d77b65dc4-gsxzr\" (UID: \"2c95e9fe-40c5-4896-9488-e85e4562dd34\") " pod="openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.468303 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c95e9fe-40c5-4896-9488-e85e4562dd34-config\") pod \"controller-manager-6d77b65dc4-gsxzr\" (UID: \"2c95e9fe-40c5-4896-9488-e85e4562dd34\") " pod="openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.468342 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8-client-ca\") pod \"route-controller-manager-795bc96cd6-k5hp2\" (UID: \"b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8\") " pod="openshift-route-controller-manager/route-controller-manager-795bc96cd6-k5hp2"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.468375 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2c95e9fe-40c5-4896-9488-e85e4562dd34-client-ca\") pod \"controller-manager-6d77b65dc4-gsxzr\" (UID: \"2c95e9fe-40c5-4896-9488-e85e4562dd34\") " pod="openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.468485 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2c95e9fe-40c5-4896-9488-e85e4562dd34-proxy-ca-bundles\") pod \"controller-manager-6d77b65dc4-gsxzr\" (UID: \"2c95e9fe-40c5-4896-9488-e85e4562dd34\") " pod="openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.469160 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-795bc96cd6-k5hp2"]
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.569786 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xn8b\" (UniqueName: \"kubernetes.io/projected/b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8-kube-api-access-4xn8b\") pod \"route-controller-manager-795bc96cd6-k5hp2\" (UID: \"b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8\") " pod="openshift-route-controller-manager/route-controller-manager-795bc96cd6-k5hp2"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.569946 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2c95e9fe-40c5-4896-9488-e85e4562dd34-serving-cert\") pod \"controller-manager-6d77b65dc4-gsxzr\" (UID: \"2c95e9fe-40c5-4896-9488-e85e4562dd34\") " pod="openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.569979 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c95e9fe-40c5-4896-9488-e85e4562dd34-config\") pod \"controller-manager-6d77b65dc4-gsxzr\" (UID: \"2c95e9fe-40c5-4896-9488-e85e4562dd34\") " pod="openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.570000 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8-client-ca\") pod \"route-controller-manager-795bc96cd6-k5hp2\" (UID: \"b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8\") " pod="openshift-route-controller-manager/route-controller-manager-795bc96cd6-k5hp2"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.570021 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2c95e9fe-40c5-4896-9488-e85e4562dd34-client-ca\") pod \"controller-manager-6d77b65dc4-gsxzr\" (UID: \"2c95e9fe-40c5-4896-9488-e85e4562dd34\") " pod="openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.570035 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2c95e9fe-40c5-4896-9488-e85e4562dd34-proxy-ca-bundles\") pod \"controller-manager-6d77b65dc4-gsxzr\" (UID: \"2c95e9fe-40c5-4896-9488-e85e4562dd34\") " pod="openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.570059 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8-config\") pod \"route-controller-manager-795bc96cd6-k5hp2\" (UID: \"b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8\") " pod="openshift-route-controller-manager/route-controller-manager-795bc96cd6-k5hp2"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.570082 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8-serving-cert\") pod \"route-controller-manager-795bc96cd6-k5hp2\" (UID: \"b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8\") " pod="openshift-route-controller-manager/route-controller-manager-795bc96cd6-k5hp2"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.570101 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcxn7\" (UniqueName: \"kubernetes.io/projected/2c95e9fe-40c5-4896-9488-e85e4562dd34-kube-api-access-fcxn7\") pod \"controller-manager-6d77b65dc4-gsxzr\" (UID: \"2c95e9fe-40c5-4896-9488-e85e4562dd34\") " pod="openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.572189 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2c95e9fe-40c5-4896-9488-e85e4562dd34-client-ca\") pod \"controller-manager-6d77b65dc4-gsxzr\" (UID: \"2c95e9fe-40c5-4896-9488-e85e4562dd34\") " pod="openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.572496 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2c95e9fe-40c5-4896-9488-e85e4562dd34-proxy-ca-bundles\") pod \"controller-manager-6d77b65dc4-gsxzr\" (UID: \"2c95e9fe-40c5-4896-9488-e85e4562dd34\") " pod="openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.572658 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8-client-ca\") pod \"route-controller-manager-795bc96cd6-k5hp2\" (UID: \"b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8\") " pod="openshift-route-controller-manager/route-controller-manager-795bc96cd6-k5hp2"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.573115 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8-config\") pod \"route-controller-manager-795bc96cd6-k5hp2\" (UID: \"b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8\") " pod="openshift-route-controller-manager/route-controller-manager-795bc96cd6-k5hp2"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.573839 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c95e9fe-40c5-4896-9488-e85e4562dd34-config\") pod \"controller-manager-6d77b65dc4-gsxzr\" (UID: \"2c95e9fe-40c5-4896-9488-e85e4562dd34\") " pod="openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.575449 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8-serving-cert\") pod \"route-controller-manager-795bc96cd6-k5hp2\" (UID: \"b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8\") " pod="openshift-route-controller-manager/route-controller-manager-795bc96cd6-k5hp2"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.576324 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2c95e9fe-40c5-4896-9488-e85e4562dd34-serving-cert\") pod \"controller-manager-6d77b65dc4-gsxzr\" (UID: \"2c95e9fe-40c5-4896-9488-e85e4562dd34\") " pod="openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.593044 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xn8b\" (UniqueName: \"kubernetes.io/projected/b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8-kube-api-access-4xn8b\") pod \"route-controller-manager-795bc96cd6-k5hp2\" (UID: \"b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8\") " pod="openshift-route-controller-manager/route-controller-manager-795bc96cd6-k5hp2"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.599356 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcxn7\" (UniqueName: \"kubernetes.io/projected/2c95e9fe-40c5-4896-9488-e85e4562dd34-kube-api-access-fcxn7\") pod \"controller-manager-6d77b65dc4-gsxzr\" (UID: \"2c95e9fe-40c5-4896-9488-e85e4562dd34\") " pod="openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.735369 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b5ce2a2-8a71-4fbe-a860-c89c1be82f08" path="/var/lib/kubelet/pods/1b5ce2a2-8a71-4fbe-a860-c89c1be82f08/volumes"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.737098 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41db98f4-c026-42e8-8486-412643968146" path="/var/lib/kubelet/pods/41db98f4-c026-42e8-8486-412643968146/volumes"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.754236 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.773727 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-795bc96cd6-k5hp2"
Jan 22 05:51:13 crc kubenswrapper[4982]: I0122 05:51:13.996309 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-795bc96cd6-k5hp2"]
Jan 22 05:51:14 crc kubenswrapper[4982]: I0122 05:51:14.067055 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr"]
Jan 22 05:51:14 crc kubenswrapper[4982]: W0122 05:51:14.078289 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2c95e9fe_40c5_4896_9488_e85e4562dd34.slice/crio-cb1c0217ee7cff497fde9a4984b9bb53b9637dd2c33649a6378a121af01ff7fc WatchSource:0}: Error finding container cb1c0217ee7cff497fde9a4984b9bb53b9637dd2c33649a6378a121af01ff7fc: Status 404 returned error can't find the container with id cb1c0217ee7cff497fde9a4984b9bb53b9637dd2c33649a6378a121af01ff7fc
Jan 22 05:51:14 crc kubenswrapper[4982]: I0122 05:51:14.377475 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-795bc96cd6-k5hp2" event={"ID":"b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8","Type":"ContainerStarted","Data":"cd55a3a405d411e84bb626c4e23be97649e7e89b52c2cabf0e8f72529b17a45e"}
Jan 22 05:51:14 crc kubenswrapper[4982]: I0122 05:51:14.378306 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-795bc96cd6-k5hp2" event={"ID":"b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8","Type":"ContainerStarted","Data":"503293c0908832faf62049706b9f17db2d0ca029942a255ed88b17f3e2d3d8a8"}
Jan 22 05:51:14 crc kubenswrapper[4982]: I0122 05:51:14.379530 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-795bc96cd6-k5hp2"
Jan 22 05:51:14 crc kubenswrapper[4982]: I0122 05:51:14.381501 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr" event={"ID":"2c95e9fe-40c5-4896-9488-e85e4562dd34","Type":"ContainerStarted","Data":"cbddecf8fbe2f1ce0909a868f12cd46ccad2894dce75e73d9be315c14a5d00fa"}
Jan 22 05:51:14 crc kubenswrapper[4982]: I0122 05:51:14.381574 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr" event={"ID":"2c95e9fe-40c5-4896-9488-e85e4562dd34","Type":"ContainerStarted","Data":"cb1c0217ee7cff497fde9a4984b9bb53b9637dd2c33649a6378a121af01ff7fc"}
Jan 22 05:51:14 crc kubenswrapper[4982]: I0122 05:51:14.382155 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr"
Jan 22 05:51:14 crc kubenswrapper[4982]: I0122 05:51:14.389951 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr"
Jan 22 05:51:14 crc kubenswrapper[4982]: I0122 05:51:14.403699 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-795bc96cd6-k5hp2" podStartSLOduration=3.403680066 podStartE2EDuration="3.403680066s" podCreationTimestamp="2026-01-22 05:51:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:51:14.403008718 +0000 UTC m=+335.241646741" watchObservedRunningTime="2026-01-22 05:51:14.403680066 +0000 UTC m=+335.242318089"
Jan 22 05:51:14 crc kubenswrapper[4982]: I0122 05:51:14.430497 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr" podStartSLOduration=3.430476874 podStartE2EDuration="3.430476874s" podCreationTimestamp="2026-01-22 05:51:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:51:14.426163823 +0000 UTC m=+335.264801826" watchObservedRunningTime="2026-01-22 05:51:14.430476874 +0000 UTC m=+335.269114877"
Jan 22 05:51:14 crc kubenswrapper[4982]: I0122 05:51:14.614351 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-795bc96cd6-k5hp2"
Jan 22 05:51:22 crc kubenswrapper[4982]: I0122 05:51:22.057995 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr"]
Jan 22 05:51:22 crc kubenswrapper[4982]: I0122 05:51:22.058718 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr" podUID="2c95e9fe-40c5-4896-9488-e85e4562dd34" containerName="controller-manager" containerID="cri-o://cbddecf8fbe2f1ce0909a868f12cd46ccad2894dce75e73d9be315c14a5d00fa" gracePeriod=30
Jan 22 05:51:22 crc kubenswrapper[4982]: I0122 05:51:22.089893 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-795bc96cd6-k5hp2"]
Jan 22 05:51:22 crc kubenswrapper[4982]: I0122 05:51:22.090126 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-795bc96cd6-k5hp2" podUID="b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8" containerName="route-controller-manager" containerID="cri-o://cd55a3a405d411e84bb626c4e23be97649e7e89b52c2cabf0e8f72529b17a45e" gracePeriod=30
Jan 22 05:51:22 crc kubenswrapper[4982]: I0122 05:51:22.433627 4982 generic.go:334] "Generic (PLEG): container finished" podID="b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8" containerID="cd55a3a405d411e84bb626c4e23be97649e7e89b52c2cabf0e8f72529b17a45e" exitCode=0
Jan 22 05:51:22 crc kubenswrapper[4982]: I0122 05:51:22.433713 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-795bc96cd6-k5hp2" event={"ID":"b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8","Type":"ContainerDied","Data":"cd55a3a405d411e84bb626c4e23be97649e7e89b52c2cabf0e8f72529b17a45e"}
Jan 22 05:51:22 crc kubenswrapper[4982]: I0122 05:51:22.435248 4982 generic.go:334] "Generic (PLEG): container finished" podID="2c95e9fe-40c5-4896-9488-e85e4562dd34" containerID="cbddecf8fbe2f1ce0909a868f12cd46ccad2894dce75e73d9be315c14a5d00fa" exitCode=0
Jan 22 05:51:22 crc kubenswrapper[4982]: I0122 05:51:22.435290 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr" event={"ID":"2c95e9fe-40c5-4896-9488-e85e4562dd34","Type":"ContainerDied","Data":"cbddecf8fbe2f1ce0909a868f12cd46ccad2894dce75e73d9be315c14a5d00fa"}
Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.138634 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-795bc96cd6-k5hp2"
Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.175460 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f48b7b747-ksvjl"]
Jan 22 05:51:23 crc kubenswrapper[4982]: E0122 05:51:23.175834 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8" containerName="route-controller-manager"
Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.176083 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8" containerName="route-controller-manager"
Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.176299 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8" containerName="route-controller-manager"
Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.177007 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-ksvjl"
Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.193746 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f48b7b747-ksvjl"]
Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.215269 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr"
Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.226439 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8-client-ca\") pod \"b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8\" (UID: \"b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8\") "
Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.226498 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4xn8b\" (UniqueName: \"kubernetes.io/projected/b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8-kube-api-access-4xn8b\") pod \"b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8\" (UID: \"b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8\") "
Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.226558 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8-config\") pod \"b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8\" (UID: \"b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8\") "
Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.226643 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8-serving-cert\") pod \"b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8\" (UID: \"b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8\") "
Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.226819 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2c92z\" (UniqueName: \"kubernetes.io/projected/b04a2a4c-1418-4f71-9a64-cb8eb5695e5e-kube-api-access-2c92z\") pod \"route-controller-manager-7f48b7b747-ksvjl\" (UID: \"b04a2a4c-1418-4f71-9a64-cb8eb5695e5e\") " pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-ksvjl"
Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.226882 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b04a2a4c-1418-4f71-9a64-cb8eb5695e5e-serving-cert\") pod \"route-controller-manager-7f48b7b747-ksvjl\" (UID: \"b04a2a4c-1418-4f71-9a64-cb8eb5695e5e\") " pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-ksvjl"
Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.226924 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b04a2a4c-1418-4f71-9a64-cb8eb5695e5e-client-ca\") pod \"route-controller-manager-7f48b7b747-ksvjl\" (UID: \"b04a2a4c-1418-4f71-9a64-cb8eb5695e5e\") " pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-ksvjl"
Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.226954 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b04a2a4c-1418-4f71-9a64-cb8eb5695e5e-config\") pod \"route-controller-manager-7f48b7b747-ksvjl\" (UID: \"b04a2a4c-1418-4f71-9a64-cb8eb5695e5e\") " pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-ksvjl"
Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.230081 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8-client-ca" (OuterVolumeSpecName: "client-ca") pod "b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8" (UID: "b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.230589 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8-config" (OuterVolumeSpecName: "config") pod "b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8" (UID: "b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.236343 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8-kube-api-access-4xn8b" (OuterVolumeSpecName: "kube-api-access-4xn8b") pod "b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8" (UID: "b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8"). InnerVolumeSpecName "kube-api-access-4xn8b". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.237037 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8" (UID: "b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.328131 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2c95e9fe-40c5-4896-9488-e85e4562dd34-client-ca\") pod \"2c95e9fe-40c5-4896-9488-e85e4562dd34\" (UID: \"2c95e9fe-40c5-4896-9488-e85e4562dd34\") "
Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.328202 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2c95e9fe-40c5-4896-9488-e85e4562dd34-serving-cert\") pod \"2c95e9fe-40c5-4896-9488-e85e4562dd34\" (UID: \"2c95e9fe-40c5-4896-9488-e85e4562dd34\") "
Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.328252 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcxn7\" (UniqueName: \"kubernetes.io/projected/2c95e9fe-40c5-4896-9488-e85e4562dd34-kube-api-access-fcxn7\") pod \"2c95e9fe-40c5-4896-9488-e85e4562dd34\" (UID: \"2c95e9fe-40c5-4896-9488-e85e4562dd34\") "
Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.328287 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c95e9fe-40c5-4896-9488-e85e4562dd34-config\") pod \"2c95e9fe-40c5-4896-9488-e85e4562dd34\" (UID: \"2c95e9fe-40c5-4896-9488-e85e4562dd34\") "
Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.328402 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2c95e9fe-40c5-4896-9488-e85e4562dd34-proxy-ca-bundles\") pod \"2c95e9fe-40c5-4896-9488-e85e4562dd34\" (UID: \"2c95e9fe-40c5-4896-9488-e85e4562dd34\") "
Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.328647 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2c92z\" (UniqueName: \"kubernetes.io/projected/b04a2a4c-1418-4f71-9a64-cb8eb5695e5e-kube-api-access-2c92z\") pod 
\"route-controller-manager-7f48b7b747-ksvjl\" (UID: \"b04a2a4c-1418-4f71-9a64-cb8eb5695e5e\") " pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-ksvjl" Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.328697 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b04a2a4c-1418-4f71-9a64-cb8eb5695e5e-serving-cert\") pod \"route-controller-manager-7f48b7b747-ksvjl\" (UID: \"b04a2a4c-1418-4f71-9a64-cb8eb5695e5e\") " pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-ksvjl" Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.328744 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b04a2a4c-1418-4f71-9a64-cb8eb5695e5e-client-ca\") pod \"route-controller-manager-7f48b7b747-ksvjl\" (UID: \"b04a2a4c-1418-4f71-9a64-cb8eb5695e5e\") " pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-ksvjl" Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.328780 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b04a2a4c-1418-4f71-9a64-cb8eb5695e5e-config\") pod \"route-controller-manager-7f48b7b747-ksvjl\" (UID: \"b04a2a4c-1418-4f71-9a64-cb8eb5695e5e\") " pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-ksvjl" Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.328844 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.328886 4982 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.328899 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4xn8b\" (UniqueName: \"kubernetes.io/projected/b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8-kube-api-access-4xn8b\") on node \"crc\" DevicePath \"\"" Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.328913 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8-config\") on node \"crc\" DevicePath \"\"" Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.329310 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c95e9fe-40c5-4896-9488-e85e4562dd34-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "2c95e9fe-40c5-4896-9488-e85e4562dd34" (UID: "2c95e9fe-40c5-4896-9488-e85e4562dd34"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.329305 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c95e9fe-40c5-4896-9488-e85e4562dd34-client-ca" (OuterVolumeSpecName: "client-ca") pod "2c95e9fe-40c5-4896-9488-e85e4562dd34" (UID: "2c95e9fe-40c5-4896-9488-e85e4562dd34"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.329884 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c95e9fe-40c5-4896-9488-e85e4562dd34-config" (OuterVolumeSpecName: "config") pod "2c95e9fe-40c5-4896-9488-e85e4562dd34" (UID: "2c95e9fe-40c5-4896-9488-e85e4562dd34"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.330471 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b04a2a4c-1418-4f71-9a64-cb8eb5695e5e-client-ca\") pod \"route-controller-manager-7f48b7b747-ksvjl\" (UID: \"b04a2a4c-1418-4f71-9a64-cb8eb5695e5e\") " pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-ksvjl" Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.330650 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b04a2a4c-1418-4f71-9a64-cb8eb5695e5e-config\") pod \"route-controller-manager-7f48b7b747-ksvjl\" (UID: \"b04a2a4c-1418-4f71-9a64-cb8eb5695e5e\") " pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-ksvjl" Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.332002 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c95e9fe-40c5-4896-9488-e85e4562dd34-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "2c95e9fe-40c5-4896-9488-e85e4562dd34" (UID: "2c95e9fe-40c5-4896-9488-e85e4562dd34"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.333121 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b04a2a4c-1418-4f71-9a64-cb8eb5695e5e-serving-cert\") pod \"route-controller-manager-7f48b7b747-ksvjl\" (UID: \"b04a2a4c-1418-4f71-9a64-cb8eb5695e5e\") " pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-ksvjl" Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.333132 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c95e9fe-40c5-4896-9488-e85e4562dd34-kube-api-access-fcxn7" (OuterVolumeSpecName: "kube-api-access-fcxn7") pod "2c95e9fe-40c5-4896-9488-e85e4562dd34" (UID: "2c95e9fe-40c5-4896-9488-e85e4562dd34"). InnerVolumeSpecName "kube-api-access-fcxn7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.345908 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2c92z\" (UniqueName: \"kubernetes.io/projected/b04a2a4c-1418-4f71-9a64-cb8eb5695e5e-kube-api-access-2c92z\") pod \"route-controller-manager-7f48b7b747-ksvjl\" (UID: \"b04a2a4c-1418-4f71-9a64-cb8eb5695e5e\") " pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-ksvjl" Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.430445 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2c95e9fe-40c5-4896-9488-e85e4562dd34-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.430481 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcxn7\" (UniqueName: \"kubernetes.io/projected/2c95e9fe-40c5-4896-9488-e85e4562dd34-kube-api-access-fcxn7\") on node \"crc\" DevicePath \"\"" Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.430495 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c95e9fe-40c5-4896-9488-e85e4562dd34-config\") on node \"crc\" DevicePath \"\"" Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.430504 4982 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2c95e9fe-40c5-4896-9488-e85e4562dd34-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.430514 4982 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2c95e9fe-40c5-4896-9488-e85e4562dd34-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.442119 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-795bc96cd6-k5hp2" event={"ID":"b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8","Type":"ContainerDied","Data":"503293c0908832faf62049706b9f17db2d0ca029942a255ed88b17f3e2d3d8a8"} Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.442178 4982 scope.go:117] "RemoveContainer" containerID="cd55a3a405d411e84bb626c4e23be97649e7e89b52c2cabf0e8f72529b17a45e" Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.442287 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-795bc96cd6-k5hp2" Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.444303 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr" event={"ID":"2c95e9fe-40c5-4896-9488-e85e4562dd34","Type":"ContainerDied","Data":"cb1c0217ee7cff497fde9a4984b9bb53b9637dd2c33649a6378a121af01ff7fc"} Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.444471 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr" Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.466934 4982 scope.go:117] "RemoveContainer" containerID="cbddecf8fbe2f1ce0909a868f12cd46ccad2894dce75e73d9be315c14a5d00fa" Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.482312 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr"] Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.490138 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-6d77b65dc4-gsxzr"] Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.500784 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-795bc96cd6-k5hp2"] Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.510011 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-795bc96cd6-k5hp2"] Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.520278 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-ksvjl" Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.725380 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c95e9fe-40c5-4896-9488-e85e4562dd34" path="/var/lib/kubelet/pods/2c95e9fe-40c5-4896-9488-e85e4562dd34/volumes" Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.725962 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8" path="/var/lib/kubelet/pods/b7b4879c-d0d5-45a3-b1ba-d68d225bc5a8/volumes" Jan 22 05:51:23 crc kubenswrapper[4982]: I0122 05:51:23.962424 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f48b7b747-ksvjl"] Jan 22 05:51:24 crc kubenswrapper[4982]: I0122 05:51:24.459455 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-ksvjl" event={"ID":"b04a2a4c-1418-4f71-9a64-cb8eb5695e5e","Type":"ContainerStarted","Data":"12ddcb577bb8f6160aafa10cc0dcf75de64fcd6820e375ed399defd12dda1a9b"} Jan 22 05:51:24 crc kubenswrapper[4982]: I0122 05:51:24.460119 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-ksvjl" event={"ID":"b04a2a4c-1418-4f71-9a64-cb8eb5695e5e","Type":"ContainerStarted","Data":"d7addd78e4d47dc071dab769d8a2909cd4118d8952fea32e4faacb1710616952"} Jan 22 05:51:24 crc kubenswrapper[4982]: I0122 05:51:24.484313 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-ksvjl" podStartSLOduration=2.484284992 podStartE2EDuration="2.484284992s" podCreationTimestamp="2026-01-22 05:51:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:51:24.481826183 +0000 UTC m=+345.320464186" watchObservedRunningTime="2026-01-22 05:51:24.484284992 +0000 UTC m=+345.322923035" Jan 22 05:51:25 crc kubenswrapper[4982]: I0122 05:51:25.390685 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-66b6f984d5-ntv6m"] Jan 22 05:51:25 crc kubenswrapper[4982]: E0122 05:51:25.391005 4982 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c95e9fe-40c5-4896-9488-e85e4562dd34" containerName="controller-manager" Jan 22 05:51:25 crc kubenswrapper[4982]: I0122 05:51:25.391022 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c95e9fe-40c5-4896-9488-e85e4562dd34" containerName="controller-manager" Jan 22 05:51:25 crc kubenswrapper[4982]: I0122 05:51:25.391148 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c95e9fe-40c5-4896-9488-e85e4562dd34" containerName="controller-manager" Jan 22 05:51:25 crc kubenswrapper[4982]: I0122 05:51:25.391612 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-66b6f984d5-ntv6m" Jan 22 05:51:25 crc kubenswrapper[4982]: I0122 05:51:25.394420 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 22 05:51:25 crc kubenswrapper[4982]: I0122 05:51:25.394481 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 22 05:51:25 crc kubenswrapper[4982]: I0122 05:51:25.394819 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 22 05:51:25 crc kubenswrapper[4982]: I0122 05:51:25.394825 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 22 05:51:25 crc kubenswrapper[4982]: I0122 05:51:25.395101 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 22 05:51:25 crc kubenswrapper[4982]: I0122 05:51:25.395179 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 22 05:51:25 crc kubenswrapper[4982]: I0122 05:51:25.402948 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 22 05:51:25 crc kubenswrapper[4982]: I0122 05:51:25.407392 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-66b6f984d5-ntv6m"] Jan 22 05:51:25 crc kubenswrapper[4982]: I0122 05:51:25.461158 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwxtn\" (UniqueName: \"kubernetes.io/projected/c7843233-5cf1-4536-9418-e3c6af70c9d5-kube-api-access-zwxtn\") pod \"controller-manager-66b6f984d5-ntv6m\" (UID: \"c7843233-5cf1-4536-9418-e3c6af70c9d5\") " pod="openshift-controller-manager/controller-manager-66b6f984d5-ntv6m" Jan 22 05:51:25 crc kubenswrapper[4982]: I0122 05:51:25.461240 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7843233-5cf1-4536-9418-e3c6af70c9d5-config\") pod \"controller-manager-66b6f984d5-ntv6m\" (UID: \"c7843233-5cf1-4536-9418-e3c6af70c9d5\") " pod="openshift-controller-manager/controller-manager-66b6f984d5-ntv6m" Jan 22 05:51:25 crc kubenswrapper[4982]: I0122 05:51:25.461311 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c7843233-5cf1-4536-9418-e3c6af70c9d5-client-ca\") pod \"controller-manager-66b6f984d5-ntv6m\" (UID: \"c7843233-5cf1-4536-9418-e3c6af70c9d5\") " pod="openshift-controller-manager/controller-manager-66b6f984d5-ntv6m" Jan 22 
05:51:25 crc kubenswrapper[4982]: I0122 05:51:25.461333 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c7843233-5cf1-4536-9418-e3c6af70c9d5-serving-cert\") pod \"controller-manager-66b6f984d5-ntv6m\" (UID: \"c7843233-5cf1-4536-9418-e3c6af70c9d5\") " pod="openshift-controller-manager/controller-manager-66b6f984d5-ntv6m" Jan 22 05:51:25 crc kubenswrapper[4982]: I0122 05:51:25.461381 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c7843233-5cf1-4536-9418-e3c6af70c9d5-proxy-ca-bundles\") pod \"controller-manager-66b6f984d5-ntv6m\" (UID: \"c7843233-5cf1-4536-9418-e3c6af70c9d5\") " pod="openshift-controller-manager/controller-manager-66b6f984d5-ntv6m" Jan 22 05:51:25 crc kubenswrapper[4982]: I0122 05:51:25.465399 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-ksvjl" Jan 22 05:51:25 crc kubenswrapper[4982]: I0122 05:51:25.470289 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-ksvjl" Jan 22 05:51:25 crc kubenswrapper[4982]: I0122 05:51:25.563309 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c7843233-5cf1-4536-9418-e3c6af70c9d5-client-ca\") pod \"controller-manager-66b6f984d5-ntv6m\" (UID: \"c7843233-5cf1-4536-9418-e3c6af70c9d5\") " pod="openshift-controller-manager/controller-manager-66b6f984d5-ntv6m" Jan 22 05:51:25 crc kubenswrapper[4982]: I0122 05:51:25.563398 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c7843233-5cf1-4536-9418-e3c6af70c9d5-serving-cert\") pod \"controller-manager-66b6f984d5-ntv6m\" (UID: \"c7843233-5cf1-4536-9418-e3c6af70c9d5\") " pod="openshift-controller-manager/controller-manager-66b6f984d5-ntv6m" Jan 22 05:51:25 crc kubenswrapper[4982]: I0122 05:51:25.563494 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c7843233-5cf1-4536-9418-e3c6af70c9d5-proxy-ca-bundles\") pod \"controller-manager-66b6f984d5-ntv6m\" (UID: \"c7843233-5cf1-4536-9418-e3c6af70c9d5\") " pod="openshift-controller-manager/controller-manager-66b6f984d5-ntv6m" Jan 22 05:51:25 crc kubenswrapper[4982]: I0122 05:51:25.563752 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwxtn\" (UniqueName: \"kubernetes.io/projected/c7843233-5cf1-4536-9418-e3c6af70c9d5-kube-api-access-zwxtn\") pod \"controller-manager-66b6f984d5-ntv6m\" (UID: \"c7843233-5cf1-4536-9418-e3c6af70c9d5\") " pod="openshift-controller-manager/controller-manager-66b6f984d5-ntv6m" Jan 22 05:51:25 crc kubenswrapper[4982]: I0122 05:51:25.563894 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7843233-5cf1-4536-9418-e3c6af70c9d5-config\") pod \"controller-manager-66b6f984d5-ntv6m\" (UID: \"c7843233-5cf1-4536-9418-e3c6af70c9d5\") " pod="openshift-controller-manager/controller-manager-66b6f984d5-ntv6m" Jan 22 05:51:25 crc kubenswrapper[4982]: I0122 05:51:25.565697 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c7843233-5cf1-4536-9418-e3c6af70c9d5-proxy-ca-bundles\") pod \"controller-manager-66b6f984d5-ntv6m\" (UID: \"c7843233-5cf1-4536-9418-e3c6af70c9d5\") " pod="openshift-controller-manager/controller-manager-66b6f984d5-ntv6m" Jan 22 05:51:25 crc kubenswrapper[4982]: I0122 05:51:25.565976 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c7843233-5cf1-4536-9418-e3c6af70c9d5-client-ca\") pod \"controller-manager-66b6f984d5-ntv6m\" (UID: \"c7843233-5cf1-4536-9418-e3c6af70c9d5\") " pod="openshift-controller-manager/controller-manager-66b6f984d5-ntv6m" Jan 22 05:51:25 crc kubenswrapper[4982]: I0122 05:51:25.566929 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7843233-5cf1-4536-9418-e3c6af70c9d5-config\") pod \"controller-manager-66b6f984d5-ntv6m\" (UID: \"c7843233-5cf1-4536-9418-e3c6af70c9d5\") " pod="openshift-controller-manager/controller-manager-66b6f984d5-ntv6m" Jan 22 05:51:25 crc kubenswrapper[4982]: I0122 05:51:25.571601 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c7843233-5cf1-4536-9418-e3c6af70c9d5-serving-cert\") pod \"controller-manager-66b6f984d5-ntv6m\" (UID: \"c7843233-5cf1-4536-9418-e3c6af70c9d5\") " pod="openshift-controller-manager/controller-manager-66b6f984d5-ntv6m" Jan 22 05:51:25 crc kubenswrapper[4982]: I0122 05:51:25.589675 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwxtn\" (UniqueName: \"kubernetes.io/projected/c7843233-5cf1-4536-9418-e3c6af70c9d5-kube-api-access-zwxtn\") pod \"controller-manager-66b6f984d5-ntv6m\" (UID: \"c7843233-5cf1-4536-9418-e3c6af70c9d5\") " pod="openshift-controller-manager/controller-manager-66b6f984d5-ntv6m" Jan 22 05:51:25 crc kubenswrapper[4982]: I0122 05:51:25.712468 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-66b6f984d5-ntv6m" Jan 22 05:51:26 crc kubenswrapper[4982]: I0122 05:51:26.271838 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-66b6f984d5-ntv6m"] Jan 22 05:51:26 crc kubenswrapper[4982]: W0122 05:51:26.280282 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc7843233_5cf1_4536_9418_e3c6af70c9d5.slice/crio-df49a049d50d43e165c502fa2d90dbdcea42cd04988d8e0e8fdb0ebd9cfc8297 WatchSource:0}: Error finding container df49a049d50d43e165c502fa2d90dbdcea42cd04988d8e0e8fdb0ebd9cfc8297: Status 404 returned error can't find the container with id df49a049d50d43e165c502fa2d90dbdcea42cd04988d8e0e8fdb0ebd9cfc8297 Jan 22 05:51:26 crc kubenswrapper[4982]: I0122 05:51:26.476251 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-66b6f984d5-ntv6m" event={"ID":"c7843233-5cf1-4536-9418-e3c6af70c9d5","Type":"ContainerStarted","Data":"fe4f37dcf76abcd0c334d82c7ce42a4cb9546ddc6088d3665c8225e40c1b3eb4"} Jan 22 05:51:26 crc kubenswrapper[4982]: I0122 05:51:26.476712 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-66b6f984d5-ntv6m" event={"ID":"c7843233-5cf1-4536-9418-e3c6af70c9d5","Type":"ContainerStarted","Data":"df49a049d50d43e165c502fa2d90dbdcea42cd04988d8e0e8fdb0ebd9cfc8297"} Jan 22 05:51:26 crc kubenswrapper[4982]: I0122 05:51:26.478476 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-66b6f984d5-ntv6m" Jan 22 05:51:26 crc kubenswrapper[4982]: I0122 05:51:26.480928 4982 patch_prober.go:28] interesting pod/controller-manager-66b6f984d5-ntv6m container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.61:8443/healthz\": dial tcp 10.217.0.61:8443: connect: connection refused" start-of-body= Jan 22 05:51:26 crc kubenswrapper[4982]: I0122 05:51:26.480977 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-66b6f984d5-ntv6m" podUID="c7843233-5cf1-4536-9418-e3c6af70c9d5" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.61:8443/healthz\": dial tcp 10.217.0.61:8443: connect: connection refused" Jan 22 05:51:26 crc kubenswrapper[4982]: I0122 05:51:26.506195 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-66b6f984d5-ntv6m" podStartSLOduration=4.5061634139999995 podStartE2EDuration="4.506163414s" podCreationTimestamp="2026-01-22 05:51:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:51:26.499203889 +0000 UTC m=+347.337841902" watchObservedRunningTime="2026-01-22 05:51:26.506163414 +0000 UTC m=+347.344801407" Jan 22 05:51:27 crc kubenswrapper[4982]: I0122 05:51:27.484958 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-66b6f984d5-ntv6m" Jan 22 05:51:37 crc kubenswrapper[4982]: I0122 05:51:37.874546 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-66b6f984d5-ntv6m"] Jan 22 05:51:37 crc kubenswrapper[4982]: I0122 05:51:37.875579 4982 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-66b6f984d5-ntv6m" podUID="c7843233-5cf1-4536-9418-e3c6af70c9d5" containerName="controller-manager" containerID="cri-o://fe4f37dcf76abcd0c334d82c7ce42a4cb9546ddc6088d3665c8225e40c1b3eb4" gracePeriod=30 Jan 22 05:51:37 crc kubenswrapper[4982]: I0122 05:51:37.892131 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f48b7b747-ksvjl"] Jan 22 05:51:37 crc kubenswrapper[4982]: I0122 05:51:37.892436 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-ksvjl" podUID="b04a2a4c-1418-4f71-9a64-cb8eb5695e5e" containerName="route-controller-manager" containerID="cri-o://12ddcb577bb8f6160aafa10cc0dcf75de64fcd6820e375ed399defd12dda1a9b" gracePeriod=30 Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.404447 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-ksvjl" Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.457595 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b04a2a4c-1418-4f71-9a64-cb8eb5695e5e-config\") pod \"b04a2a4c-1418-4f71-9a64-cb8eb5695e5e\" (UID: \"b04a2a4c-1418-4f71-9a64-cb8eb5695e5e\") " Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.457772 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b04a2a4c-1418-4f71-9a64-cb8eb5695e5e-serving-cert\") pod \"b04a2a4c-1418-4f71-9a64-cb8eb5695e5e\" (UID: \"b04a2a4c-1418-4f71-9a64-cb8eb5695e5e\") " Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.457793 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b04a2a4c-1418-4f71-9a64-cb8eb5695e5e-client-ca\") pod \"b04a2a4c-1418-4f71-9a64-cb8eb5695e5e\" (UID: \"b04a2a4c-1418-4f71-9a64-cb8eb5695e5e\") " Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.457819 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2c92z\" (UniqueName: \"kubernetes.io/projected/b04a2a4c-1418-4f71-9a64-cb8eb5695e5e-kube-api-access-2c92z\") pod \"b04a2a4c-1418-4f71-9a64-cb8eb5695e5e\" (UID: \"b04a2a4c-1418-4f71-9a64-cb8eb5695e5e\") " Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.459206 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b04a2a4c-1418-4f71-9a64-cb8eb5695e5e-client-ca" (OuterVolumeSpecName: "client-ca") pod "b04a2a4c-1418-4f71-9a64-cb8eb5695e5e" (UID: "b04a2a4c-1418-4f71-9a64-cb8eb5695e5e"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.459218 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b04a2a4c-1418-4f71-9a64-cb8eb5695e5e-config" (OuterVolumeSpecName: "config") pod "b04a2a4c-1418-4f71-9a64-cb8eb5695e5e" (UID: "b04a2a4c-1418-4f71-9a64-cb8eb5695e5e"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.486716 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b04a2a4c-1418-4f71-9a64-cb8eb5695e5e-kube-api-access-2c92z" (OuterVolumeSpecName: "kube-api-access-2c92z") pod "b04a2a4c-1418-4f71-9a64-cb8eb5695e5e" (UID: "b04a2a4c-1418-4f71-9a64-cb8eb5695e5e"). InnerVolumeSpecName "kube-api-access-2c92z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.488210 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b04a2a4c-1418-4f71-9a64-cb8eb5695e5e-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "b04a2a4c-1418-4f71-9a64-cb8eb5695e5e" (UID: "b04a2a4c-1418-4f71-9a64-cb8eb5695e5e"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.500473 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-66b6f984d5-ntv6m" Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.555020 4982 generic.go:334] "Generic (PLEG): container finished" podID="c7843233-5cf1-4536-9418-e3c6af70c9d5" containerID="fe4f37dcf76abcd0c334d82c7ce42a4cb9546ddc6088d3665c8225e40c1b3eb4" exitCode=0 Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.555168 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-66b6f984d5-ntv6m" event={"ID":"c7843233-5cf1-4536-9418-e3c6af70c9d5","Type":"ContainerDied","Data":"fe4f37dcf76abcd0c334d82c7ce42a4cb9546ddc6088d3665c8225e40c1b3eb4"} Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.555227 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-66b6f984d5-ntv6m" event={"ID":"c7843233-5cf1-4536-9418-e3c6af70c9d5","Type":"ContainerDied","Data":"df49a049d50d43e165c502fa2d90dbdcea42cd04988d8e0e8fdb0ebd9cfc8297"} Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.555250 4982 scope.go:117] "RemoveContainer" containerID="fe4f37dcf76abcd0c334d82c7ce42a4cb9546ddc6088d3665c8225e40c1b3eb4" Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.555435 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-66b6f984d5-ntv6m" Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.558602 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c7843233-5cf1-4536-9418-e3c6af70c9d5-client-ca\") pod \"c7843233-5cf1-4536-9418-e3c6af70c9d5\" (UID: \"c7843233-5cf1-4536-9418-e3c6af70c9d5\") " Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.558675 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c7843233-5cf1-4536-9418-e3c6af70c9d5-proxy-ca-bundles\") pod \"c7843233-5cf1-4536-9418-e3c6af70c9d5\" (UID: \"c7843233-5cf1-4536-9418-e3c6af70c9d5\") " Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.558720 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7843233-5cf1-4536-9418-e3c6af70c9d5-config\") pod \"c7843233-5cf1-4536-9418-e3c6af70c9d5\" (UID: \"c7843233-5cf1-4536-9418-e3c6af70c9d5\") " Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.558799 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zwxtn\" (UniqueName: \"kubernetes.io/projected/c7843233-5cf1-4536-9418-e3c6af70c9d5-kube-api-access-zwxtn\") pod \"c7843233-5cf1-4536-9418-e3c6af70c9d5\" (UID: \"c7843233-5cf1-4536-9418-e3c6af70c9d5\") " Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.558934 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c7843233-5cf1-4536-9418-e3c6af70c9d5-serving-cert\") pod \"c7843233-5cf1-4536-9418-e3c6af70c9d5\" (UID: \"c7843233-5cf1-4536-9418-e3c6af70c9d5\") " Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.559222 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b04a2a4c-1418-4f71-9a64-cb8eb5695e5e-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.559237 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2c92z\" (UniqueName: \"kubernetes.io/projected/b04a2a4c-1418-4f71-9a64-cb8eb5695e5e-kube-api-access-2c92z\") on node \"crc\" DevicePath \"\"" Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.559251 4982 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b04a2a4c-1418-4f71-9a64-cb8eb5695e5e-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.559264 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b04a2a4c-1418-4f71-9a64-cb8eb5695e5e-config\") on node \"crc\" DevicePath \"\"" Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.559610 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c7843233-5cf1-4536-9418-e3c6af70c9d5-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "c7843233-5cf1-4536-9418-e3c6af70c9d5" (UID: "c7843233-5cf1-4536-9418-e3c6af70c9d5"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.559640 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c7843233-5cf1-4536-9418-e3c6af70c9d5-client-ca" (OuterVolumeSpecName: "client-ca") pod "c7843233-5cf1-4536-9418-e3c6af70c9d5" (UID: "c7843233-5cf1-4536-9418-e3c6af70c9d5"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.560272 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c7843233-5cf1-4536-9418-e3c6af70c9d5-config" (OuterVolumeSpecName: "config") pod "c7843233-5cf1-4536-9418-e3c6af70c9d5" (UID: "c7843233-5cf1-4536-9418-e3c6af70c9d5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.560302 4982 generic.go:334] "Generic (PLEG): container finished" podID="b04a2a4c-1418-4f71-9a64-cb8eb5695e5e" containerID="12ddcb577bb8f6160aafa10cc0dcf75de64fcd6820e375ed399defd12dda1a9b" exitCode=0 Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.560341 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-ksvjl" event={"ID":"b04a2a4c-1418-4f71-9a64-cb8eb5695e5e","Type":"ContainerDied","Data":"12ddcb577bb8f6160aafa10cc0dcf75de64fcd6820e375ed399defd12dda1a9b"} Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.560370 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-ksvjl" event={"ID":"b04a2a4c-1418-4f71-9a64-cb8eb5695e5e","Type":"ContainerDied","Data":"d7addd78e4d47dc071dab769d8a2909cd4118d8952fea32e4faacb1710616952"} Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.560386 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-ksvjl" Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.564057 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7843233-5cf1-4536-9418-e3c6af70c9d5-kube-api-access-zwxtn" (OuterVolumeSpecName: "kube-api-access-zwxtn") pod "c7843233-5cf1-4536-9418-e3c6af70c9d5" (UID: "c7843233-5cf1-4536-9418-e3c6af70c9d5"). InnerVolumeSpecName "kube-api-access-zwxtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.564134 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7843233-5cf1-4536-9418-e3c6af70c9d5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "c7843233-5cf1-4536-9418-e3c6af70c9d5" (UID: "c7843233-5cf1-4536-9418-e3c6af70c9d5"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.585507 4982 scope.go:117] "RemoveContainer" containerID="fe4f37dcf76abcd0c334d82c7ce42a4cb9546ddc6088d3665c8225e40c1b3eb4" Jan 22 05:51:38 crc kubenswrapper[4982]: E0122 05:51:38.587054 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe4f37dcf76abcd0c334d82c7ce42a4cb9546ddc6088d3665c8225e40c1b3eb4\": container with ID starting with fe4f37dcf76abcd0c334d82c7ce42a4cb9546ddc6088d3665c8225e40c1b3eb4 not found: ID does not exist" containerID="fe4f37dcf76abcd0c334d82c7ce42a4cb9546ddc6088d3665c8225e40c1b3eb4" Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.587122 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe4f37dcf76abcd0c334d82c7ce42a4cb9546ddc6088d3665c8225e40c1b3eb4"} err="failed to get container status \"fe4f37dcf76abcd0c334d82c7ce42a4cb9546ddc6088d3665c8225e40c1b3eb4\": rpc error: code = NotFound desc = could not find container \"fe4f37dcf76abcd0c334d82c7ce42a4cb9546ddc6088d3665c8225e40c1b3eb4\": container with ID starting with fe4f37dcf76abcd0c334d82c7ce42a4cb9546ddc6088d3665c8225e40c1b3eb4 not found: ID does not exist" Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.587147 4982 scope.go:117] "RemoveContainer" containerID="12ddcb577bb8f6160aafa10cc0dcf75de64fcd6820e375ed399defd12dda1a9b" Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.588673 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f48b7b747-ksvjl"] Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.592381 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f48b7b747-ksvjl"] Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.603869 4982 scope.go:117] "RemoveContainer" containerID="12ddcb577bb8f6160aafa10cc0dcf75de64fcd6820e375ed399defd12dda1a9b" Jan 22 05:51:38 crc kubenswrapper[4982]: E0122 05:51:38.604419 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12ddcb577bb8f6160aafa10cc0dcf75de64fcd6820e375ed399defd12dda1a9b\": container with ID starting with 12ddcb577bb8f6160aafa10cc0dcf75de64fcd6820e375ed399defd12dda1a9b not found: ID does not exist" containerID="12ddcb577bb8f6160aafa10cc0dcf75de64fcd6820e375ed399defd12dda1a9b" Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.604481 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12ddcb577bb8f6160aafa10cc0dcf75de64fcd6820e375ed399defd12dda1a9b"} err="failed to get container status \"12ddcb577bb8f6160aafa10cc0dcf75de64fcd6820e375ed399defd12dda1a9b\": rpc error: code = NotFound desc = could not find container \"12ddcb577bb8f6160aafa10cc0dcf75de64fcd6820e375ed399defd12dda1a9b\": container with ID starting with 12ddcb577bb8f6160aafa10cc0dcf75de64fcd6820e375ed399defd12dda1a9b not found: ID does not exist" Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.660830 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c7843233-5cf1-4536-9418-e3c6af70c9d5-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.660883 4982 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/c7843233-5cf1-4536-9418-e3c6af70c9d5-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.660896 4982 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c7843233-5cf1-4536-9418-e3c6af70c9d5-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.660915 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7843233-5cf1-4536-9418-e3c6af70c9d5-config\") on node \"crc\" DevicePath \"\"" Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.660929 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zwxtn\" (UniqueName: \"kubernetes.io/projected/c7843233-5cf1-4536-9418-e3c6af70c9d5-kube-api-access-zwxtn\") on node \"crc\" DevicePath \"\"" Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.896067 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-66b6f984d5-ntv6m"] Jan 22 05:51:38 crc kubenswrapper[4982]: I0122 05:51:38.900705 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-66b6f984d5-ntv6m"] Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.405207 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-777d4f5c79-gmbws"] Jan 22 05:51:39 crc kubenswrapper[4982]: E0122 05:51:39.405496 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7843233-5cf1-4536-9418-e3c6af70c9d5" containerName="controller-manager" Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.405514 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7843233-5cf1-4536-9418-e3c6af70c9d5" containerName="controller-manager" Jan 22 05:51:39 crc kubenswrapper[4982]: E0122 05:51:39.405532 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b04a2a4c-1418-4f71-9a64-cb8eb5695e5e" containerName="route-controller-manager" Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.405541 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b04a2a4c-1418-4f71-9a64-cb8eb5695e5e" containerName="route-controller-manager" Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.405686 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7843233-5cf1-4536-9418-e3c6af70c9d5" containerName="controller-manager" Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.405702 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b04a2a4c-1418-4f71-9a64-cb8eb5695e5e" containerName="route-controller-manager" Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.406273 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-777d4f5c79-gmbws"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.410495 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.410524 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.410524 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.410930 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6c746dc547-95wrs"]
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.411666 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.411746 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.411833 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6c746dc547-95wrs"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.411888 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.417829 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.419005 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.419113 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.419008 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.419402 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.419514 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.423160 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-777d4f5c79-gmbws"]
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.430981 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.437298 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6c746dc547-95wrs"]
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.485549 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e3ea7c26-a2bd-41b7-8ad6-d895e456d351-client-ca\") pod \"route-controller-manager-777d4f5c79-gmbws\" (UID: \"e3ea7c26-a2bd-41b7-8ad6-d895e456d351\") " pod="openshift-route-controller-manager/route-controller-manager-777d4f5c79-gmbws"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.485657 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fe5cd431-3597-4776-a433-5d7d8a5db08d-client-ca\") pod \"controller-manager-6c746dc547-95wrs\" (UID: \"fe5cd431-3597-4776-a433-5d7d8a5db08d\") " pod="openshift-controller-manager/controller-manager-6c746dc547-95wrs"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.485687 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3ea7c26-a2bd-41b7-8ad6-d895e456d351-config\") pod \"route-controller-manager-777d4f5c79-gmbws\" (UID: \"e3ea7c26-a2bd-41b7-8ad6-d895e456d351\") " pod="openshift-route-controller-manager/route-controller-manager-777d4f5c79-gmbws"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.485723 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tndf5\" (UniqueName: \"kubernetes.io/projected/e3ea7c26-a2bd-41b7-8ad6-d895e456d351-kube-api-access-tndf5\") pod \"route-controller-manager-777d4f5c79-gmbws\" (UID: \"e3ea7c26-a2bd-41b7-8ad6-d895e456d351\") " pod="openshift-route-controller-manager/route-controller-manager-777d4f5c79-gmbws"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.485772 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fe5cd431-3597-4776-a433-5d7d8a5db08d-serving-cert\") pod \"controller-manager-6c746dc547-95wrs\" (UID: \"fe5cd431-3597-4776-a433-5d7d8a5db08d\") " pod="openshift-controller-manager/controller-manager-6c746dc547-95wrs"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.485801 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3ea7c26-a2bd-41b7-8ad6-d895e456d351-serving-cert\") pod \"route-controller-manager-777d4f5c79-gmbws\" (UID: \"e3ea7c26-a2bd-41b7-8ad6-d895e456d351\") " pod="openshift-route-controller-manager/route-controller-manager-777d4f5c79-gmbws"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.485825 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe5cd431-3597-4776-a433-5d7d8a5db08d-config\") pod \"controller-manager-6c746dc547-95wrs\" (UID: \"fe5cd431-3597-4776-a433-5d7d8a5db08d\") " pod="openshift-controller-manager/controller-manager-6c746dc547-95wrs"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.485881 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lc2f2\" (UniqueName: \"kubernetes.io/projected/fe5cd431-3597-4776-a433-5d7d8a5db08d-kube-api-access-lc2f2\") pod \"controller-manager-6c746dc547-95wrs\" (UID: \"fe5cd431-3597-4776-a433-5d7d8a5db08d\") " pod="openshift-controller-manager/controller-manager-6c746dc547-95wrs"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.485914 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fe5cd431-3597-4776-a433-5d7d8a5db08d-proxy-ca-bundles\") pod \"controller-manager-6c746dc547-95wrs\" (UID: \"fe5cd431-3597-4776-a433-5d7d8a5db08d\") " pod="openshift-controller-manager/controller-manager-6c746dc547-95wrs"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.587778 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fe5cd431-3597-4776-a433-5d7d8a5db08d-client-ca\") pod \"controller-manager-6c746dc547-95wrs\" (UID: \"fe5cd431-3597-4776-a433-5d7d8a5db08d\") " pod="openshift-controller-manager/controller-manager-6c746dc547-95wrs"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.587876 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3ea7c26-a2bd-41b7-8ad6-d895e456d351-config\") pod \"route-controller-manager-777d4f5c79-gmbws\" (UID: \"e3ea7c26-a2bd-41b7-8ad6-d895e456d351\") " pod="openshift-route-controller-manager/route-controller-manager-777d4f5c79-gmbws"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.587924 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tndf5\" (UniqueName: \"kubernetes.io/projected/e3ea7c26-a2bd-41b7-8ad6-d895e456d351-kube-api-access-tndf5\") pod \"route-controller-manager-777d4f5c79-gmbws\" (UID: \"e3ea7c26-a2bd-41b7-8ad6-d895e456d351\") " pod="openshift-route-controller-manager/route-controller-manager-777d4f5c79-gmbws"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.587994 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fe5cd431-3597-4776-a433-5d7d8a5db08d-serving-cert\") pod \"controller-manager-6c746dc547-95wrs\" (UID: \"fe5cd431-3597-4776-a433-5d7d8a5db08d\") " pod="openshift-controller-manager/controller-manager-6c746dc547-95wrs"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.588037 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3ea7c26-a2bd-41b7-8ad6-d895e456d351-serving-cert\") pod \"route-controller-manager-777d4f5c79-gmbws\" (UID: \"e3ea7c26-a2bd-41b7-8ad6-d895e456d351\") " pod="openshift-route-controller-manager/route-controller-manager-777d4f5c79-gmbws"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.588075 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe5cd431-3597-4776-a433-5d7d8a5db08d-config\") pod \"controller-manager-6c746dc547-95wrs\" (UID: \"fe5cd431-3597-4776-a433-5d7d8a5db08d\") " pod="openshift-controller-manager/controller-manager-6c746dc547-95wrs"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.588123 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lc2f2\" (UniqueName: \"kubernetes.io/projected/fe5cd431-3597-4776-a433-5d7d8a5db08d-kube-api-access-lc2f2\") pod \"controller-manager-6c746dc547-95wrs\" (UID: \"fe5cd431-3597-4776-a433-5d7d8a5db08d\") " pod="openshift-controller-manager/controller-manager-6c746dc547-95wrs"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.588169 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fe5cd431-3597-4776-a433-5d7d8a5db08d-proxy-ca-bundles\") pod \"controller-manager-6c746dc547-95wrs\" (UID: \"fe5cd431-3597-4776-a433-5d7d8a5db08d\") " pod="openshift-controller-manager/controller-manager-6c746dc547-95wrs"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.588226 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e3ea7c26-a2bd-41b7-8ad6-d895e456d351-client-ca\") pod \"route-controller-manager-777d4f5c79-gmbws\" (UID: \"e3ea7c26-a2bd-41b7-8ad6-d895e456d351\") " pod="openshift-route-controller-manager/route-controller-manager-777d4f5c79-gmbws"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.589139 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fe5cd431-3597-4776-a433-5d7d8a5db08d-client-ca\") pod \"controller-manager-6c746dc547-95wrs\" (UID: \"fe5cd431-3597-4776-a433-5d7d8a5db08d\") " pod="openshift-controller-manager/controller-manager-6c746dc547-95wrs"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.589844 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e3ea7c26-a2bd-41b7-8ad6-d895e456d351-client-ca\") pod \"route-controller-manager-777d4f5c79-gmbws\" (UID: \"e3ea7c26-a2bd-41b7-8ad6-d895e456d351\") " pod="openshift-route-controller-manager/route-controller-manager-777d4f5c79-gmbws"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.590449 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fe5cd431-3597-4776-a433-5d7d8a5db08d-proxy-ca-bundles\") pod \"controller-manager-6c746dc547-95wrs\" (UID: \"fe5cd431-3597-4776-a433-5d7d8a5db08d\") " pod="openshift-controller-manager/controller-manager-6c746dc547-95wrs"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.591265 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3ea7c26-a2bd-41b7-8ad6-d895e456d351-config\") pod \"route-controller-manager-777d4f5c79-gmbws\" (UID: \"e3ea7c26-a2bd-41b7-8ad6-d895e456d351\") " pod="openshift-route-controller-manager/route-controller-manager-777d4f5c79-gmbws"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.591342 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe5cd431-3597-4776-a433-5d7d8a5db08d-config\") pod \"controller-manager-6c746dc547-95wrs\" (UID: \"fe5cd431-3597-4776-a433-5d7d8a5db08d\") " pod="openshift-controller-manager/controller-manager-6c746dc547-95wrs"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.599178 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3ea7c26-a2bd-41b7-8ad6-d895e456d351-serving-cert\") pod \"route-controller-manager-777d4f5c79-gmbws\" (UID: \"e3ea7c26-a2bd-41b7-8ad6-d895e456d351\") " pod="openshift-route-controller-manager/route-controller-manager-777d4f5c79-gmbws"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.604263 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fe5cd431-3597-4776-a433-5d7d8a5db08d-serving-cert\") pod \"controller-manager-6c746dc547-95wrs\" (UID: \"fe5cd431-3597-4776-a433-5d7d8a5db08d\") " pod="openshift-controller-manager/controller-manager-6c746dc547-95wrs"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.606905 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.607053 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.615356 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.616722 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.630678 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lc2f2\" (UniqueName: \"kubernetes.io/projected/fe5cd431-3597-4776-a433-5d7d8a5db08d-kube-api-access-lc2f2\") pod \"controller-manager-6c746dc547-95wrs\" (UID: \"fe5cd431-3597-4776-a433-5d7d8a5db08d\") " pod="openshift-controller-manager/controller-manager-6c746dc547-95wrs"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.630947 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tndf5\" (UniqueName: \"kubernetes.io/projected/e3ea7c26-a2bd-41b7-8ad6-d895e456d351-kube-api-access-tndf5\") pod \"route-controller-manager-777d4f5c79-gmbws\" (UID: \"e3ea7c26-a2bd-41b7-8ad6-d895e456d351\") " pod="openshift-route-controller-manager/route-controller-manager-777d4f5c79-gmbws"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.730447 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b04a2a4c-1418-4f71-9a64-cb8eb5695e5e" path="/var/lib/kubelet/pods/b04a2a4c-1418-4f71-9a64-cb8eb5695e5e/volumes"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.731922 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7843233-5cf1-4536-9418-e3c6af70c9d5" path="/var/lib/kubelet/pods/c7843233-5cf1-4536-9418-e3c6af70c9d5/volumes"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.752329 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.759464 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-777d4f5c79-gmbws"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.764233 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Jan 22 05:51:39 crc kubenswrapper[4982]: I0122 05:51:39.771669 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6c746dc547-95wrs"
Jan 22 05:51:40 crc kubenswrapper[4982]: I0122 05:51:40.227047 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-777d4f5c79-gmbws"]
Jan 22 05:51:40 crc kubenswrapper[4982]: W0122 05:51:40.233589 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode3ea7c26_a2bd_41b7_8ad6_d895e456d351.slice/crio-94c71247ad09fa6579a350fd1cef30519137e6fd15bab03f0af3f00187ef0ba8 WatchSource:0}: Error finding container 94c71247ad09fa6579a350fd1cef30519137e6fd15bab03f0af3f00187ef0ba8: Status 404 returned error can't find the container with id 94c71247ad09fa6579a350fd1cef30519137e6fd15bab03f0af3f00187ef0ba8
Jan 22 05:51:40 crc kubenswrapper[4982]: I0122 05:51:40.291172 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6c746dc547-95wrs"]
Jan 22 05:51:40 crc kubenswrapper[4982]: W0122 05:51:40.299490 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfe5cd431_3597_4776_a433_5d7d8a5db08d.slice/crio-3a03ea206240b1774507cd23cf0673089857f849f9500615347daefe774071f6 WatchSource:0}: Error finding container 3a03ea206240b1774507cd23cf0673089857f849f9500615347daefe774071f6: Status 404 returned error can't find the container with id 3a03ea206240b1774507cd23cf0673089857f849f9500615347daefe774071f6
Jan 22 05:51:40 crc kubenswrapper[4982]: I0122 05:51:40.580464 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6c746dc547-95wrs" event={"ID":"fe5cd431-3597-4776-a433-5d7d8a5db08d","Type":"ContainerStarted","Data":"c047ade5d7d04e331eb23fbc53a95049141b539570ec892ef7729167da7c43d0"}
Jan 22 05:51:40 crc kubenswrapper[4982]: I0122 05:51:40.580544 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6c746dc547-95wrs" event={"ID":"fe5cd431-3597-4776-a433-5d7d8a5db08d","Type":"ContainerStarted","Data":"3a03ea206240b1774507cd23cf0673089857f849f9500615347daefe774071f6"}
Jan 22 05:51:40 crc kubenswrapper[4982]: I0122 05:51:40.580981 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6c746dc547-95wrs"
Jan 22 05:51:40 crc kubenswrapper[4982]: I0122 05:51:40.582917 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-777d4f5c79-gmbws" event={"ID":"e3ea7c26-a2bd-41b7-8ad6-d895e456d351","Type":"ContainerStarted","Data":"c0b0396ab0bec81671ea4f84742143a65110ba736e175494a1b15a304a0112b9"}
Jan 22 05:51:40 crc kubenswrapper[4982]: I0122 05:51:40.582980 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-777d4f5c79-gmbws" event={"ID":"e3ea7c26-a2bd-41b7-8ad6-d895e456d351","Type":"ContainerStarted","Data":"94c71247ad09fa6579a350fd1cef30519137e6fd15bab03f0af3f00187ef0ba8"}
Jan 22 05:51:40 crc kubenswrapper[4982]: I0122 05:51:40.583472 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-777d4f5c79-gmbws"
Jan 22 05:51:40 crc kubenswrapper[4982]: I0122 05:51:40.590388 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6c746dc547-95wrs"
Jan 22 05:51:40 crc kubenswrapper[4982]: I0122 05:51:40.623235 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6c746dc547-95wrs" podStartSLOduration=3.6231893360000003 podStartE2EDuration="3.623189336s" podCreationTimestamp="2026-01-22 05:51:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:51:40.615206893 +0000 UTC m=+361.453844906" watchObservedRunningTime="2026-01-22 05:51:40.623189336 +0000 UTC m=+361.461827369"
Jan 22 05:51:40 crc kubenswrapper[4982]: I0122 05:51:40.664122 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-777d4f5c79-gmbws" podStartSLOduration=3.664104936 podStartE2EDuration="3.664104936s" podCreationTimestamp="2026-01-22 05:51:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:51:40.663557801 +0000 UTC m=+361.502195814" watchObservedRunningTime="2026-01-22 05:51:40.664104936 +0000 UTC m=+361.502742939"
Jan 22 05:51:40 crc kubenswrapper[4982]: I0122 05:51:40.867972 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-777d4f5c79-gmbws"
Jan 22 05:51:46 crc kubenswrapper[4982]: I0122 05:51:46.768846 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6c746dc547-95wrs"]
Jan 22 05:51:46 crc kubenswrapper[4982]: I0122 05:51:46.771089 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-6c746dc547-95wrs" podUID="fe5cd431-3597-4776-a433-5d7d8a5db08d" containerName="controller-manager" containerID="cri-o://c047ade5d7d04e331eb23fbc53a95049141b539570ec892ef7729167da7c43d0" gracePeriod=30
Jan 22 05:51:47 crc kubenswrapper[4982]: I0122 05:51:47.393127 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6c746dc547-95wrs"
Jan 22 05:51:47 crc kubenswrapper[4982]: I0122 05:51:47.541124 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lc2f2\" (UniqueName: \"kubernetes.io/projected/fe5cd431-3597-4776-a433-5d7d8a5db08d-kube-api-access-lc2f2\") pod \"fe5cd431-3597-4776-a433-5d7d8a5db08d\" (UID: \"fe5cd431-3597-4776-a433-5d7d8a5db08d\") "
Jan 22 05:51:47 crc kubenswrapper[4982]: I0122 05:51:47.541185 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fe5cd431-3597-4776-a433-5d7d8a5db08d-proxy-ca-bundles\") pod \"fe5cd431-3597-4776-a433-5d7d8a5db08d\" (UID: \"fe5cd431-3597-4776-a433-5d7d8a5db08d\") "
Jan 22 05:51:47 crc kubenswrapper[4982]: I0122 05:51:47.541223 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fe5cd431-3597-4776-a433-5d7d8a5db08d-client-ca\") pod \"fe5cd431-3597-4776-a433-5d7d8a5db08d\" (UID: \"fe5cd431-3597-4776-a433-5d7d8a5db08d\") "
Jan 22 05:51:47 crc kubenswrapper[4982]: I0122 05:51:47.541251 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe5cd431-3597-4776-a433-5d7d8a5db08d-config\") pod \"fe5cd431-3597-4776-a433-5d7d8a5db08d\" (UID: \"fe5cd431-3597-4776-a433-5d7d8a5db08d\") "
Jan 22 05:51:47 crc kubenswrapper[4982]: I0122 05:51:47.541310 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fe5cd431-3597-4776-a433-5d7d8a5db08d-serving-cert\") pod \"fe5cd431-3597-4776-a433-5d7d8a5db08d\" (UID: \"fe5cd431-3597-4776-a433-5d7d8a5db08d\") "
Jan 22 05:51:47 crc kubenswrapper[4982]: I0122 05:51:47.542046 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe5cd431-3597-4776-a433-5d7d8a5db08d-client-ca" (OuterVolumeSpecName: "client-ca") pod "fe5cd431-3597-4776-a433-5d7d8a5db08d" (UID: "fe5cd431-3597-4776-a433-5d7d8a5db08d"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 05:51:47 crc kubenswrapper[4982]: I0122 05:51:47.542125 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe5cd431-3597-4776-a433-5d7d8a5db08d-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "fe5cd431-3597-4776-a433-5d7d8a5db08d" (UID: "fe5cd431-3597-4776-a433-5d7d8a5db08d"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 05:51:47 crc kubenswrapper[4982]: I0122 05:51:47.542413 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe5cd431-3597-4776-a433-5d7d8a5db08d-config" (OuterVolumeSpecName: "config") pod "fe5cd431-3597-4776-a433-5d7d8a5db08d" (UID: "fe5cd431-3597-4776-a433-5d7d8a5db08d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 05:51:47 crc kubenswrapper[4982]: I0122 05:51:47.565494 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe5cd431-3597-4776-a433-5d7d8a5db08d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "fe5cd431-3597-4776-a433-5d7d8a5db08d" (UID: "fe5cd431-3597-4776-a433-5d7d8a5db08d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 05:51:47 crc kubenswrapper[4982]: I0122 05:51:47.565595 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe5cd431-3597-4776-a433-5d7d8a5db08d-kube-api-access-lc2f2" (OuterVolumeSpecName: "kube-api-access-lc2f2") pod "fe5cd431-3597-4776-a433-5d7d8a5db08d" (UID: "fe5cd431-3597-4776-a433-5d7d8a5db08d"). InnerVolumeSpecName "kube-api-access-lc2f2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 05:51:47 crc kubenswrapper[4982]: I0122 05:51:47.631094 4982 generic.go:334] "Generic (PLEG): container finished" podID="fe5cd431-3597-4776-a433-5d7d8a5db08d" containerID="c047ade5d7d04e331eb23fbc53a95049141b539570ec892ef7729167da7c43d0" exitCode=0
Jan 22 05:51:47 crc kubenswrapper[4982]: I0122 05:51:47.631162 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6c746dc547-95wrs" event={"ID":"fe5cd431-3597-4776-a433-5d7d8a5db08d","Type":"ContainerDied","Data":"c047ade5d7d04e331eb23fbc53a95049141b539570ec892ef7729167da7c43d0"}
Jan 22 05:51:47 crc kubenswrapper[4982]: I0122 05:51:47.631200 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6c746dc547-95wrs" event={"ID":"fe5cd431-3597-4776-a433-5d7d8a5db08d","Type":"ContainerDied","Data":"3a03ea206240b1774507cd23cf0673089857f849f9500615347daefe774071f6"}
Jan 22 05:51:47 crc kubenswrapper[4982]: I0122 05:51:47.631246 4982 scope.go:117] "RemoveContainer" containerID="c047ade5d7d04e331eb23fbc53a95049141b539570ec892ef7729167da7c43d0"
Jan 22 05:51:47 crc kubenswrapper[4982]: I0122 05:51:47.634074 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6c746dc547-95wrs"
Jan 22 05:51:47 crc kubenswrapper[4982]: I0122 05:51:47.642716 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lc2f2\" (UniqueName: \"kubernetes.io/projected/fe5cd431-3597-4776-a433-5d7d8a5db08d-kube-api-access-lc2f2\") on node \"crc\" DevicePath \"\""
Jan 22 05:51:47 crc kubenswrapper[4982]: I0122 05:51:47.642752 4982 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fe5cd431-3597-4776-a433-5d7d8a5db08d-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Jan 22 05:51:47 crc kubenswrapper[4982]: I0122 05:51:47.642764 4982 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fe5cd431-3597-4776-a433-5d7d8a5db08d-client-ca\") on node \"crc\" DevicePath \"\""
Jan 22 05:51:47 crc kubenswrapper[4982]: I0122 05:51:47.642775 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe5cd431-3597-4776-a433-5d7d8a5db08d-config\") on node \"crc\" DevicePath \"\""
Jan 22 05:51:47 crc kubenswrapper[4982]: I0122 05:51:47.642788 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fe5cd431-3597-4776-a433-5d7d8a5db08d-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 22 05:51:47 crc kubenswrapper[4982]: I0122 05:51:47.648988 4982 scope.go:117] "RemoveContainer" containerID="c047ade5d7d04e331eb23fbc53a95049141b539570ec892ef7729167da7c43d0"
Jan 22 05:51:47 crc kubenswrapper[4982]: E0122 05:51:47.649651 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c047ade5d7d04e331eb23fbc53a95049141b539570ec892ef7729167da7c43d0\": container with ID starting with c047ade5d7d04e331eb23fbc53a95049141b539570ec892ef7729167da7c43d0 not found: ID does not exist" containerID="c047ade5d7d04e331eb23fbc53a95049141b539570ec892ef7729167da7c43d0"
Jan 22 05:51:47 crc kubenswrapper[4982]: I0122 05:51:47.649736 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c047ade5d7d04e331eb23fbc53a95049141b539570ec892ef7729167da7c43d0"} err="failed to get container status \"c047ade5d7d04e331eb23fbc53a95049141b539570ec892ef7729167da7c43d0\": rpc error: code = NotFound desc = could not find container \"c047ade5d7d04e331eb23fbc53a95049141b539570ec892ef7729167da7c43d0\": container with ID starting with c047ade5d7d04e331eb23fbc53a95049141b539570ec892ef7729167da7c43d0 not found: ID does not exist"
Jan 22 05:51:47 crc kubenswrapper[4982]: I0122 05:51:47.667287 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6c746dc547-95wrs"]
Jan 22 05:51:47 crc kubenswrapper[4982]: I0122 05:51:47.672758 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-6c746dc547-95wrs"]
Jan 22 05:51:47 crc kubenswrapper[4982]: I0122 05:51:47.729671 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe5cd431-3597-4776-a433-5d7d8a5db08d" path="/var/lib/kubelet/pods/fe5cd431-3597-4776-a433-5d7d8a5db08d/volumes"
Jan 22 05:51:48 crc kubenswrapper[4982]: I0122 05:51:48.408235 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-66b6f984d5-l26fw"]
Jan 22 05:51:48 crc kubenswrapper[4982]: E0122 05:51:48.408535 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe5cd431-3597-4776-a433-5d7d8a5db08d" containerName="controller-manager"
Jan 22 05:51:48 crc kubenswrapper[4982]: I0122 05:51:48.408552 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe5cd431-3597-4776-a433-5d7d8a5db08d" containerName="controller-manager"
Jan 22 05:51:48 crc kubenswrapper[4982]: I0122 05:51:48.408659 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe5cd431-3597-4776-a433-5d7d8a5db08d" containerName="controller-manager"
Jan 22 05:51:48 crc kubenswrapper[4982]: I0122 05:51:48.409068 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-66b6f984d5-l26fw"
Jan 22 05:51:48 crc kubenswrapper[4982]: I0122 05:51:48.412153 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Jan 22 05:51:48 crc kubenswrapper[4982]: I0122 05:51:48.413010 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Jan 22 05:51:48 crc kubenswrapper[4982]: I0122 05:51:48.413136 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Jan 22 05:51:48 crc kubenswrapper[4982]: I0122 05:51:48.413406 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Jan 22 05:51:48 crc kubenswrapper[4982]: I0122 05:51:48.413555 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Jan 22 05:51:48 crc kubenswrapper[4982]: I0122 05:51:48.419697 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Jan 22 05:51:48 crc kubenswrapper[4982]: I0122 05:51:48.422240 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Jan 22 05:51:48 crc kubenswrapper[4982]: I0122 05:51:48.471873 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-66b6f984d5-l26fw"]
Jan 22 05:51:48 crc kubenswrapper[4982]: I0122 05:51:48.555726 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/861aaef6-69b0-4607-af3a-d238ae41ac38-proxy-ca-bundles\") pod \"controller-manager-66b6f984d5-l26fw\" (UID: \"861aaef6-69b0-4607-af3a-d238ae41ac38\") " pod="openshift-controller-manager/controller-manager-66b6f984d5-l26fw"
Jan 22 05:51:48 crc kubenswrapper[4982]: I0122 05:51:48.555800 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/861aaef6-69b0-4607-af3a-d238ae41ac38-client-ca\") pod \"controller-manager-66b6f984d5-l26fw\" (UID: \"861aaef6-69b0-4607-af3a-d238ae41ac38\") " pod="openshift-controller-manager/controller-manager-66b6f984d5-l26fw"
Jan 22 05:51:48 crc kubenswrapper[4982]: I0122 05:51:48.555844 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/861aaef6-69b0-4607-af3a-d238ae41ac38-config\") pod \"controller-manager-66b6f984d5-l26fw\" (UID: \"861aaef6-69b0-4607-af3a-d238ae41ac38\") " pod="openshift-controller-manager/controller-manager-66b6f984d5-l26fw"
Jan 22 05:51:48 crc kubenswrapper[4982]: I0122 05:51:48.555909 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/861aaef6-69b0-4607-af3a-d238ae41ac38-serving-cert\") pod \"controller-manager-66b6f984d5-l26fw\" (UID: \"861aaef6-69b0-4607-af3a-d238ae41ac38\") " pod="openshift-controller-manager/controller-manager-66b6f984d5-l26fw"
Jan 22 05:51:48 crc kubenswrapper[4982]: I0122 05:51:48.555973 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnrlt\" (UniqueName: \"kubernetes.io/projected/861aaef6-69b0-4607-af3a-d238ae41ac38-kube-api-access-vnrlt\") pod \"controller-manager-66b6f984d5-l26fw\" (UID: \"861aaef6-69b0-4607-af3a-d238ae41ac38\") " pod="openshift-controller-manager/controller-manager-66b6f984d5-l26fw"
Jan 22 05:51:48 crc kubenswrapper[4982]: I0122 05:51:48.656902 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/861aaef6-69b0-4607-af3a-d238ae41ac38-client-ca\") pod \"controller-manager-66b6f984d5-l26fw\" (UID: \"861aaef6-69b0-4607-af3a-d238ae41ac38\") " pod="openshift-controller-manager/controller-manager-66b6f984d5-l26fw"
Jan 22 05:51:48 crc kubenswrapper[4982]: I0122 05:51:48.656948 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/861aaef6-69b0-4607-af3a-d238ae41ac38-config\") pod \"controller-manager-66b6f984d5-l26fw\" (UID: \"861aaef6-69b0-4607-af3a-d238ae41ac38\") " pod="openshift-controller-manager/controller-manager-66b6f984d5-l26fw"
Jan 22 05:51:48 crc kubenswrapper[4982]: I0122 05:51:48.656974 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/861aaef6-69b0-4607-af3a-d238ae41ac38-serving-cert\") pod \"controller-manager-66b6f984d5-l26fw\" (UID: \"861aaef6-69b0-4607-af3a-d238ae41ac38\") " pod="openshift-controller-manager/controller-manager-66b6f984d5-l26fw"
Jan 22 05:51:48 crc kubenswrapper[4982]: I0122 05:51:48.657004 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnrlt\" (UniqueName: \"kubernetes.io/projected/861aaef6-69b0-4607-af3a-d238ae41ac38-kube-api-access-vnrlt\") pod \"controller-manager-66b6f984d5-l26fw\" (UID: \"861aaef6-69b0-4607-af3a-d238ae41ac38\") " pod="openshift-controller-manager/controller-manager-66b6f984d5-l26fw"
Jan 22 05:51:48 crc kubenswrapper[4982]: I0122 05:51:48.657044 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/861aaef6-69b0-4607-af3a-d238ae41ac38-proxy-ca-bundles\") pod \"controller-manager-66b6f984d5-l26fw\" (UID: \"861aaef6-69b0-4607-af3a-d238ae41ac38\") " pod="openshift-controller-manager/controller-manager-66b6f984d5-l26fw"
Jan 22 05:51:48 crc kubenswrapper[4982]: I0122 05:51:48.658195 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/861aaef6-69b0-4607-af3a-d238ae41ac38-client-ca\") pod \"controller-manager-66b6f984d5-l26fw\" (UID: \"861aaef6-69b0-4607-af3a-d238ae41ac38\") " pod="openshift-controller-manager/controller-manager-66b6f984d5-l26fw"
Jan 22 05:51:48 crc kubenswrapper[4982]: I0122 05:51:48.659033 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/861aaef6-69b0-4607-af3a-d238ae41ac38-config\") pod \"controller-manager-66b6f984d5-l26fw\" (UID: \"861aaef6-69b0-4607-af3a-d238ae41ac38\") " pod="openshift-controller-manager/controller-manager-66b6f984d5-l26fw"
Jan 22 05:51:48 crc kubenswrapper[4982]: I0122 05:51:48.659049 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/861aaef6-69b0-4607-af3a-d238ae41ac38-proxy-ca-bundles\") pod \"controller-manager-66b6f984d5-l26fw\" (UID: \"861aaef6-69b0-4607-af3a-d238ae41ac38\") " pod="openshift-controller-manager/controller-manager-66b6f984d5-l26fw"
Jan 22 05:51:48 crc kubenswrapper[4982]: I0122 05:51:48.663886 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/861aaef6-69b0-4607-af3a-d238ae41ac38-serving-cert\") pod \"controller-manager-66b6f984d5-l26fw\" (UID: \"861aaef6-69b0-4607-af3a-d238ae41ac38\") " pod="openshift-controller-manager/controller-manager-66b6f984d5-l26fw"
Jan 22 05:51:48 crc kubenswrapper[4982]: I0122 05:51:48.676246 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnrlt\" (UniqueName: \"kubernetes.io/projected/861aaef6-69b0-4607-af3a-d238ae41ac38-kube-api-access-vnrlt\") pod \"controller-manager-66b6f984d5-l26fw\" (UID: \"861aaef6-69b0-4607-af3a-d238ae41ac38\") " pod="openshift-controller-manager/controller-manager-66b6f984d5-l26fw"
Jan 22 05:51:48 crc kubenswrapper[4982]: I0122 05:51:48.724799 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-66b6f984d5-l26fw"
Jan 22 05:51:48 crc kubenswrapper[4982]: I0122 05:51:48.974059 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 05:51:48 crc kubenswrapper[4982]: I0122 05:51:48.974124 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 05:51:49 crc kubenswrapper[4982]: I0122 05:51:49.003465 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-66b6f984d5-l26fw"]
Jan 22 05:51:49 crc kubenswrapper[4982]: I0122 05:51:49.645135 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-66b6f984d5-l26fw" event={"ID":"861aaef6-69b0-4607-af3a-d238ae41ac38","Type":"ContainerStarted","Data":"dfe0581a41c8095b3955b073b1bcaead559fe3ea59ca3890c00b98bdfb23a79f"}
Jan 22 05:51:49 crc kubenswrapper[4982]: I0122 05:51:49.645595 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-66b6f984d5-l26fw"
Jan 22 05:51:49 crc kubenswrapper[4982]: I0122 05:51:49.645645 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-66b6f984d5-l26fw" event={"ID":"861aaef6-69b0-4607-af3a-d238ae41ac38","Type":"ContainerStarted","Data":"bd2b502cf9e02c9a8ddfe856c4a546360cdb83091f898a615d614e66d09a39ab"}
Jan 22 05:51:49 crc kubenswrapper[4982]: I0122 05:51:49.652089 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-66b6f984d5-l26fw"
Jan 22 05:51:49 crc kubenswrapper[4982]: I0122 05:51:49.695901 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-66b6f984d5-l26fw" podStartSLOduration=3.695872579 podStartE2EDuration="3.695872579s" podCreationTimestamp="2026-01-22 05:51:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:51:49.690659824 +0000 UTC m=+370.529297837" watchObservedRunningTime="2026-01-22 05:51:49.695872579 +0000 UTC m=+370.534510592"
Jan 22 05:51:51 crc kubenswrapper[4982]: I0122 05:51:51.679424 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-777d4f5c79-gmbws"]
Jan 22 05:51:51 crc kubenswrapper[4982]: I0122 05:51:51.680043 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-777d4f5c79-gmbws" podUID="e3ea7c26-a2bd-41b7-8ad6-d895e456d351" containerName="route-controller-manager" containerID="cri-o://c0b0396ab0bec81671ea4f84742143a65110ba736e175494a1b15a304a0112b9" gracePeriod=30
Jan 22 05:51:52 crc kubenswrapper[4982]: I0122 05:51:52.177678 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-777d4f5c79-gmbws"
Jan 22 05:51:52 crc kubenswrapper[4982]: I0122 05:51:52.340878 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3ea7c26-a2bd-41b7-8ad6-d895e456d351-config\") pod \"e3ea7c26-a2bd-41b7-8ad6-d895e456d351\" (UID: \"e3ea7c26-a2bd-41b7-8ad6-d895e456d351\") "
Jan 22 05:51:52 crc kubenswrapper[4982]: I0122 05:51:52.340953 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3ea7c26-a2bd-41b7-8ad6-d895e456d351-serving-cert\") pod \"e3ea7c26-a2bd-41b7-8ad6-d895e456d351\" (UID: \"e3ea7c26-a2bd-41b7-8ad6-d895e456d351\") "
Jan 22 05:51:52 crc kubenswrapper[4982]: I0122 05:51:52.341003 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e3ea7c26-a2bd-41b7-8ad6-d895e456d351-client-ca\") pod \"e3ea7c26-a2bd-41b7-8ad6-d895e456d351\" (UID: \"e3ea7c26-a2bd-41b7-8ad6-d895e456d351\") "
Jan 22 05:51:52 crc kubenswrapper[4982]: I0122 05:51:52.341070 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tndf5\" (UniqueName: \"kubernetes.io/projected/e3ea7c26-a2bd-41b7-8ad6-d895e456d351-kube-api-access-tndf5\") pod \"e3ea7c26-a2bd-41b7-8ad6-d895e456d351\" (UID: \"e3ea7c26-a2bd-41b7-8ad6-d895e456d351\") "
Jan 22 05:51:52 crc kubenswrapper[4982]: I0122 05:51:52.342798 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3ea7c26-a2bd-41b7-8ad6-d895e456d351-client-ca" (OuterVolumeSpecName: "client-ca") pod "e3ea7c26-a2bd-41b7-8ad6-d895e456d351" (UID: "e3ea7c26-a2bd-41b7-8ad6-d895e456d351"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 05:51:52 crc kubenswrapper[4982]: I0122 05:51:52.342920 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3ea7c26-a2bd-41b7-8ad6-d895e456d351-config" (OuterVolumeSpecName: "config") pod "e3ea7c26-a2bd-41b7-8ad6-d895e456d351" (UID: "e3ea7c26-a2bd-41b7-8ad6-d895e456d351"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 05:51:52 crc kubenswrapper[4982]: I0122 05:51:52.348219 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3ea7c26-a2bd-41b7-8ad6-d895e456d351-kube-api-access-tndf5" (OuterVolumeSpecName: "kube-api-access-tndf5") pod "e3ea7c26-a2bd-41b7-8ad6-d895e456d351" (UID: "e3ea7c26-a2bd-41b7-8ad6-d895e456d351"). InnerVolumeSpecName "kube-api-access-tndf5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 05:51:52 crc kubenswrapper[4982]: I0122 05:51:52.365479 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3ea7c26-a2bd-41b7-8ad6-d895e456d351-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e3ea7c26-a2bd-41b7-8ad6-d895e456d351" (UID: "e3ea7c26-a2bd-41b7-8ad6-d895e456d351"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 05:51:52 crc kubenswrapper[4982]: I0122 05:51:52.442896 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3ea7c26-a2bd-41b7-8ad6-d895e456d351-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 22 05:51:52 crc kubenswrapper[4982]: I0122 05:51:52.442958 4982 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e3ea7c26-a2bd-41b7-8ad6-d895e456d351-client-ca\") on node \"crc\" DevicePath \"\""
Jan 22 05:51:52 crc kubenswrapper[4982]: I0122 05:51:52.442980 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tndf5\" (UniqueName: \"kubernetes.io/projected/e3ea7c26-a2bd-41b7-8ad6-d895e456d351-kube-api-access-tndf5\") on node \"crc\" DevicePath \"\""
Jan 22 05:51:52 crc kubenswrapper[4982]: I0122 05:51:52.443002 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3ea7c26-a2bd-41b7-8ad6-d895e456d351-config\") on node \"crc\" DevicePath \"\""
Jan 22 05:51:52 crc kubenswrapper[4982]: I0122 05:51:52.661963 4982 generic.go:334] "Generic (PLEG): container finished" podID="e3ea7c26-a2bd-41b7-8ad6-d895e456d351" containerID="c0b0396ab0bec81671ea4f84742143a65110ba736e175494a1b15a304a0112b9" exitCode=0
Jan 22 05:51:52 crc kubenswrapper[4982]: I0122 05:51:52.662019 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-777d4f5c79-gmbws" event={"ID":"e3ea7c26-a2bd-41b7-8ad6-d895e456d351","Type":"ContainerDied","Data":"c0b0396ab0bec81671ea4f84742143a65110ba736e175494a1b15a304a0112b9"}
Jan 22 05:51:52 crc kubenswrapper[4982]: I0122 05:51:52.662023 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-777d4f5c79-gmbws"
Jan 22 05:51:52 crc kubenswrapper[4982]: I0122 05:51:52.662062 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-777d4f5c79-gmbws" event={"ID":"e3ea7c26-a2bd-41b7-8ad6-d895e456d351","Type":"ContainerDied","Data":"94c71247ad09fa6579a350fd1cef30519137e6fd15bab03f0af3f00187ef0ba8"}
Jan 22 05:51:52 crc kubenswrapper[4982]: I0122 05:51:52.662092 4982 scope.go:117] "RemoveContainer" containerID="c0b0396ab0bec81671ea4f84742143a65110ba736e175494a1b15a304a0112b9"
Jan 22 05:51:52 crc kubenswrapper[4982]: I0122 05:51:52.684168 4982 scope.go:117] "RemoveContainer" containerID="c0b0396ab0bec81671ea4f84742143a65110ba736e175494a1b15a304a0112b9"
Jan 22 05:51:52 crc kubenswrapper[4982]: E0122 05:51:52.684620 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0b0396ab0bec81671ea4f84742143a65110ba736e175494a1b15a304a0112b9\": container with ID starting with c0b0396ab0bec81671ea4f84742143a65110ba736e175494a1b15a304a0112b9 not found: ID does not exist" containerID="c0b0396ab0bec81671ea4f84742143a65110ba736e175494a1b15a304a0112b9"
Jan 22 05:51:52 crc kubenswrapper[4982]: I0122 05:51:52.684671 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0b0396ab0bec81671ea4f84742143a65110ba736e175494a1b15a304a0112b9"} err="failed to get container status \"c0b0396ab0bec81671ea4f84742143a65110ba736e175494a1b15a304a0112b9\": rpc error: code = NotFound desc = could not find container \"c0b0396ab0bec81671ea4f84742143a65110ba736e175494a1b15a304a0112b9\": container with ID starting with c0b0396ab0bec81671ea4f84742143a65110ba736e175494a1b15a304a0112b9 not found: ID does not exist"
Jan 22 05:51:52 crc kubenswrapper[4982]: I0122 05:51:52.696748 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-777d4f5c79-gmbws"]
Jan 22 05:51:52 crc kubenswrapper[4982]: I0122 05:51:52.699662 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-777d4f5c79-gmbws"]
Jan 22 05:51:53 crc kubenswrapper[4982]: I0122 05:51:53.416570 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f48b7b747-jc74j"]
Jan 22 05:51:53 crc kubenswrapper[4982]: E0122 05:51:53.417355 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3ea7c26-a2bd-41b7-8ad6-d895e456d351" containerName="route-controller-manager"
Jan 22 05:51:53 crc kubenswrapper[4982]: I0122 05:51:53.417376 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3ea7c26-a2bd-41b7-8ad6-d895e456d351" containerName="route-controller-manager"
Jan 22 05:51:53 crc kubenswrapper[4982]: I0122 05:51:53.417559 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3ea7c26-a2bd-41b7-8ad6-d895e456d351" containerName="route-controller-manager"
Jan 22 05:51:53 crc kubenswrapper[4982]: I0122 05:51:53.418772 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-jc74j"
Jan 22 05:51:53 crc kubenswrapper[4982]: I0122 05:51:53.421176 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Jan 22 05:51:53 crc kubenswrapper[4982]: I0122 05:51:53.423040 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Jan 22 05:51:53 crc kubenswrapper[4982]: I0122 05:51:53.423508 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Jan 22 05:51:53 crc kubenswrapper[4982]: I0122 05:51:53.423774 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Jan 22 05:51:53 crc kubenswrapper[4982]: I0122 05:51:53.424330 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Jan 22 05:51:53 crc kubenswrapper[4982]: I0122 05:51:53.424990 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Jan 22 05:51:53 crc kubenswrapper[4982]: I0122 05:51:53.442274 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f48b7b747-jc74j"]
Jan 22 05:51:53 crc kubenswrapper[4982]: I0122 05:51:53.561721 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/958f44bb-27e2-4ad0-a228-a8935d61fad8-client-ca\") pod \"route-controller-manager-7f48b7b747-jc74j\" (UID: \"958f44bb-27e2-4ad0-a228-a8935d61fad8\") " pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-jc74j"
Jan 22 05:51:53 crc kubenswrapper[4982]: I0122 05:51:53.561838 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nv9t2\" (UniqueName: \"kubernetes.io/projected/958f44bb-27e2-4ad0-a228-a8935d61fad8-kube-api-access-nv9t2\") pod \"route-controller-manager-7f48b7b747-jc74j\" (UID: \"958f44bb-27e2-4ad0-a228-a8935d61fad8\") " pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-jc74j"
Jan 22 05:51:53 crc kubenswrapper[4982]: I0122 05:51:53.561934 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/958f44bb-27e2-4ad0-a228-a8935d61fad8-serving-cert\") pod \"route-controller-manager-7f48b7b747-jc74j\" (UID: \"958f44bb-27e2-4ad0-a228-a8935d61fad8\") " pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-jc74j"
Jan 22 05:51:53 crc kubenswrapper[4982]: I0122 05:51:53.562009 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/958f44bb-27e2-4ad0-a228-a8935d61fad8-config\") pod \"route-controller-manager-7f48b7b747-jc74j\" (UID: \"958f44bb-27e2-4ad0-a228-a8935d61fad8\") " pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-jc74j"
Jan 22 05:51:53 crc kubenswrapper[4982]: I0122 05:51:53.663916 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/958f44bb-27e2-4ad0-a228-a8935d61fad8-serving-cert\") pod \"route-controller-manager-7f48b7b747-jc74j\" (UID: \"958f44bb-27e2-4ad0-a228-a8935d61fad8\") " pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-jc74j"
Jan 22 05:51:53 crc kubenswrapper[4982]: I0122 05:51:53.664123 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/958f44bb-27e2-4ad0-a228-a8935d61fad8-config\") pod \"route-controller-manager-7f48b7b747-jc74j\" (UID: \"958f44bb-27e2-4ad0-a228-a8935d61fad8\") " pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-jc74j"
Jan 22 05:51:53 crc kubenswrapper[4982]: I0122 05:51:53.664315 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/958f44bb-27e2-4ad0-a228-a8935d61fad8-client-ca\") pod \"route-controller-manager-7f48b7b747-jc74j\" (UID: \"958f44bb-27e2-4ad0-a228-a8935d61fad8\") " pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-jc74j"
Jan 22 05:51:53 crc kubenswrapper[4982]: I0122 05:51:53.664399 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nv9t2\" (UniqueName: \"kubernetes.io/projected/958f44bb-27e2-4ad0-a228-a8935d61fad8-kube-api-access-nv9t2\") pod \"route-controller-manager-7f48b7b747-jc74j\" (UID: \"958f44bb-27e2-4ad0-a228-a8935d61fad8\") " pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-jc74j"
Jan 22 05:51:53 crc kubenswrapper[4982]: I0122 05:51:53.666473 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/958f44bb-27e2-4ad0-a228-a8935d61fad8-config\") pod \"route-controller-manager-7f48b7b747-jc74j\" (UID: \"958f44bb-27e2-4ad0-a228-a8935d61fad8\") " pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-jc74j"
Jan 22 05:51:53 crc kubenswrapper[4982]: I0122 05:51:53.667736 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/958f44bb-27e2-4ad0-a228-a8935d61fad8-client-ca\") pod \"route-controller-manager-7f48b7b747-jc74j\" (UID: \"958f44bb-27e2-4ad0-a228-a8935d61fad8\") " pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-jc74j"
Jan 22 05:51:53 crc kubenswrapper[4982]: I0122 05:51:53.676910 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/958f44bb-27e2-4ad0-a228-a8935d61fad8-serving-cert\") pod \"route-controller-manager-7f48b7b747-jc74j\" (UID: \"958f44bb-27e2-4ad0-a228-a8935d61fad8\") " pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-jc74j"
Jan 22 05:51:53 crc kubenswrapper[4982]: I0122 05:51:53.695309 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nv9t2\" (UniqueName: \"kubernetes.io/projected/958f44bb-27e2-4ad0-a228-a8935d61fad8-kube-api-access-nv9t2\") pod \"route-controller-manager-7f48b7b747-jc74j\" (UID: \"958f44bb-27e2-4ad0-a228-a8935d61fad8\") " pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-jc74j"
Jan 22 05:51:53 crc kubenswrapper[4982]: I0122 05:51:53.728103 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3ea7c26-a2bd-41b7-8ad6-d895e456d351" path="/var/lib/kubelet/pods/e3ea7c26-a2bd-41b7-8ad6-d895e456d351/volumes"
Jan 22 05:51:53 crc kubenswrapper[4982]: I0122 05:51:53.746037 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-jc74j"
Jan 22 05:51:54 crc kubenswrapper[4982]: I0122 05:51:54.279620 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f48b7b747-jc74j"]
Jan 22 05:51:54 crc kubenswrapper[4982]: W0122 05:51:54.280969 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod958f44bb_27e2_4ad0_a228_a8935d61fad8.slice/crio-95889a513eccc011a927d754a6b11ebe005614d638c8fee28abfbf7f0e9d2efe WatchSource:0}: Error finding container 95889a513eccc011a927d754a6b11ebe005614d638c8fee28abfbf7f0e9d2efe: Status 404 returned error can't find the container with id 95889a513eccc011a927d754a6b11ebe005614d638c8fee28abfbf7f0e9d2efe
Jan 22 05:51:54 crc kubenswrapper[4982]: I0122 05:51:54.682743 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-jc74j" event={"ID":"958f44bb-27e2-4ad0-a228-a8935d61fad8","Type":"ContainerStarted","Data":"f61d26f4966d75482550b42e3291a036895c85b4a0eb80af165c82a059e10920"}
Jan 22 05:51:54 crc kubenswrapper[4982]: I0122 05:51:54.683275 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-jc74j"
Jan 22 05:51:54 crc kubenswrapper[4982]: I0122 05:51:54.683307 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-jc74j" event={"ID":"958f44bb-27e2-4ad0-a228-a8935d61fad8","Type":"ContainerStarted","Data":"95889a513eccc011a927d754a6b11ebe005614d638c8fee28abfbf7f0e9d2efe"}
Jan 22 05:51:54 crc kubenswrapper[4982]: I0122 05:51:54.705996 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-jc74j" podStartSLOduration=3.705977534 podStartE2EDuration="3.705977534s" podCreationTimestamp="2026-01-22 05:51:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:51:54.70296537 +0000 UTC m=+375.541603383" watchObservedRunningTime="2026-01-22 05:51:54.705977534 +0000 UTC m=+375.544615527"
Jan 22 05:51:55 crc kubenswrapper[4982]: I0122 05:51:55.359161 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7f48b7b747-jc74j"
Jan 22 05:51:58 crc kubenswrapper[4982]: I0122 05:51:58.770109 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pjnnz"]
Jan 22 05:51:58 crc kubenswrapper[4982]: I0122 05:51:58.771373 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pjnnz" podUID="d4f84179-f77d-4daa-82e5-04398aa15339" containerName="registry-server" containerID="cri-o://ffa854479b114a01539fcaa4aad0306d0d39e3fe05d0da4317eb4e93135797c0" gracePeriod=2
Jan 22 05:51:59 crc kubenswrapper[4982]: I0122 05:51:59.410829 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pjnnz"
Jan 22 05:51:59 crc kubenswrapper[4982]: I0122 05:51:59.430642 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4f84179-f77d-4daa-82e5-04398aa15339-catalog-content\") pod \"d4f84179-f77d-4daa-82e5-04398aa15339\" (UID: \"d4f84179-f77d-4daa-82e5-04398aa15339\") "
Jan 22 05:51:59 crc kubenswrapper[4982]: I0122 05:51:59.430765 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4f84179-f77d-4daa-82e5-04398aa15339-utilities\") pod \"d4f84179-f77d-4daa-82e5-04398aa15339\" (UID: \"d4f84179-f77d-4daa-82e5-04398aa15339\") "
Jan 22 05:51:59 crc kubenswrapper[4982]: I0122 05:51:59.430812 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l7hmc\" (UniqueName: \"kubernetes.io/projected/d4f84179-f77d-4daa-82e5-04398aa15339-kube-api-access-l7hmc\") pod \"d4f84179-f77d-4daa-82e5-04398aa15339\" (UID: \"d4f84179-f77d-4daa-82e5-04398aa15339\") "
Jan 22 05:51:59 crc kubenswrapper[4982]: I0122 05:51:59.433077 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4f84179-f77d-4daa-82e5-04398aa15339-utilities" (OuterVolumeSpecName: "utilities") pod "d4f84179-f77d-4daa-82e5-04398aa15339" (UID: "d4f84179-f77d-4daa-82e5-04398aa15339"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 05:51:59 crc kubenswrapper[4982]: I0122 05:51:59.449254 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4f84179-f77d-4daa-82e5-04398aa15339-kube-api-access-l7hmc" (OuterVolumeSpecName: "kube-api-access-l7hmc") pod "d4f84179-f77d-4daa-82e5-04398aa15339" (UID: "d4f84179-f77d-4daa-82e5-04398aa15339"). InnerVolumeSpecName "kube-api-access-l7hmc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 05:51:59 crc kubenswrapper[4982]: I0122 05:51:59.462165 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4f84179-f77d-4daa-82e5-04398aa15339-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d4f84179-f77d-4daa-82e5-04398aa15339" (UID: "d4f84179-f77d-4daa-82e5-04398aa15339"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 05:51:59 crc kubenswrapper[4982]: I0122 05:51:59.532709 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4f84179-f77d-4daa-82e5-04398aa15339-utilities\") on node \"crc\" DevicePath \"\""
Jan 22 05:51:59 crc kubenswrapper[4982]: I0122 05:51:59.532746 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l7hmc\" (UniqueName: \"kubernetes.io/projected/d4f84179-f77d-4daa-82e5-04398aa15339-kube-api-access-l7hmc\") on node \"crc\" DevicePath \"\""
Jan 22 05:51:59 crc kubenswrapper[4982]: I0122 05:51:59.533429 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4f84179-f77d-4daa-82e5-04398aa15339-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 22 05:51:59 crc kubenswrapper[4982]: I0122 05:51:59.717496 4982 generic.go:334] "Generic (PLEG): container finished" podID="d4f84179-f77d-4daa-82e5-04398aa15339" containerID="ffa854479b114a01539fcaa4aad0306d0d39e3fe05d0da4317eb4e93135797c0" exitCode=0
Jan 22 05:51:59 crc kubenswrapper[4982]: I0122 05:51:59.717550 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pjnnz" event={"ID":"d4f84179-f77d-4daa-82e5-04398aa15339","Type":"ContainerDied","Data":"ffa854479b114a01539fcaa4aad0306d0d39e3fe05d0da4317eb4e93135797c0"}
Jan 22 05:51:59 crc kubenswrapper[4982]: I0122 05:51:59.717583 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pjnnz" event={"ID":"d4f84179-f77d-4daa-82e5-04398aa15339","Type":"ContainerDied","Data":"72e21f9abee13ca21a53b6da41420bdd7d781ff8fe01bd309dc9356f0ab93b7b"}
Jan 22 05:51:59 crc kubenswrapper[4982]: I0122 05:51:59.717609 4982 scope.go:117] "RemoveContainer" containerID="ffa854479b114a01539fcaa4aad0306d0d39e3fe05d0da4317eb4e93135797c0"
Jan 22 05:51:59 crc kubenswrapper[4982]: I0122 05:51:59.717679 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pjnnz"
Jan 22 05:51:59 crc kubenswrapper[4982]: I0122 05:51:59.738145 4982 scope.go:117] "RemoveContainer" containerID="127d2c0b41b75f270f5ff09b53ced980a0c7672e28ce2a9017d66ac9e82e131e"
Jan 22 05:51:59 crc kubenswrapper[4982]: I0122 05:51:59.768257 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pjnnz"]
Jan 22 05:51:59 crc kubenswrapper[4982]: I0122 05:51:59.778829 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-pjnnz"]
Jan 22 05:51:59 crc kubenswrapper[4982]: I0122 05:51:59.797449 4982 scope.go:117] "RemoveContainer" containerID="0128b527a2239dea67785e5532c2b174421d2cc02966f8e131b468f3a38ed9b1"
Jan 22 05:51:59 crc kubenswrapper[4982]: I0122 05:51:59.819883 4982 scope.go:117] "RemoveContainer" containerID="ffa854479b114a01539fcaa4aad0306d0d39e3fe05d0da4317eb4e93135797c0"
Jan 22 05:51:59 crc kubenswrapper[4982]: E0122 05:51:59.820625 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ffa854479b114a01539fcaa4aad0306d0d39e3fe05d0da4317eb4e93135797c0\": container with ID starting with ffa854479b114a01539fcaa4aad0306d0d39e3fe05d0da4317eb4e93135797c0 not found: ID does not exist" containerID="ffa854479b114a01539fcaa4aad0306d0d39e3fe05d0da4317eb4e93135797c0"
Jan 22 05:51:59 crc kubenswrapper[4982]: I0122 05:51:59.820700 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffa854479b114a01539fcaa4aad0306d0d39e3fe05d0da4317eb4e93135797c0"} err="failed to get container status \"ffa854479b114a01539fcaa4aad0306d0d39e3fe05d0da4317eb4e93135797c0\": rpc error: code = NotFound desc = could not find container \"ffa854479b114a01539fcaa4aad0306d0d39e3fe05d0da4317eb4e93135797c0\": container with ID starting with ffa854479b114a01539fcaa4aad0306d0d39e3fe05d0da4317eb4e93135797c0 not found: ID does not exist"
Jan 22 05:51:59 crc kubenswrapper[4982]: I0122 05:51:59.820749 4982 scope.go:117] "RemoveContainer" containerID="127d2c0b41b75f270f5ff09b53ced980a0c7672e28ce2a9017d66ac9e82e131e"
Jan 22 05:51:59 crc kubenswrapper[4982]: E0122 05:51:59.821483 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"127d2c0b41b75f270f5ff09b53ced980a0c7672e28ce2a9017d66ac9e82e131e\": container with ID starting with 127d2c0b41b75f270f5ff09b53ced980a0c7672e28ce2a9017d66ac9e82e131e not found: ID does not exist" containerID="127d2c0b41b75f270f5ff09b53ced980a0c7672e28ce2a9017d66ac9e82e131e"
Jan 22 05:51:59 crc kubenswrapper[4982]: I0122 05:51:59.821527 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"127d2c0b41b75f270f5ff09b53ced980a0c7672e28ce2a9017d66ac9e82e131e"} err="failed to get container status \"127d2c0b41b75f270f5ff09b53ced980a0c7672e28ce2a9017d66ac9e82e131e\": rpc error: code = NotFound desc = could not find container \"127d2c0b41b75f270f5ff09b53ced980a0c7672e28ce2a9017d66ac9e82e131e\": container with ID starting with 127d2c0b41b75f270f5ff09b53ced980a0c7672e28ce2a9017d66ac9e82e131e not found: ID does not exist"
Jan 22 05:51:59 crc kubenswrapper[4982]: I0122 05:51:59.821563 4982 scope.go:117] "RemoveContainer" containerID="0128b527a2239dea67785e5532c2b174421d2cc02966f8e131b468f3a38ed9b1"
Jan 22 05:51:59 crc kubenswrapper[4982]: E0122 05:51:59.821923 4982 log.go:32] "ContainerStatus from runtime service
failed" err="rpc error: code = NotFound desc = could not find container \"0128b527a2239dea67785e5532c2b174421d2cc02966f8e131b468f3a38ed9b1\": container with ID starting with 0128b527a2239dea67785e5532c2b174421d2cc02966f8e131b468f3a38ed9b1 not found: ID does not exist" containerID="0128b527a2239dea67785e5532c2b174421d2cc02966f8e131b468f3a38ed9b1" Jan 22 05:51:59 crc kubenswrapper[4982]: I0122 05:51:59.821972 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0128b527a2239dea67785e5532c2b174421d2cc02966f8e131b468f3a38ed9b1"} err="failed to get container status \"0128b527a2239dea67785e5532c2b174421d2cc02966f8e131b468f3a38ed9b1\": rpc error: code = NotFound desc = could not find container \"0128b527a2239dea67785e5532c2b174421d2cc02966f8e131b468f3a38ed9b1\": container with ID starting with 0128b527a2239dea67785e5532c2b174421d2cc02966f8e131b468f3a38ed9b1 not found: ID does not exist" Jan 22 05:52:01 crc kubenswrapper[4982]: I0122 05:52:01.728381 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4f84179-f77d-4daa-82e5-04398aa15339" path="/var/lib/kubelet/pods/d4f84179-f77d-4daa-82e5-04398aa15339/volumes" Jan 22 05:52:04 crc kubenswrapper[4982]: I0122 05:52:04.994902 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-trfnj"] Jan 22 05:52:04 crc kubenswrapper[4982]: E0122 05:52:04.995636 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4f84179-f77d-4daa-82e5-04398aa15339" containerName="registry-server" Jan 22 05:52:04 crc kubenswrapper[4982]: I0122 05:52:04.995654 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4f84179-f77d-4daa-82e5-04398aa15339" containerName="registry-server" Jan 22 05:52:04 crc kubenswrapper[4982]: E0122 05:52:04.995678 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4f84179-f77d-4daa-82e5-04398aa15339" containerName="extract-content" Jan 22 05:52:04 crc kubenswrapper[4982]: I0122 05:52:04.995687 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4f84179-f77d-4daa-82e5-04398aa15339" containerName="extract-content" Jan 22 05:52:04 crc kubenswrapper[4982]: E0122 05:52:04.995707 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4f84179-f77d-4daa-82e5-04398aa15339" containerName="extract-utilities" Jan 22 05:52:04 crc kubenswrapper[4982]: I0122 05:52:04.995715 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4f84179-f77d-4daa-82e5-04398aa15339" containerName="extract-utilities" Jan 22 05:52:04 crc kubenswrapper[4982]: I0122 05:52:04.995844 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4f84179-f77d-4daa-82e5-04398aa15339" containerName="registry-server" Jan 22 05:52:04 crc kubenswrapper[4982]: I0122 05:52:04.996432 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-trfnj" Jan 22 05:52:05 crc kubenswrapper[4982]: I0122 05:52:05.017241 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-trfnj"] Jan 22 05:52:05 crc kubenswrapper[4982]: I0122 05:52:05.017906 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c9a2cd4b-4091-4ce8-8b10-c7369e5fd029-registry-tls\") pod \"image-registry-66df7c8f76-trfnj\" (UID: \"c9a2cd4b-4091-4ce8-8b10-c7369e5fd029\") " pod="openshift-image-registry/image-registry-66df7c8f76-trfnj" Jan 22 05:52:05 crc kubenswrapper[4982]: I0122 05:52:05.017997 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-trfnj\" (UID: \"c9a2cd4b-4091-4ce8-8b10-c7369e5fd029\") " pod="openshift-image-registry/image-registry-66df7c8f76-trfnj" Jan 22 05:52:05 crc kubenswrapper[4982]: I0122 05:52:05.018031 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c9a2cd4b-4091-4ce8-8b10-c7369e5fd029-trusted-ca\") pod \"image-registry-66df7c8f76-trfnj\" (UID: \"c9a2cd4b-4091-4ce8-8b10-c7369e5fd029\") " pod="openshift-image-registry/image-registry-66df7c8f76-trfnj" Jan 22 05:52:05 crc kubenswrapper[4982]: I0122 05:52:05.018055 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c9a2cd4b-4091-4ce8-8b10-c7369e5fd029-ca-trust-extracted\") pod \"image-registry-66df7c8f76-trfnj\" (UID: \"c9a2cd4b-4091-4ce8-8b10-c7369e5fd029\") " pod="openshift-image-registry/image-registry-66df7c8f76-trfnj" Jan 22 05:52:05 crc kubenswrapper[4982]: I0122 05:52:05.018440 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c9a2cd4b-4091-4ce8-8b10-c7369e5fd029-registry-certificates\") pod \"image-registry-66df7c8f76-trfnj\" (UID: \"c9a2cd4b-4091-4ce8-8b10-c7369e5fd029\") " pod="openshift-image-registry/image-registry-66df7c8f76-trfnj" Jan 22 05:52:05 crc kubenswrapper[4982]: I0122 05:52:05.064045 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-trfnj\" (UID: \"c9a2cd4b-4091-4ce8-8b10-c7369e5fd029\") " pod="openshift-image-registry/image-registry-66df7c8f76-trfnj" Jan 22 05:52:05 crc kubenswrapper[4982]: I0122 05:52:05.119788 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c9a2cd4b-4091-4ce8-8b10-c7369e5fd029-installation-pull-secrets\") pod \"image-registry-66df7c8f76-trfnj\" (UID: \"c9a2cd4b-4091-4ce8-8b10-c7369e5fd029\") " pod="openshift-image-registry/image-registry-66df7c8f76-trfnj" Jan 22 05:52:05 crc kubenswrapper[4982]: I0122 05:52:05.120204 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctdq7\" (UniqueName: 
\"kubernetes.io/projected/c9a2cd4b-4091-4ce8-8b10-c7369e5fd029-kube-api-access-ctdq7\") pod \"image-registry-66df7c8f76-trfnj\" (UID: \"c9a2cd4b-4091-4ce8-8b10-c7369e5fd029\") " pod="openshift-image-registry/image-registry-66df7c8f76-trfnj" Jan 22 05:52:05 crc kubenswrapper[4982]: I0122 05:52:05.120227 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c9a2cd4b-4091-4ce8-8b10-c7369e5fd029-trusted-ca\") pod \"image-registry-66df7c8f76-trfnj\" (UID: \"c9a2cd4b-4091-4ce8-8b10-c7369e5fd029\") " pod="openshift-image-registry/image-registry-66df7c8f76-trfnj" Jan 22 05:52:05 crc kubenswrapper[4982]: I0122 05:52:05.120247 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c9a2cd4b-4091-4ce8-8b10-c7369e5fd029-ca-trust-extracted\") pod \"image-registry-66df7c8f76-trfnj\" (UID: \"c9a2cd4b-4091-4ce8-8b10-c7369e5fd029\") " pod="openshift-image-registry/image-registry-66df7c8f76-trfnj" Jan 22 05:52:05 crc kubenswrapper[4982]: I0122 05:52:05.120276 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c9a2cd4b-4091-4ce8-8b10-c7369e5fd029-registry-certificates\") pod \"image-registry-66df7c8f76-trfnj\" (UID: \"c9a2cd4b-4091-4ce8-8b10-c7369e5fd029\") " pod="openshift-image-registry/image-registry-66df7c8f76-trfnj" Jan 22 05:52:05 crc kubenswrapper[4982]: I0122 05:52:05.120299 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c9a2cd4b-4091-4ce8-8b10-c7369e5fd029-bound-sa-token\") pod \"image-registry-66df7c8f76-trfnj\" (UID: \"c9a2cd4b-4091-4ce8-8b10-c7369e5fd029\") " pod="openshift-image-registry/image-registry-66df7c8f76-trfnj" Jan 22 05:52:05 crc kubenswrapper[4982]: I0122 05:52:05.120337 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c9a2cd4b-4091-4ce8-8b10-c7369e5fd029-registry-tls\") pod \"image-registry-66df7c8f76-trfnj\" (UID: \"c9a2cd4b-4091-4ce8-8b10-c7369e5fd029\") " pod="openshift-image-registry/image-registry-66df7c8f76-trfnj" Jan 22 05:52:05 crc kubenswrapper[4982]: I0122 05:52:05.121581 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c9a2cd4b-4091-4ce8-8b10-c7369e5fd029-ca-trust-extracted\") pod \"image-registry-66df7c8f76-trfnj\" (UID: \"c9a2cd4b-4091-4ce8-8b10-c7369e5fd029\") " pod="openshift-image-registry/image-registry-66df7c8f76-trfnj" Jan 22 05:52:05 crc kubenswrapper[4982]: I0122 05:52:05.122315 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c9a2cd4b-4091-4ce8-8b10-c7369e5fd029-trusted-ca\") pod \"image-registry-66df7c8f76-trfnj\" (UID: \"c9a2cd4b-4091-4ce8-8b10-c7369e5fd029\") " pod="openshift-image-registry/image-registry-66df7c8f76-trfnj" Jan 22 05:52:05 crc kubenswrapper[4982]: I0122 05:52:05.122328 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c9a2cd4b-4091-4ce8-8b10-c7369e5fd029-registry-certificates\") pod \"image-registry-66df7c8f76-trfnj\" (UID: \"c9a2cd4b-4091-4ce8-8b10-c7369e5fd029\") " pod="openshift-image-registry/image-registry-66df7c8f76-trfnj" Jan 22 
05:52:05 crc kubenswrapper[4982]: I0122 05:52:05.129090 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c9a2cd4b-4091-4ce8-8b10-c7369e5fd029-registry-tls\") pod \"image-registry-66df7c8f76-trfnj\" (UID: \"c9a2cd4b-4091-4ce8-8b10-c7369e5fd029\") " pod="openshift-image-registry/image-registry-66df7c8f76-trfnj" Jan 22 05:52:05 crc kubenswrapper[4982]: I0122 05:52:05.221705 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c9a2cd4b-4091-4ce8-8b10-c7369e5fd029-bound-sa-token\") pod \"image-registry-66df7c8f76-trfnj\" (UID: \"c9a2cd4b-4091-4ce8-8b10-c7369e5fd029\") " pod="openshift-image-registry/image-registry-66df7c8f76-trfnj" Jan 22 05:52:05 crc kubenswrapper[4982]: I0122 05:52:05.221796 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctdq7\" (UniqueName: \"kubernetes.io/projected/c9a2cd4b-4091-4ce8-8b10-c7369e5fd029-kube-api-access-ctdq7\") pod \"image-registry-66df7c8f76-trfnj\" (UID: \"c9a2cd4b-4091-4ce8-8b10-c7369e5fd029\") " pod="openshift-image-registry/image-registry-66df7c8f76-trfnj" Jan 22 05:52:05 crc kubenswrapper[4982]: I0122 05:52:05.221819 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c9a2cd4b-4091-4ce8-8b10-c7369e5fd029-installation-pull-secrets\") pod \"image-registry-66df7c8f76-trfnj\" (UID: \"c9a2cd4b-4091-4ce8-8b10-c7369e5fd029\") " pod="openshift-image-registry/image-registry-66df7c8f76-trfnj" Jan 22 05:52:05 crc kubenswrapper[4982]: I0122 05:52:05.226681 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c9a2cd4b-4091-4ce8-8b10-c7369e5fd029-installation-pull-secrets\") pod \"image-registry-66df7c8f76-trfnj\" (UID: \"c9a2cd4b-4091-4ce8-8b10-c7369e5fd029\") " pod="openshift-image-registry/image-registry-66df7c8f76-trfnj" Jan 22 05:52:05 crc kubenswrapper[4982]: I0122 05:52:05.240967 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c9a2cd4b-4091-4ce8-8b10-c7369e5fd029-bound-sa-token\") pod \"image-registry-66df7c8f76-trfnj\" (UID: \"c9a2cd4b-4091-4ce8-8b10-c7369e5fd029\") " pod="openshift-image-registry/image-registry-66df7c8f76-trfnj" Jan 22 05:52:05 crc kubenswrapper[4982]: I0122 05:52:05.243684 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctdq7\" (UniqueName: \"kubernetes.io/projected/c9a2cd4b-4091-4ce8-8b10-c7369e5fd029-kube-api-access-ctdq7\") pod \"image-registry-66df7c8f76-trfnj\" (UID: \"c9a2cd4b-4091-4ce8-8b10-c7369e5fd029\") " pod="openshift-image-registry/image-registry-66df7c8f76-trfnj" Jan 22 05:52:05 crc kubenswrapper[4982]: I0122 05:52:05.318623 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-trfnj" Jan 22 05:52:05 crc kubenswrapper[4982]: I0122 05:52:05.777458 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-trfnj"] Jan 22 05:52:06 crc kubenswrapper[4982]: I0122 05:52:06.777592 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-trfnj" event={"ID":"c9a2cd4b-4091-4ce8-8b10-c7369e5fd029","Type":"ContainerStarted","Data":"2b4722349d8a6569160ff439b14f845542d292c4c091602ff29466dbbf2c7afc"} Jan 22 05:52:06 crc kubenswrapper[4982]: I0122 05:52:06.778425 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-trfnj" Jan 22 05:52:06 crc kubenswrapper[4982]: I0122 05:52:06.778460 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-trfnj" event={"ID":"c9a2cd4b-4091-4ce8-8b10-c7369e5fd029","Type":"ContainerStarted","Data":"a643bb6b9882d5f90e66e5db88f6718104a12efc436230049b6cf23349e9c586"} Jan 22 05:52:06 crc kubenswrapper[4982]: I0122 05:52:06.809145 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-trfnj" podStartSLOduration=2.809116828 podStartE2EDuration="2.809116828s" podCreationTimestamp="2026-01-22 05:52:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:52:06.806214007 +0000 UTC m=+387.644852060" watchObservedRunningTime="2026-01-22 05:52:06.809116828 +0000 UTC m=+387.647754861" Jan 22 05:52:14 crc kubenswrapper[4982]: I0122 05:52:14.907883 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4257l"] Jan 22 05:52:14 crc kubenswrapper[4982]: I0122 05:52:14.908996 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4257l" podUID="d8d13857-ec18-4e5c-aadf-61479ff0b6c2" containerName="registry-server" containerID="cri-o://3e6d6115cfc6af5d9631d52b4babd689eac995dbf7a113e780c829997f619780" gracePeriod=30 Jan 22 05:52:14 crc kubenswrapper[4982]: I0122 05:52:14.920584 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dw5wf"] Jan 22 05:52:14 crc kubenswrapper[4982]: I0122 05:52:14.926282 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-dw5wf" podUID="27d44766-c0c7-48dd-b00d-0fc5adeb8707" containerName="registry-server" containerID="cri-o://9c0f9c2acf0cc5fbcd6047512e63463f3394e05ee96d61392f91bd6189f33d02" gracePeriod=30 Jan 22 05:52:14 crc kubenswrapper[4982]: I0122 05:52:14.932306 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xgblw"] Jan 22 05:52:14 crc kubenswrapper[4982]: I0122 05:52:14.932620 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-xgblw" podUID="b3854c73-a5eb-4db0-8f25-ecdf90993761" containerName="marketplace-operator" containerID="cri-o://377086b49c983b259442fa1c81950fcac3fbb2aaae0a1c044e59d12e5ba25d56" gracePeriod=30 Jan 22 05:52:14 crc kubenswrapper[4982]: I0122 05:52:14.946979 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zt96v"] Jan 
22 05:52:14 crc kubenswrapper[4982]: I0122 05:52:14.947241 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-zt96v" podUID="15dc31d8-8456-4ad8-859d-b34c6a85522a" containerName="registry-server" containerID="cri-o://c6de8c87a0951672661ca0ed5d0f5ef80a931cab2a4ee8eb9c036a4bb9f7f18a" gracePeriod=30 Jan 22 05:52:14 crc kubenswrapper[4982]: I0122 05:52:14.963743 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rvmwc"] Jan 22 05:52:14 crc kubenswrapper[4982]: I0122 05:52:14.964587 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rvmwc" podUID="0b4ebfeb-4674-45e8-952b-225d022d3392" containerName="registry-server" containerID="cri-o://8c6f09d2c0423107eaad3a151c98e934f5f2c8358e7216f7e0a9cc85bb5346fb" gracePeriod=30 Jan 22 05:52:14 crc kubenswrapper[4982]: I0122 05:52:14.967070 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-ghnt5"] Jan 22 05:52:14 crc kubenswrapper[4982]: I0122 05:52:14.967957 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-ghnt5" Jan 22 05:52:14 crc kubenswrapper[4982]: I0122 05:52:14.989888 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-ghnt5"] Jan 22 05:52:15 crc kubenswrapper[4982]: E0122 05:52:15.090214 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3e6d6115cfc6af5d9631d52b4babd689eac995dbf7a113e780c829997f619780 is running failed: container process not found" containerID="3e6d6115cfc6af5d9631d52b4babd689eac995dbf7a113e780c829997f619780" cmd=["grpc_health_probe","-addr=:50051"] Jan 22 05:52:15 crc kubenswrapper[4982]: E0122 05:52:15.093928 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3e6d6115cfc6af5d9631d52b4babd689eac995dbf7a113e780c829997f619780 is running failed: container process not found" containerID="3e6d6115cfc6af5d9631d52b4babd689eac995dbf7a113e780c829997f619780" cmd=["grpc_health_probe","-addr=:50051"] Jan 22 05:52:15 crc kubenswrapper[4982]: E0122 05:52:15.094278 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3e6d6115cfc6af5d9631d52b4babd689eac995dbf7a113e780c829997f619780 is running failed: container process not found" containerID="3e6d6115cfc6af5d9631d52b4babd689eac995dbf7a113e780c829997f619780" cmd=["grpc_health_probe","-addr=:50051"] Jan 22 05:52:15 crc kubenswrapper[4982]: E0122 05:52:15.094309 4982 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3e6d6115cfc6af5d9631d52b4babd689eac995dbf7a113e780c829997f619780 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-4257l" podUID="d8d13857-ec18-4e5c-aadf-61479ff0b6c2" containerName="registry-server" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.102319 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hd8w9\" (UniqueName: 
\"kubernetes.io/projected/4f40a1f2-f50f-4898-b3d1-c2b00fd0606c-kube-api-access-hd8w9\") pod \"marketplace-operator-79b997595-ghnt5\" (UID: \"4f40a1f2-f50f-4898-b3d1-c2b00fd0606c\") " pod="openshift-marketplace/marketplace-operator-79b997595-ghnt5" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.102428 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4f40a1f2-f50f-4898-b3d1-c2b00fd0606c-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-ghnt5\" (UID: \"4f40a1f2-f50f-4898-b3d1-c2b00fd0606c\") " pod="openshift-marketplace/marketplace-operator-79b997595-ghnt5" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.102451 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4f40a1f2-f50f-4898-b3d1-c2b00fd0606c-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-ghnt5\" (UID: \"4f40a1f2-f50f-4898-b3d1-c2b00fd0606c\") " pod="openshift-marketplace/marketplace-operator-79b997595-ghnt5" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.203610 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hd8w9\" (UniqueName: \"kubernetes.io/projected/4f40a1f2-f50f-4898-b3d1-c2b00fd0606c-kube-api-access-hd8w9\") pod \"marketplace-operator-79b997595-ghnt5\" (UID: \"4f40a1f2-f50f-4898-b3d1-c2b00fd0606c\") " pod="openshift-marketplace/marketplace-operator-79b997595-ghnt5" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.204081 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4f40a1f2-f50f-4898-b3d1-c2b00fd0606c-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-ghnt5\" (UID: \"4f40a1f2-f50f-4898-b3d1-c2b00fd0606c\") " pod="openshift-marketplace/marketplace-operator-79b997595-ghnt5" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.204267 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4f40a1f2-f50f-4898-b3d1-c2b00fd0606c-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-ghnt5\" (UID: \"4f40a1f2-f50f-4898-b3d1-c2b00fd0606c\") " pod="openshift-marketplace/marketplace-operator-79b997595-ghnt5" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.210440 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4f40a1f2-f50f-4898-b3d1-c2b00fd0606c-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-ghnt5\" (UID: \"4f40a1f2-f50f-4898-b3d1-c2b00fd0606c\") " pod="openshift-marketplace/marketplace-operator-79b997595-ghnt5" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.212666 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4f40a1f2-f50f-4898-b3d1-c2b00fd0606c-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-ghnt5\" (UID: \"4f40a1f2-f50f-4898-b3d1-c2b00fd0606c\") " pod="openshift-marketplace/marketplace-operator-79b997595-ghnt5" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.236562 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hd8w9\" (UniqueName: 
\"kubernetes.io/projected/4f40a1f2-f50f-4898-b3d1-c2b00fd0606c-kube-api-access-hd8w9\") pod \"marketplace-operator-79b997595-ghnt5\" (UID: \"4f40a1f2-f50f-4898-b3d1-c2b00fd0606c\") " pod="openshift-marketplace/marketplace-operator-79b997595-ghnt5" Jan 22 05:52:15 crc kubenswrapper[4982]: E0122 05:52:15.278125 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9c0f9c2acf0cc5fbcd6047512e63463f3394e05ee96d61392f91bd6189f33d02 is running failed: container process not found" containerID="9c0f9c2acf0cc5fbcd6047512e63463f3394e05ee96d61392f91bd6189f33d02" cmd=["grpc_health_probe","-addr=:50051"] Jan 22 05:52:15 crc kubenswrapper[4982]: E0122 05:52:15.278750 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9c0f9c2acf0cc5fbcd6047512e63463f3394e05ee96d61392f91bd6189f33d02 is running failed: container process not found" containerID="9c0f9c2acf0cc5fbcd6047512e63463f3394e05ee96d61392f91bd6189f33d02" cmd=["grpc_health_probe","-addr=:50051"] Jan 22 05:52:15 crc kubenswrapper[4982]: E0122 05:52:15.279542 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9c0f9c2acf0cc5fbcd6047512e63463f3394e05ee96d61392f91bd6189f33d02 is running failed: container process not found" containerID="9c0f9c2acf0cc5fbcd6047512e63463f3394e05ee96d61392f91bd6189f33d02" cmd=["grpc_health_probe","-addr=:50051"] Jan 22 05:52:15 crc kubenswrapper[4982]: E0122 05:52:15.279588 4982 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9c0f9c2acf0cc5fbcd6047512e63463f3394e05ee96d61392f91bd6189f33d02 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/community-operators-dw5wf" podUID="27d44766-c0c7-48dd-b00d-0fc5adeb8707" containerName="registry-server" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.360121 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-ghnt5" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.471787 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4257l" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.613828 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8d13857-ec18-4e5c-aadf-61479ff0b6c2-utilities\") pod \"d8d13857-ec18-4e5c-aadf-61479ff0b6c2\" (UID: \"d8d13857-ec18-4e5c-aadf-61479ff0b6c2\") " Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.613950 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8d13857-ec18-4e5c-aadf-61479ff0b6c2-catalog-content\") pod \"d8d13857-ec18-4e5c-aadf-61479ff0b6c2\" (UID: \"d8d13857-ec18-4e5c-aadf-61479ff0b6c2\") " Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.613988 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rc8sn\" (UniqueName: \"kubernetes.io/projected/d8d13857-ec18-4e5c-aadf-61479ff0b6c2-kube-api-access-rc8sn\") pod \"d8d13857-ec18-4e5c-aadf-61479ff0b6c2\" (UID: \"d8d13857-ec18-4e5c-aadf-61479ff0b6c2\") " Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.616208 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8d13857-ec18-4e5c-aadf-61479ff0b6c2-utilities" (OuterVolumeSpecName: "utilities") pod "d8d13857-ec18-4e5c-aadf-61479ff0b6c2" (UID: "d8d13857-ec18-4e5c-aadf-61479ff0b6c2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.643647 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8d13857-ec18-4e5c-aadf-61479ff0b6c2-kube-api-access-rc8sn" (OuterVolumeSpecName: "kube-api-access-rc8sn") pod "d8d13857-ec18-4e5c-aadf-61479ff0b6c2" (UID: "d8d13857-ec18-4e5c-aadf-61479ff0b6c2"). InnerVolumeSpecName "kube-api-access-rc8sn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.679562 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8d13857-ec18-4e5c-aadf-61479ff0b6c2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d8d13857-ec18-4e5c-aadf-61479ff0b6c2" (UID: "d8d13857-ec18-4e5c-aadf-61479ff0b6c2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.722157 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8d13857-ec18-4e5c-aadf-61479ff0b6c2-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.722193 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8d13857-ec18-4e5c-aadf-61479ff0b6c2-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.722207 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rc8sn\" (UniqueName: \"kubernetes.io/projected/d8d13857-ec18-4e5c-aadf-61479ff0b6c2-kube-api-access-rc8sn\") on node \"crc\" DevicePath \"\"" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.736936 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zt96v" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.806393 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rvmwc" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.885091 4982 generic.go:334] "Generic (PLEG): container finished" podID="15dc31d8-8456-4ad8-859d-b34c6a85522a" containerID="c6de8c87a0951672661ca0ed5d0f5ef80a931cab2a4ee8eb9c036a4bb9f7f18a" exitCode=0 Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.885185 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zt96v" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.885266 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zt96v" event={"ID":"15dc31d8-8456-4ad8-859d-b34c6a85522a","Type":"ContainerDied","Data":"c6de8c87a0951672661ca0ed5d0f5ef80a931cab2a4ee8eb9c036a4bb9f7f18a"} Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.885316 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zt96v" event={"ID":"15dc31d8-8456-4ad8-859d-b34c6a85522a","Type":"ContainerDied","Data":"0059a7fe644502aac523ef49c849c9907238ef51b1976d2519a08c0f1e49ee79"} Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.885343 4982 scope.go:117] "RemoveContainer" containerID="c6de8c87a0951672661ca0ed5d0f5ef80a931cab2a4ee8eb9c036a4bb9f7f18a" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.891653 4982 generic.go:334] "Generic (PLEG): container finished" podID="d8d13857-ec18-4e5c-aadf-61479ff0b6c2" containerID="3e6d6115cfc6af5d9631d52b4babd689eac995dbf7a113e780c829997f619780" exitCode=0 Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.891720 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4257l" event={"ID":"d8d13857-ec18-4e5c-aadf-61479ff0b6c2","Type":"ContainerDied","Data":"3e6d6115cfc6af5d9631d52b4babd689eac995dbf7a113e780c829997f619780"} Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.891754 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4257l" event={"ID":"d8d13857-ec18-4e5c-aadf-61479ff0b6c2","Type":"ContainerDied","Data":"01bb3939f5680bc556a62f4129fc4a74df62bbe29ff72ac2d94b6f467f26d780"} Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.891821 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4257l" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.900792 4982 generic.go:334] "Generic (PLEG): container finished" podID="0b4ebfeb-4674-45e8-952b-225d022d3392" containerID="8c6f09d2c0423107eaad3a151c98e934f5f2c8358e7216f7e0a9cc85bb5346fb" exitCode=0 Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.901388 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rvmwc" event={"ID":"0b4ebfeb-4674-45e8-952b-225d022d3392","Type":"ContainerDied","Data":"8c6f09d2c0423107eaad3a151c98e934f5f2c8358e7216f7e0a9cc85bb5346fb"} Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.910890 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rvmwc" event={"ID":"0b4ebfeb-4674-45e8-952b-225d022d3392","Type":"ContainerDied","Data":"f7fb456399c0ef23b40c8af5d804deed2bb05391bbe166103665c9df96b5ea21"} Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.901505 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rvmwc" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.911244 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dw5wf" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.916048 4982 generic.go:334] "Generic (PLEG): container finished" podID="b3854c73-a5eb-4db0-8f25-ecdf90993761" containerID="377086b49c983b259442fa1c81950fcac3fbb2aaae0a1c044e59d12e5ba25d56" exitCode=0 Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.916137 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-xgblw" event={"ID":"b3854c73-a5eb-4db0-8f25-ecdf90993761","Type":"ContainerDied","Data":"377086b49c983b259442fa1c81950fcac3fbb2aaae0a1c044e59d12e5ba25d56"} Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.918808 4982 generic.go:334] "Generic (PLEG): container finished" podID="27d44766-c0c7-48dd-b00d-0fc5adeb8707" containerID="9c0f9c2acf0cc5fbcd6047512e63463f3394e05ee96d61392f91bd6189f33d02" exitCode=0 Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.918890 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dw5wf" event={"ID":"27d44766-c0c7-48dd-b00d-0fc5adeb8707","Type":"ContainerDied","Data":"9c0f9c2acf0cc5fbcd6047512e63463f3394e05ee96d61392f91bd6189f33d02"} Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.918962 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dw5wf" event={"ID":"27d44766-c0c7-48dd-b00d-0fc5adeb8707","Type":"ContainerDied","Data":"1d46a529df127b26000844db151734bd51d423df2d662dfae9248e8ca95f9715"} Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.918914 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dw5wf" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.921123 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-xgblw" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.924928 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-42b9z\" (UniqueName: \"kubernetes.io/projected/15dc31d8-8456-4ad8-859d-b34c6a85522a-kube-api-access-42b9z\") pod \"15dc31d8-8456-4ad8-859d-b34c6a85522a\" (UID: \"15dc31d8-8456-4ad8-859d-b34c6a85522a\") " Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.924995 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15dc31d8-8456-4ad8-859d-b34c6a85522a-catalog-content\") pod \"15dc31d8-8456-4ad8-859d-b34c6a85522a\" (UID: \"15dc31d8-8456-4ad8-859d-b34c6a85522a\") " Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.925080 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15dc31d8-8456-4ad8-859d-b34c6a85522a-utilities\") pod \"15dc31d8-8456-4ad8-859d-b34c6a85522a\" (UID: \"15dc31d8-8456-4ad8-859d-b34c6a85522a\") " Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.925117 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hgccj\" (UniqueName: \"kubernetes.io/projected/0b4ebfeb-4674-45e8-952b-225d022d3392-kube-api-access-hgccj\") pod \"0b4ebfeb-4674-45e8-952b-225d022d3392\" (UID: \"0b4ebfeb-4674-45e8-952b-225d022d3392\") " Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.925148 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b4ebfeb-4674-45e8-952b-225d022d3392-catalog-content\") pod \"0b4ebfeb-4674-45e8-952b-225d022d3392\" (UID: \"0b4ebfeb-4674-45e8-952b-225d022d3392\") " Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.925186 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b4ebfeb-4674-45e8-952b-225d022d3392-utilities\") pod \"0b4ebfeb-4674-45e8-952b-225d022d3392\" (UID: \"0b4ebfeb-4674-45e8-952b-225d022d3392\") " Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.933242 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b4ebfeb-4674-45e8-952b-225d022d3392-utilities" (OuterVolumeSpecName: "utilities") pod "0b4ebfeb-4674-45e8-952b-225d022d3392" (UID: "0b4ebfeb-4674-45e8-952b-225d022d3392"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.934885 4982 scope.go:117] "RemoveContainer" containerID="2aa3bfcc5df6f523ed982587a226037957b598ef73a754464ac3e80cf78ad19f" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.935335 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b4ebfeb-4674-45e8-952b-225d022d3392-kube-api-access-hgccj" (OuterVolumeSpecName: "kube-api-access-hgccj") pod "0b4ebfeb-4674-45e8-952b-225d022d3392" (UID: "0b4ebfeb-4674-45e8-952b-225d022d3392"). InnerVolumeSpecName "kube-api-access-hgccj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.938640 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15dc31d8-8456-4ad8-859d-b34c6a85522a-utilities" (OuterVolumeSpecName: "utilities") pod "15dc31d8-8456-4ad8-859d-b34c6a85522a" (UID: "15dc31d8-8456-4ad8-859d-b34c6a85522a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.939201 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15dc31d8-8456-4ad8-859d-b34c6a85522a-kube-api-access-42b9z" (OuterVolumeSpecName: "kube-api-access-42b9z") pod "15dc31d8-8456-4ad8-859d-b34c6a85522a" (UID: "15dc31d8-8456-4ad8-859d-b34c6a85522a"). InnerVolumeSpecName "kube-api-access-42b9z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.954211 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-42b9z\" (UniqueName: \"kubernetes.io/projected/15dc31d8-8456-4ad8-859d-b34c6a85522a-kube-api-access-42b9z\") on node \"crc\" DevicePath \"\"" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.954252 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15dc31d8-8456-4ad8-859d-b34c6a85522a-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.954293 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hgccj\" (UniqueName: \"kubernetes.io/projected/0b4ebfeb-4674-45e8-952b-225d022d3392-kube-api-access-hgccj\") on node \"crc\" DevicePath \"\"" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.954320 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b4ebfeb-4674-45e8-952b-225d022d3392-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.973587 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15dc31d8-8456-4ad8-859d-b34c6a85522a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "15dc31d8-8456-4ad8-859d-b34c6a85522a" (UID: "15dc31d8-8456-4ad8-859d-b34c6a85522a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 05:52:15 crc kubenswrapper[4982]: I0122 05:52:15.973999 4982 scope.go:117] "RemoveContainer" containerID="862e2f861467faf0b91f90cfb545bd6be0d1d7b49130614056a627b63e47230b" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.020046 4982 scope.go:117] "RemoveContainer" containerID="c6de8c87a0951672661ca0ed5d0f5ef80a931cab2a4ee8eb9c036a4bb9f7f18a" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.019658 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4257l"] Jan 22 05:52:16 crc kubenswrapper[4982]: E0122 05:52:16.020946 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6de8c87a0951672661ca0ed5d0f5ef80a931cab2a4ee8eb9c036a4bb9f7f18a\": container with ID starting with c6de8c87a0951672661ca0ed5d0f5ef80a931cab2a4ee8eb9c036a4bb9f7f18a not found: ID does not exist" containerID="c6de8c87a0951672661ca0ed5d0f5ef80a931cab2a4ee8eb9c036a4bb9f7f18a" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.021420 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6de8c87a0951672661ca0ed5d0f5ef80a931cab2a4ee8eb9c036a4bb9f7f18a"} err="failed to get container status \"c6de8c87a0951672661ca0ed5d0f5ef80a931cab2a4ee8eb9c036a4bb9f7f18a\": rpc error: code = NotFound desc = could not find container \"c6de8c87a0951672661ca0ed5d0f5ef80a931cab2a4ee8eb9c036a4bb9f7f18a\": container with ID starting with c6de8c87a0951672661ca0ed5d0f5ef80a931cab2a4ee8eb9c036a4bb9f7f18a not found: ID does not exist" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.021625 4982 scope.go:117] "RemoveContainer" containerID="2aa3bfcc5df6f523ed982587a226037957b598ef73a754464ac3e80cf78ad19f" Jan 22 05:52:16 crc kubenswrapper[4982]: E0122 05:52:16.022300 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2aa3bfcc5df6f523ed982587a226037957b598ef73a754464ac3e80cf78ad19f\": container with ID starting with 2aa3bfcc5df6f523ed982587a226037957b598ef73a754464ac3e80cf78ad19f not found: ID does not exist" containerID="2aa3bfcc5df6f523ed982587a226037957b598ef73a754464ac3e80cf78ad19f" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.022572 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2aa3bfcc5df6f523ed982587a226037957b598ef73a754464ac3e80cf78ad19f"} err="failed to get container status \"2aa3bfcc5df6f523ed982587a226037957b598ef73a754464ac3e80cf78ad19f\": rpc error: code = NotFound desc = could not find container \"2aa3bfcc5df6f523ed982587a226037957b598ef73a754464ac3e80cf78ad19f\": container with ID starting with 2aa3bfcc5df6f523ed982587a226037957b598ef73a754464ac3e80cf78ad19f not found: ID does not exist" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.022756 4982 scope.go:117] "RemoveContainer" containerID="862e2f861467faf0b91f90cfb545bd6be0d1d7b49130614056a627b63e47230b" Jan 22 05:52:16 crc kubenswrapper[4982]: E0122 05:52:16.025003 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"862e2f861467faf0b91f90cfb545bd6be0d1d7b49130614056a627b63e47230b\": container with ID starting with 862e2f861467faf0b91f90cfb545bd6be0d1d7b49130614056a627b63e47230b not found: ID does not exist" containerID="862e2f861467faf0b91f90cfb545bd6be0d1d7b49130614056a627b63e47230b" Jan 22 05:52:16 crc 
kubenswrapper[4982]: I0122 05:52:16.025038 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"862e2f861467faf0b91f90cfb545bd6be0d1d7b49130614056a627b63e47230b"} err="failed to get container status \"862e2f861467faf0b91f90cfb545bd6be0d1d7b49130614056a627b63e47230b\": rpc error: code = NotFound desc = could not find container \"862e2f861467faf0b91f90cfb545bd6be0d1d7b49130614056a627b63e47230b\": container with ID starting with 862e2f861467faf0b91f90cfb545bd6be0d1d7b49130614056a627b63e47230b not found: ID does not exist" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.025058 4982 scope.go:117] "RemoveContainer" containerID="3e6d6115cfc6af5d9631d52b4babd689eac995dbf7a113e780c829997f619780" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.025152 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4257l"] Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.028691 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-ghnt5"] Jan 22 05:52:16 crc kubenswrapper[4982]: W0122 05:52:16.029086 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f40a1f2_f50f_4898_b3d1_c2b00fd0606c.slice/crio-805a82dc9e35132cd1f0caca8a013340696f4126c75cde6b5739b3ad5ab9ad89 WatchSource:0}: Error finding container 805a82dc9e35132cd1f0caca8a013340696f4126c75cde6b5739b3ad5ab9ad89: Status 404 returned error can't find the container with id 805a82dc9e35132cd1f0caca8a013340696f4126c75cde6b5739b3ad5ab9ad89 Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.047620 4982 scope.go:117] "RemoveContainer" containerID="b5e2c59cfead676be2c312949a0b60bd0679a19d6638baf2b2a6c84ca7b959e9" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.058521 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27d44766-c0c7-48dd-b00d-0fc5adeb8707-catalog-content\") pod \"27d44766-c0c7-48dd-b00d-0fc5adeb8707\" (UID: \"27d44766-c0c7-48dd-b00d-0fc5adeb8707\") " Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.059823 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ljc5c\" (UniqueName: \"kubernetes.io/projected/b3854c73-a5eb-4db0-8f25-ecdf90993761-kube-api-access-ljc5c\") pod \"b3854c73-a5eb-4db0-8f25-ecdf90993761\" (UID: \"b3854c73-a5eb-4db0-8f25-ecdf90993761\") " Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.060977 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b3854c73-a5eb-4db0-8f25-ecdf90993761-marketplace-operator-metrics\") pod \"b3854c73-a5eb-4db0-8f25-ecdf90993761\" (UID: \"b3854c73-a5eb-4db0-8f25-ecdf90993761\") " Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.061618 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b3854c73-a5eb-4db0-8f25-ecdf90993761-marketplace-trusted-ca\") pod \"b3854c73-a5eb-4db0-8f25-ecdf90993761\" (UID: \"b3854c73-a5eb-4db0-8f25-ecdf90993761\") " Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.062291 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3854c73-a5eb-4db0-8f25-ecdf90993761-marketplace-trusted-ca" (OuterVolumeSpecName: 
"marketplace-trusted-ca") pod "b3854c73-a5eb-4db0-8f25-ecdf90993761" (UID: "b3854c73-a5eb-4db0-8f25-ecdf90993761"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.062915 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27d44766-c0c7-48dd-b00d-0fc5adeb8707-utilities\") pod \"27d44766-c0c7-48dd-b00d-0fc5adeb8707\" (UID: \"27d44766-c0c7-48dd-b00d-0fc5adeb8707\") " Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.063303 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6dcqf\" (UniqueName: \"kubernetes.io/projected/27d44766-c0c7-48dd-b00d-0fc5adeb8707-kube-api-access-6dcqf\") pod \"27d44766-c0c7-48dd-b00d-0fc5adeb8707\" (UID: \"27d44766-c0c7-48dd-b00d-0fc5adeb8707\") " Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.063579 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15dc31d8-8456-4ad8-859d-b34c6a85522a-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.063597 4982 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b3854c73-a5eb-4db0-8f25-ecdf90993761-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.064547 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27d44766-c0c7-48dd-b00d-0fc5adeb8707-utilities" (OuterVolumeSpecName: "utilities") pod "27d44766-c0c7-48dd-b00d-0fc5adeb8707" (UID: "27d44766-c0c7-48dd-b00d-0fc5adeb8707"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.065057 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3854c73-a5eb-4db0-8f25-ecdf90993761-kube-api-access-ljc5c" (OuterVolumeSpecName: "kube-api-access-ljc5c") pod "b3854c73-a5eb-4db0-8f25-ecdf90993761" (UID: "b3854c73-a5eb-4db0-8f25-ecdf90993761"). InnerVolumeSpecName "kube-api-access-ljc5c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.068081 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3854c73-a5eb-4db0-8f25-ecdf90993761-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b3854c73-a5eb-4db0-8f25-ecdf90993761" (UID: "b3854c73-a5eb-4db0-8f25-ecdf90993761"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.069832 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27d44766-c0c7-48dd-b00d-0fc5adeb8707-kube-api-access-6dcqf" (OuterVolumeSpecName: "kube-api-access-6dcqf") pod "27d44766-c0c7-48dd-b00d-0fc5adeb8707" (UID: "27d44766-c0c7-48dd-b00d-0fc5adeb8707"). InnerVolumeSpecName "kube-api-access-6dcqf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.109790 4982 scope.go:117] "RemoveContainer" containerID="c3a636d63ba2e53d2d712f3848b411ebfb5dbc8bb7b78edf946402da149b87b2" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.118262 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b4ebfeb-4674-45e8-952b-225d022d3392-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0b4ebfeb-4674-45e8-952b-225d022d3392" (UID: "0b4ebfeb-4674-45e8-952b-225d022d3392"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.118316 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27d44766-c0c7-48dd-b00d-0fc5adeb8707-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "27d44766-c0c7-48dd-b00d-0fc5adeb8707" (UID: "27d44766-c0c7-48dd-b00d-0fc5adeb8707"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.151058 4982 scope.go:117] "RemoveContainer" containerID="3e6d6115cfc6af5d9631d52b4babd689eac995dbf7a113e780c829997f619780" Jan 22 05:52:16 crc kubenswrapper[4982]: E0122 05:52:16.151744 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e6d6115cfc6af5d9631d52b4babd689eac995dbf7a113e780c829997f619780\": container with ID starting with 3e6d6115cfc6af5d9631d52b4babd689eac995dbf7a113e780c829997f619780 not found: ID does not exist" containerID="3e6d6115cfc6af5d9631d52b4babd689eac995dbf7a113e780c829997f619780" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.151801 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e6d6115cfc6af5d9631d52b4babd689eac995dbf7a113e780c829997f619780"} err="failed to get container status \"3e6d6115cfc6af5d9631d52b4babd689eac995dbf7a113e780c829997f619780\": rpc error: code = NotFound desc = could not find container \"3e6d6115cfc6af5d9631d52b4babd689eac995dbf7a113e780c829997f619780\": container with ID starting with 3e6d6115cfc6af5d9631d52b4babd689eac995dbf7a113e780c829997f619780 not found: ID does not exist" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.151863 4982 scope.go:117] "RemoveContainer" containerID="b5e2c59cfead676be2c312949a0b60bd0679a19d6638baf2b2a6c84ca7b959e9" Jan 22 05:52:16 crc kubenswrapper[4982]: E0122 05:52:16.152583 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5e2c59cfead676be2c312949a0b60bd0679a19d6638baf2b2a6c84ca7b959e9\": container with ID starting with b5e2c59cfead676be2c312949a0b60bd0679a19d6638baf2b2a6c84ca7b959e9 not found: ID does not exist" containerID="b5e2c59cfead676be2c312949a0b60bd0679a19d6638baf2b2a6c84ca7b959e9" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.152629 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5e2c59cfead676be2c312949a0b60bd0679a19d6638baf2b2a6c84ca7b959e9"} err="failed to get container status \"b5e2c59cfead676be2c312949a0b60bd0679a19d6638baf2b2a6c84ca7b959e9\": rpc error: code = NotFound desc = could not find container \"b5e2c59cfead676be2c312949a0b60bd0679a19d6638baf2b2a6c84ca7b959e9\": container with ID starting with 
b5e2c59cfead676be2c312949a0b60bd0679a19d6638baf2b2a6c84ca7b959e9 not found: ID does not exist" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.152669 4982 scope.go:117] "RemoveContainer" containerID="c3a636d63ba2e53d2d712f3848b411ebfb5dbc8bb7b78edf946402da149b87b2" Jan 22 05:52:16 crc kubenswrapper[4982]: E0122 05:52:16.153135 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3a636d63ba2e53d2d712f3848b411ebfb5dbc8bb7b78edf946402da149b87b2\": container with ID starting with c3a636d63ba2e53d2d712f3848b411ebfb5dbc8bb7b78edf946402da149b87b2 not found: ID does not exist" containerID="c3a636d63ba2e53d2d712f3848b411ebfb5dbc8bb7b78edf946402da149b87b2" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.153185 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3a636d63ba2e53d2d712f3848b411ebfb5dbc8bb7b78edf946402da149b87b2"} err="failed to get container status \"c3a636d63ba2e53d2d712f3848b411ebfb5dbc8bb7b78edf946402da149b87b2\": rpc error: code = NotFound desc = could not find container \"c3a636d63ba2e53d2d712f3848b411ebfb5dbc8bb7b78edf946402da149b87b2\": container with ID starting with c3a636d63ba2e53d2d712f3848b411ebfb5dbc8bb7b78edf946402da149b87b2 not found: ID does not exist" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.153203 4982 scope.go:117] "RemoveContainer" containerID="8c6f09d2c0423107eaad3a151c98e934f5f2c8358e7216f7e0a9cc85bb5346fb" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.165383 4982 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b3854c73-a5eb-4db0-8f25-ecdf90993761-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.165410 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b4ebfeb-4674-45e8-952b-225d022d3392-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.165421 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27d44766-c0c7-48dd-b00d-0fc5adeb8707-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.165433 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6dcqf\" (UniqueName: \"kubernetes.io/projected/27d44766-c0c7-48dd-b00d-0fc5adeb8707-kube-api-access-6dcqf\") on node \"crc\" DevicePath \"\"" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.165443 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27d44766-c0c7-48dd-b00d-0fc5adeb8707-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.165453 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ljc5c\" (UniqueName: \"kubernetes.io/projected/b3854c73-a5eb-4db0-8f25-ecdf90993761-kube-api-access-ljc5c\") on node \"crc\" DevicePath \"\"" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.175813 4982 scope.go:117] "RemoveContainer" containerID="e4f6c83adadfe84b22c7c76981cfa7ed1f482f65dfcad90d8132a5e5f9903cbd" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.201014 4982 scope.go:117] "RemoveContainer" containerID="2a47518ad377ec2433496e3b1dd0f465b5f1380c5a977154e7df8175ab54d8b4" Jan 22 05:52:16 crc 
kubenswrapper[4982]: I0122 05:52:16.233600 4982 scope.go:117] "RemoveContainer" containerID="8c6f09d2c0423107eaad3a151c98e934f5f2c8358e7216f7e0a9cc85bb5346fb" Jan 22 05:52:16 crc kubenswrapper[4982]: E0122 05:52:16.234529 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c6f09d2c0423107eaad3a151c98e934f5f2c8358e7216f7e0a9cc85bb5346fb\": container with ID starting with 8c6f09d2c0423107eaad3a151c98e934f5f2c8358e7216f7e0a9cc85bb5346fb not found: ID does not exist" containerID="8c6f09d2c0423107eaad3a151c98e934f5f2c8358e7216f7e0a9cc85bb5346fb" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.234590 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c6f09d2c0423107eaad3a151c98e934f5f2c8358e7216f7e0a9cc85bb5346fb"} err="failed to get container status \"8c6f09d2c0423107eaad3a151c98e934f5f2c8358e7216f7e0a9cc85bb5346fb\": rpc error: code = NotFound desc = could not find container \"8c6f09d2c0423107eaad3a151c98e934f5f2c8358e7216f7e0a9cc85bb5346fb\": container with ID starting with 8c6f09d2c0423107eaad3a151c98e934f5f2c8358e7216f7e0a9cc85bb5346fb not found: ID does not exist" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.234627 4982 scope.go:117] "RemoveContainer" containerID="e4f6c83adadfe84b22c7c76981cfa7ed1f482f65dfcad90d8132a5e5f9903cbd" Jan 22 05:52:16 crc kubenswrapper[4982]: E0122 05:52:16.235409 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4f6c83adadfe84b22c7c76981cfa7ed1f482f65dfcad90d8132a5e5f9903cbd\": container with ID starting with e4f6c83adadfe84b22c7c76981cfa7ed1f482f65dfcad90d8132a5e5f9903cbd not found: ID does not exist" containerID="e4f6c83adadfe84b22c7c76981cfa7ed1f482f65dfcad90d8132a5e5f9903cbd" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.235453 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4f6c83adadfe84b22c7c76981cfa7ed1f482f65dfcad90d8132a5e5f9903cbd"} err="failed to get container status \"e4f6c83adadfe84b22c7c76981cfa7ed1f482f65dfcad90d8132a5e5f9903cbd\": rpc error: code = NotFound desc = could not find container \"e4f6c83adadfe84b22c7c76981cfa7ed1f482f65dfcad90d8132a5e5f9903cbd\": container with ID starting with e4f6c83adadfe84b22c7c76981cfa7ed1f482f65dfcad90d8132a5e5f9903cbd not found: ID does not exist" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.235485 4982 scope.go:117] "RemoveContainer" containerID="2a47518ad377ec2433496e3b1dd0f465b5f1380c5a977154e7df8175ab54d8b4" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.236485 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zt96v"] Jan 22 05:52:16 crc kubenswrapper[4982]: E0122 05:52:16.236892 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a47518ad377ec2433496e3b1dd0f465b5f1380c5a977154e7df8175ab54d8b4\": container with ID starting with 2a47518ad377ec2433496e3b1dd0f465b5f1380c5a977154e7df8175ab54d8b4 not found: ID does not exist" containerID="2a47518ad377ec2433496e3b1dd0f465b5f1380c5a977154e7df8175ab54d8b4" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.236936 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a47518ad377ec2433496e3b1dd0f465b5f1380c5a977154e7df8175ab54d8b4"} err="failed to get container status 
\"2a47518ad377ec2433496e3b1dd0f465b5f1380c5a977154e7df8175ab54d8b4\": rpc error: code = NotFound desc = could not find container \"2a47518ad377ec2433496e3b1dd0f465b5f1380c5a977154e7df8175ab54d8b4\": container with ID starting with 2a47518ad377ec2433496e3b1dd0f465b5f1380c5a977154e7df8175ab54d8b4 not found: ID does not exist" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.236961 4982 scope.go:117] "RemoveContainer" containerID="9c0f9c2acf0cc5fbcd6047512e63463f3394e05ee96d61392f91bd6189f33d02" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.250260 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-zt96v"] Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.256402 4982 scope.go:117] "RemoveContainer" containerID="a1c7cd30af4325c653a237e1172906e316cb8202cbefce4387925366be3df966" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.260444 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rvmwc"] Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.269604 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rvmwc"] Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.277258 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dw5wf"] Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.282275 4982 scope.go:117] "RemoveContainer" containerID="8897b74983b7660c70e61725afa3b551c7bc9f094161664415f1b6651e5aa312" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.284155 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-dw5wf"] Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.303473 4982 scope.go:117] "RemoveContainer" containerID="9c0f9c2acf0cc5fbcd6047512e63463f3394e05ee96d61392f91bd6189f33d02" Jan 22 05:52:16 crc kubenswrapper[4982]: E0122 05:52:16.304380 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c0f9c2acf0cc5fbcd6047512e63463f3394e05ee96d61392f91bd6189f33d02\": container with ID starting with 9c0f9c2acf0cc5fbcd6047512e63463f3394e05ee96d61392f91bd6189f33d02 not found: ID does not exist" containerID="9c0f9c2acf0cc5fbcd6047512e63463f3394e05ee96d61392f91bd6189f33d02" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.304438 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c0f9c2acf0cc5fbcd6047512e63463f3394e05ee96d61392f91bd6189f33d02"} err="failed to get container status \"9c0f9c2acf0cc5fbcd6047512e63463f3394e05ee96d61392f91bd6189f33d02\": rpc error: code = NotFound desc = could not find container \"9c0f9c2acf0cc5fbcd6047512e63463f3394e05ee96d61392f91bd6189f33d02\": container with ID starting with 9c0f9c2acf0cc5fbcd6047512e63463f3394e05ee96d61392f91bd6189f33d02 not found: ID does not exist" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.304477 4982 scope.go:117] "RemoveContainer" containerID="a1c7cd30af4325c653a237e1172906e316cb8202cbefce4387925366be3df966" Jan 22 05:52:16 crc kubenswrapper[4982]: E0122 05:52:16.305131 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1c7cd30af4325c653a237e1172906e316cb8202cbefce4387925366be3df966\": container with ID starting with a1c7cd30af4325c653a237e1172906e316cb8202cbefce4387925366be3df966 not found: ID does not exist" 
containerID="a1c7cd30af4325c653a237e1172906e316cb8202cbefce4387925366be3df966" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.305210 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1c7cd30af4325c653a237e1172906e316cb8202cbefce4387925366be3df966"} err="failed to get container status \"a1c7cd30af4325c653a237e1172906e316cb8202cbefce4387925366be3df966\": rpc error: code = NotFound desc = could not find container \"a1c7cd30af4325c653a237e1172906e316cb8202cbefce4387925366be3df966\": container with ID starting with a1c7cd30af4325c653a237e1172906e316cb8202cbefce4387925366be3df966 not found: ID does not exist" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.305288 4982 scope.go:117] "RemoveContainer" containerID="8897b74983b7660c70e61725afa3b551c7bc9f094161664415f1b6651e5aa312" Jan 22 05:52:16 crc kubenswrapper[4982]: E0122 05:52:16.305818 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8897b74983b7660c70e61725afa3b551c7bc9f094161664415f1b6651e5aa312\": container with ID starting with 8897b74983b7660c70e61725afa3b551c7bc9f094161664415f1b6651e5aa312 not found: ID does not exist" containerID="8897b74983b7660c70e61725afa3b551c7bc9f094161664415f1b6651e5aa312" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.305925 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8897b74983b7660c70e61725afa3b551c7bc9f094161664415f1b6651e5aa312"} err="failed to get container status \"8897b74983b7660c70e61725afa3b551c7bc9f094161664415f1b6651e5aa312\": rpc error: code = NotFound desc = could not find container \"8897b74983b7660c70e61725afa3b551c7bc9f094161664415f1b6651e5aa312\": container with ID starting with 8897b74983b7660c70e61725afa3b551c7bc9f094161664415f1b6651e5aa312 not found: ID does not exist" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.927538 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-ghnt5" event={"ID":"4f40a1f2-f50f-4898-b3d1-c2b00fd0606c","Type":"ContainerStarted","Data":"ba90a54ce600fa9945482c364ab4bfe85dd37a8095b14bd526b31e254549ff4d"} Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.928207 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-ghnt5" event={"ID":"4f40a1f2-f50f-4898-b3d1-c2b00fd0606c","Type":"ContainerStarted","Data":"805a82dc9e35132cd1f0caca8a013340696f4126c75cde6b5739b3ad5ab9ad89"} Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.928269 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-ghnt5" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.932116 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-ghnt5" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.938539 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-xgblw" event={"ID":"b3854c73-a5eb-4db0-8f25-ecdf90993761","Type":"ContainerDied","Data":"412c3e3a46222e9ba6f3e1e20451d2635cc22ec389e0171cdd63ccd74ce25dc0"} Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.938713 4982 scope.go:117] "RemoveContainer" containerID="377086b49c983b259442fa1c81950fcac3fbb2aaae0a1c044e59d12e5ba25d56" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.938985 
4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-xgblw" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.953426 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-ghnt5" podStartSLOduration=2.953402475 podStartE2EDuration="2.953402475s" podCreationTimestamp="2026-01-22 05:52:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:52:16.947677216 +0000 UTC m=+397.786315219" watchObservedRunningTime="2026-01-22 05:52:16.953402475 +0000 UTC m=+397.792040468" Jan 22 05:52:16 crc kubenswrapper[4982]: I0122 05:52:16.997568 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xgblw"] Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.005194 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xgblw"] Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.128905 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nqnzs"] Jan 22 05:52:17 crc kubenswrapper[4982]: E0122 05:52:17.129245 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27d44766-c0c7-48dd-b00d-0fc5adeb8707" containerName="extract-utilities" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.129268 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="27d44766-c0c7-48dd-b00d-0fc5adeb8707" containerName="extract-utilities" Jan 22 05:52:17 crc kubenswrapper[4982]: E0122 05:52:17.129288 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15dc31d8-8456-4ad8-859d-b34c6a85522a" containerName="extract-content" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.129301 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="15dc31d8-8456-4ad8-859d-b34c6a85522a" containerName="extract-content" Jan 22 05:52:17 crc kubenswrapper[4982]: E0122 05:52:17.129324 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8d13857-ec18-4e5c-aadf-61479ff0b6c2" containerName="extract-utilities" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.129337 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8d13857-ec18-4e5c-aadf-61479ff0b6c2" containerName="extract-utilities" Jan 22 05:52:17 crc kubenswrapper[4982]: E0122 05:52:17.129352 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3854c73-a5eb-4db0-8f25-ecdf90993761" containerName="marketplace-operator" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.129367 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3854c73-a5eb-4db0-8f25-ecdf90993761" containerName="marketplace-operator" Jan 22 05:52:17 crc kubenswrapper[4982]: E0122 05:52:17.129381 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8d13857-ec18-4e5c-aadf-61479ff0b6c2" containerName="extract-content" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.129393 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8d13857-ec18-4e5c-aadf-61479ff0b6c2" containerName="extract-content" Jan 22 05:52:17 crc kubenswrapper[4982]: E0122 05:52:17.129412 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b4ebfeb-4674-45e8-952b-225d022d3392" containerName="extract-content" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.129424 4982 
state_mem.go:107] "Deleted CPUSet assignment" podUID="0b4ebfeb-4674-45e8-952b-225d022d3392" containerName="extract-content" Jan 22 05:52:17 crc kubenswrapper[4982]: E0122 05:52:17.129440 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15dc31d8-8456-4ad8-859d-b34c6a85522a" containerName="extract-utilities" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.129452 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="15dc31d8-8456-4ad8-859d-b34c6a85522a" containerName="extract-utilities" Jan 22 05:52:17 crc kubenswrapper[4982]: E0122 05:52:17.129467 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b4ebfeb-4674-45e8-952b-225d022d3392" containerName="registry-server" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.129479 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b4ebfeb-4674-45e8-952b-225d022d3392" containerName="registry-server" Jan 22 05:52:17 crc kubenswrapper[4982]: E0122 05:52:17.129494 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15dc31d8-8456-4ad8-859d-b34c6a85522a" containerName="registry-server" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.129505 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="15dc31d8-8456-4ad8-859d-b34c6a85522a" containerName="registry-server" Jan 22 05:52:17 crc kubenswrapper[4982]: E0122 05:52:17.129523 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8d13857-ec18-4e5c-aadf-61479ff0b6c2" containerName="registry-server" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.129540 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8d13857-ec18-4e5c-aadf-61479ff0b6c2" containerName="registry-server" Jan 22 05:52:17 crc kubenswrapper[4982]: E0122 05:52:17.129567 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b4ebfeb-4674-45e8-952b-225d022d3392" containerName="extract-utilities" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.129587 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b4ebfeb-4674-45e8-952b-225d022d3392" containerName="extract-utilities" Jan 22 05:52:17 crc kubenswrapper[4982]: E0122 05:52:17.129611 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27d44766-c0c7-48dd-b00d-0fc5adeb8707" containerName="registry-server" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.129626 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="27d44766-c0c7-48dd-b00d-0fc5adeb8707" containerName="registry-server" Jan 22 05:52:17 crc kubenswrapper[4982]: E0122 05:52:17.129645 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27d44766-c0c7-48dd-b00d-0fc5adeb8707" containerName="extract-content" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.129657 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="27d44766-c0c7-48dd-b00d-0fc5adeb8707" containerName="extract-content" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.129809 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8d13857-ec18-4e5c-aadf-61479ff0b6c2" containerName="registry-server" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.129837 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3854c73-a5eb-4db0-8f25-ecdf90993761" containerName="marketplace-operator" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.129884 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="15dc31d8-8456-4ad8-859d-b34c6a85522a" containerName="registry-server" Jan 22 05:52:17 crc 
kubenswrapper[4982]: I0122 05:52:17.129907 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b4ebfeb-4674-45e8-952b-225d022d3392" containerName="registry-server" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.129926 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="27d44766-c0c7-48dd-b00d-0fc5adeb8707" containerName="registry-server" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.131266 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nqnzs" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.135412 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.143468 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nqnzs"] Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.184310 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7bf4b1c6-51f5-483c-a175-d197751eeb6d-utilities\") pod \"redhat-marketplace-nqnzs\" (UID: \"7bf4b1c6-51f5-483c-a175-d197751eeb6d\") " pod="openshift-marketplace/redhat-marketplace-nqnzs" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.184480 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72dxp\" (UniqueName: \"kubernetes.io/projected/7bf4b1c6-51f5-483c-a175-d197751eeb6d-kube-api-access-72dxp\") pod \"redhat-marketplace-nqnzs\" (UID: \"7bf4b1c6-51f5-483c-a175-d197751eeb6d\") " pod="openshift-marketplace/redhat-marketplace-nqnzs" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.184587 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7bf4b1c6-51f5-483c-a175-d197751eeb6d-catalog-content\") pod \"redhat-marketplace-nqnzs\" (UID: \"7bf4b1c6-51f5-483c-a175-d197751eeb6d\") " pod="openshift-marketplace/redhat-marketplace-nqnzs" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.286105 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-72dxp\" (UniqueName: \"kubernetes.io/projected/7bf4b1c6-51f5-483c-a175-d197751eeb6d-kube-api-access-72dxp\") pod \"redhat-marketplace-nqnzs\" (UID: \"7bf4b1c6-51f5-483c-a175-d197751eeb6d\") " pod="openshift-marketplace/redhat-marketplace-nqnzs" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.286182 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7bf4b1c6-51f5-483c-a175-d197751eeb6d-catalog-content\") pod \"redhat-marketplace-nqnzs\" (UID: \"7bf4b1c6-51f5-483c-a175-d197751eeb6d\") " pod="openshift-marketplace/redhat-marketplace-nqnzs" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.286228 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7bf4b1c6-51f5-483c-a175-d197751eeb6d-utilities\") pod \"redhat-marketplace-nqnzs\" (UID: \"7bf4b1c6-51f5-483c-a175-d197751eeb6d\") " pod="openshift-marketplace/redhat-marketplace-nqnzs" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.286727 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/7bf4b1c6-51f5-483c-a175-d197751eeb6d-catalog-content\") pod \"redhat-marketplace-nqnzs\" (UID: \"7bf4b1c6-51f5-483c-a175-d197751eeb6d\") " pod="openshift-marketplace/redhat-marketplace-nqnzs" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.286791 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7bf4b1c6-51f5-483c-a175-d197751eeb6d-utilities\") pod \"redhat-marketplace-nqnzs\" (UID: \"7bf4b1c6-51f5-483c-a175-d197751eeb6d\") " pod="openshift-marketplace/redhat-marketplace-nqnzs" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.324178 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-72dxp\" (UniqueName: \"kubernetes.io/projected/7bf4b1c6-51f5-483c-a175-d197751eeb6d-kube-api-access-72dxp\") pod \"redhat-marketplace-nqnzs\" (UID: \"7bf4b1c6-51f5-483c-a175-d197751eeb6d\") " pod="openshift-marketplace/redhat-marketplace-nqnzs" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.338485 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nh679"] Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.339941 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nh679" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.342344 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.347816 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nh679"] Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.388013 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/702f20a9-dc10-4e71-8ba8-1f8f180af1be-utilities\") pod \"certified-operators-nh679\" (UID: \"702f20a9-dc10-4e71-8ba8-1f8f180af1be\") " pod="openshift-marketplace/certified-operators-nh679" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.388072 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/702f20a9-dc10-4e71-8ba8-1f8f180af1be-catalog-content\") pod \"certified-operators-nh679\" (UID: \"702f20a9-dc10-4e71-8ba8-1f8f180af1be\") " pod="openshift-marketplace/certified-operators-nh679" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.388113 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wn8wg\" (UniqueName: \"kubernetes.io/projected/702f20a9-dc10-4e71-8ba8-1f8f180af1be-kube-api-access-wn8wg\") pod \"certified-operators-nh679\" (UID: \"702f20a9-dc10-4e71-8ba8-1f8f180af1be\") " pod="openshift-marketplace/certified-operators-nh679" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.448638 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nqnzs" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.489892 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wn8wg\" (UniqueName: \"kubernetes.io/projected/702f20a9-dc10-4e71-8ba8-1f8f180af1be-kube-api-access-wn8wg\") pod \"certified-operators-nh679\" (UID: \"702f20a9-dc10-4e71-8ba8-1f8f180af1be\") " pod="openshift-marketplace/certified-operators-nh679" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.490326 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/702f20a9-dc10-4e71-8ba8-1f8f180af1be-utilities\") pod \"certified-operators-nh679\" (UID: \"702f20a9-dc10-4e71-8ba8-1f8f180af1be\") " pod="openshift-marketplace/certified-operators-nh679" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.490479 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/702f20a9-dc10-4e71-8ba8-1f8f180af1be-catalog-content\") pod \"certified-operators-nh679\" (UID: \"702f20a9-dc10-4e71-8ba8-1f8f180af1be\") " pod="openshift-marketplace/certified-operators-nh679" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.515806 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/702f20a9-dc10-4e71-8ba8-1f8f180af1be-catalog-content\") pod \"certified-operators-nh679\" (UID: \"702f20a9-dc10-4e71-8ba8-1f8f180af1be\") " pod="openshift-marketplace/certified-operators-nh679" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.517545 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/702f20a9-dc10-4e71-8ba8-1f8f180af1be-utilities\") pod \"certified-operators-nh679\" (UID: \"702f20a9-dc10-4e71-8ba8-1f8f180af1be\") " pod="openshift-marketplace/certified-operators-nh679" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.536653 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wn8wg\" (UniqueName: \"kubernetes.io/projected/702f20a9-dc10-4e71-8ba8-1f8f180af1be-kube-api-access-wn8wg\") pod \"certified-operators-nh679\" (UID: \"702f20a9-dc10-4e71-8ba8-1f8f180af1be\") " pod="openshift-marketplace/certified-operators-nh679" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.665749 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nh679" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.726350 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b4ebfeb-4674-45e8-952b-225d022d3392" path="/var/lib/kubelet/pods/0b4ebfeb-4674-45e8-952b-225d022d3392/volumes" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.727304 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15dc31d8-8456-4ad8-859d-b34c6a85522a" path="/var/lib/kubelet/pods/15dc31d8-8456-4ad8-859d-b34c6a85522a/volumes" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.728034 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27d44766-c0c7-48dd-b00d-0fc5adeb8707" path="/var/lib/kubelet/pods/27d44766-c0c7-48dd-b00d-0fc5adeb8707/volumes" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.729892 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3854c73-a5eb-4db0-8f25-ecdf90993761" path="/var/lib/kubelet/pods/b3854c73-a5eb-4db0-8f25-ecdf90993761/volumes" Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.730336 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8d13857-ec18-4e5c-aadf-61479ff0b6c2" path="/var/lib/kubelet/pods/d8d13857-ec18-4e5c-aadf-61479ff0b6c2/volumes" Jan 22 05:52:17 crc kubenswrapper[4982]: W0122 05:52:17.897168 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7bf4b1c6_51f5_483c_a175_d197751eeb6d.slice/crio-85daac53a4f23e1db56618c03406a616fdaf9ba40fb518569d951fe0abbb64e0 WatchSource:0}: Error finding container 85daac53a4f23e1db56618c03406a616fdaf9ba40fb518569d951fe0abbb64e0: Status 404 returned error can't find the container with id 85daac53a4f23e1db56618c03406a616fdaf9ba40fb518569d951fe0abbb64e0 Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.901836 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nqnzs"] Jan 22 05:52:17 crc kubenswrapper[4982]: I0122 05:52:17.949790 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nqnzs" event={"ID":"7bf4b1c6-51f5-483c-a175-d197751eeb6d","Type":"ContainerStarted","Data":"85daac53a4f23e1db56618c03406a616fdaf9ba40fb518569d951fe0abbb64e0"} Jan 22 05:52:18 crc kubenswrapper[4982]: I0122 05:52:18.141807 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nh679"] Jan 22 05:52:18 crc kubenswrapper[4982]: W0122 05:52:18.171655 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod702f20a9_dc10_4e71_8ba8_1f8f180af1be.slice/crio-1817de939d0c1f4f859dc4707b72da4f82da1a229a54c00be497e5c95b1fb512 WatchSource:0}: Error finding container 1817de939d0c1f4f859dc4707b72da4f82da1a229a54c00be497e5c95b1fb512: Status 404 returned error can't find the container with id 1817de939d0c1f4f859dc4707b72da4f82da1a229a54c00be497e5c95b1fb512 Jan 22 05:52:18 crc kubenswrapper[4982]: I0122 05:52:18.956752 4982 generic.go:334] "Generic (PLEG): container finished" podID="7bf4b1c6-51f5-483c-a175-d197751eeb6d" containerID="39cb9adaffe304c9c73ca0e774dedb0343d283c8ab83f3ebb1519a63e115a84f" exitCode=0 Jan 22 05:52:18 crc kubenswrapper[4982]: I0122 05:52:18.956838 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nqnzs" 
event={"ID":"7bf4b1c6-51f5-483c-a175-d197751eeb6d","Type":"ContainerDied","Data":"39cb9adaffe304c9c73ca0e774dedb0343d283c8ab83f3ebb1519a63e115a84f"} Jan 22 05:52:18 crc kubenswrapper[4982]: I0122 05:52:18.959592 4982 generic.go:334] "Generic (PLEG): container finished" podID="702f20a9-dc10-4e71-8ba8-1f8f180af1be" containerID="552b3b353dc79516da89ad4c5569aa7f9831cf166ed66a3376844696257e85eb" exitCode=0 Jan 22 05:52:18 crc kubenswrapper[4982]: I0122 05:52:18.960282 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nh679" event={"ID":"702f20a9-dc10-4e71-8ba8-1f8f180af1be","Type":"ContainerDied","Data":"552b3b353dc79516da89ad4c5569aa7f9831cf166ed66a3376844696257e85eb"} Jan 22 05:52:18 crc kubenswrapper[4982]: I0122 05:52:18.960315 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nh679" event={"ID":"702f20a9-dc10-4e71-8ba8-1f8f180af1be","Type":"ContainerStarted","Data":"1817de939d0c1f4f859dc4707b72da4f82da1a229a54c00be497e5c95b1fb512"} Jan 22 05:52:18 crc kubenswrapper[4982]: I0122 05:52:18.973456 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 05:52:18 crc kubenswrapper[4982]: I0122 05:52:18.973498 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 05:52:19 crc kubenswrapper[4982]: I0122 05:52:19.528422 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2jtbg"] Jan 22 05:52:19 crc kubenswrapper[4982]: I0122 05:52:19.531268 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2jtbg" Jan 22 05:52:19 crc kubenswrapper[4982]: I0122 05:52:19.538729 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 22 05:52:19 crc kubenswrapper[4982]: I0122 05:52:19.546906 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2jtbg"] Jan 22 05:52:19 crc kubenswrapper[4982]: I0122 05:52:19.639202 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b6817e1-b502-4ace-aa86-0de7561ae865-utilities\") pod \"redhat-operators-2jtbg\" (UID: \"1b6817e1-b502-4ace-aa86-0de7561ae865\") " pod="openshift-marketplace/redhat-operators-2jtbg" Jan 22 05:52:19 crc kubenswrapper[4982]: I0122 05:52:19.639277 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbqtc\" (UniqueName: \"kubernetes.io/projected/1b6817e1-b502-4ace-aa86-0de7561ae865-kube-api-access-qbqtc\") pod \"redhat-operators-2jtbg\" (UID: \"1b6817e1-b502-4ace-aa86-0de7561ae865\") " pod="openshift-marketplace/redhat-operators-2jtbg" Jan 22 05:52:19 crc kubenswrapper[4982]: I0122 05:52:19.639411 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b6817e1-b502-4ace-aa86-0de7561ae865-catalog-content\") pod \"redhat-operators-2jtbg\" (UID: \"1b6817e1-b502-4ace-aa86-0de7561ae865\") " pod="openshift-marketplace/redhat-operators-2jtbg" Jan 22 05:52:19 crc kubenswrapper[4982]: I0122 05:52:19.741537 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b6817e1-b502-4ace-aa86-0de7561ae865-catalog-content\") pod \"redhat-operators-2jtbg\" (UID: \"1b6817e1-b502-4ace-aa86-0de7561ae865\") " pod="openshift-marketplace/redhat-operators-2jtbg" Jan 22 05:52:19 crc kubenswrapper[4982]: I0122 05:52:19.741700 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b6817e1-b502-4ace-aa86-0de7561ae865-utilities\") pod \"redhat-operators-2jtbg\" (UID: \"1b6817e1-b502-4ace-aa86-0de7561ae865\") " pod="openshift-marketplace/redhat-operators-2jtbg" Jan 22 05:52:19 crc kubenswrapper[4982]: I0122 05:52:19.741788 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbqtc\" (UniqueName: \"kubernetes.io/projected/1b6817e1-b502-4ace-aa86-0de7561ae865-kube-api-access-qbqtc\") pod \"redhat-operators-2jtbg\" (UID: \"1b6817e1-b502-4ace-aa86-0de7561ae865\") " pod="openshift-marketplace/redhat-operators-2jtbg" Jan 22 05:52:19 crc kubenswrapper[4982]: I0122 05:52:19.743456 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b6817e1-b502-4ace-aa86-0de7561ae865-utilities\") pod \"redhat-operators-2jtbg\" (UID: \"1b6817e1-b502-4ace-aa86-0de7561ae865\") " pod="openshift-marketplace/redhat-operators-2jtbg" Jan 22 05:52:19 crc kubenswrapper[4982]: I0122 05:52:19.743756 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b6817e1-b502-4ace-aa86-0de7561ae865-catalog-content\") pod \"redhat-operators-2jtbg\" (UID: \"1b6817e1-b502-4ace-aa86-0de7561ae865\") " 
pod="openshift-marketplace/redhat-operators-2jtbg" Jan 22 05:52:19 crc kubenswrapper[4982]: I0122 05:52:19.763508 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-85lr9"] Jan 22 05:52:19 crc kubenswrapper[4982]: I0122 05:52:19.766402 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-85lr9"] Jan 22 05:52:19 crc kubenswrapper[4982]: I0122 05:52:19.766569 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-85lr9" Jan 22 05:52:19 crc kubenswrapper[4982]: I0122 05:52:19.771552 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 22 05:52:19 crc kubenswrapper[4982]: I0122 05:52:19.785712 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbqtc\" (UniqueName: \"kubernetes.io/projected/1b6817e1-b502-4ace-aa86-0de7561ae865-kube-api-access-qbqtc\") pod \"redhat-operators-2jtbg\" (UID: \"1b6817e1-b502-4ace-aa86-0de7561ae865\") " pod="openshift-marketplace/redhat-operators-2jtbg" Jan 22 05:52:19 crc kubenswrapper[4982]: I0122 05:52:19.842891 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5afe272a-253c-4286-b86b-fc78a1cbb21d-catalog-content\") pod \"community-operators-85lr9\" (UID: \"5afe272a-253c-4286-b86b-fc78a1cbb21d\") " pod="openshift-marketplace/community-operators-85lr9" Jan 22 05:52:19 crc kubenswrapper[4982]: I0122 05:52:19.842980 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vff77\" (UniqueName: \"kubernetes.io/projected/5afe272a-253c-4286-b86b-fc78a1cbb21d-kube-api-access-vff77\") pod \"community-operators-85lr9\" (UID: \"5afe272a-253c-4286-b86b-fc78a1cbb21d\") " pod="openshift-marketplace/community-operators-85lr9" Jan 22 05:52:19 crc kubenswrapper[4982]: I0122 05:52:19.843056 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5afe272a-253c-4286-b86b-fc78a1cbb21d-utilities\") pod \"community-operators-85lr9\" (UID: \"5afe272a-253c-4286-b86b-fc78a1cbb21d\") " pod="openshift-marketplace/community-operators-85lr9" Jan 22 05:52:19 crc kubenswrapper[4982]: I0122 05:52:19.855637 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2jtbg" Jan 22 05:52:19 crc kubenswrapper[4982]: I0122 05:52:19.944662 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5afe272a-253c-4286-b86b-fc78a1cbb21d-utilities\") pod \"community-operators-85lr9\" (UID: \"5afe272a-253c-4286-b86b-fc78a1cbb21d\") " pod="openshift-marketplace/community-operators-85lr9" Jan 22 05:52:19 crc kubenswrapper[4982]: I0122 05:52:19.945003 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5afe272a-253c-4286-b86b-fc78a1cbb21d-catalog-content\") pod \"community-operators-85lr9\" (UID: \"5afe272a-253c-4286-b86b-fc78a1cbb21d\") " pod="openshift-marketplace/community-operators-85lr9" Jan 22 05:52:19 crc kubenswrapper[4982]: I0122 05:52:19.945045 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vff77\" (UniqueName: \"kubernetes.io/projected/5afe272a-253c-4286-b86b-fc78a1cbb21d-kube-api-access-vff77\") pod \"community-operators-85lr9\" (UID: \"5afe272a-253c-4286-b86b-fc78a1cbb21d\") " pod="openshift-marketplace/community-operators-85lr9" Jan 22 05:52:19 crc kubenswrapper[4982]: I0122 05:52:19.945356 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5afe272a-253c-4286-b86b-fc78a1cbb21d-utilities\") pod \"community-operators-85lr9\" (UID: \"5afe272a-253c-4286-b86b-fc78a1cbb21d\") " pod="openshift-marketplace/community-operators-85lr9" Jan 22 05:52:19 crc kubenswrapper[4982]: I0122 05:52:19.945639 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5afe272a-253c-4286-b86b-fc78a1cbb21d-catalog-content\") pod \"community-operators-85lr9\" (UID: \"5afe272a-253c-4286-b86b-fc78a1cbb21d\") " pod="openshift-marketplace/community-operators-85lr9" Jan 22 05:52:19 crc kubenswrapper[4982]: I0122 05:52:19.973762 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vff77\" (UniqueName: \"kubernetes.io/projected/5afe272a-253c-4286-b86b-fc78a1cbb21d-kube-api-access-vff77\") pod \"community-operators-85lr9\" (UID: \"5afe272a-253c-4286-b86b-fc78a1cbb21d\") " pod="openshift-marketplace/community-operators-85lr9" Jan 22 05:52:19 crc kubenswrapper[4982]: I0122 05:52:19.978574 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nqnzs" event={"ID":"7bf4b1c6-51f5-483c-a175-d197751eeb6d","Type":"ContainerStarted","Data":"cf24cc5fc3ad35217af62acfe977d563567df38e2e1333f0496012368b37ecf9"} Jan 22 05:52:19 crc kubenswrapper[4982]: I0122 05:52:19.985349 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nh679" event={"ID":"702f20a9-dc10-4e71-8ba8-1f8f180af1be","Type":"ContainerStarted","Data":"b3488b2d0232ad17dc2d8c01077145d701ec0827e22f1fae6a5441b60c54cefe"} Jan 22 05:52:20 crc kubenswrapper[4982]: I0122 05:52:20.130979 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-85lr9" Jan 22 05:52:20 crc kubenswrapper[4982]: I0122 05:52:20.325927 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2jtbg"] Jan 22 05:52:20 crc kubenswrapper[4982]: I0122 05:52:20.565436 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-85lr9"] Jan 22 05:52:20 crc kubenswrapper[4982]: W0122 05:52:20.571806 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5afe272a_253c_4286_b86b_fc78a1cbb21d.slice/crio-12ef494cf3ac190fec42db75767cb4715c040fb3616e02200c156248dfcc0a0b WatchSource:0}: Error finding container 12ef494cf3ac190fec42db75767cb4715c040fb3616e02200c156248dfcc0a0b: Status 404 returned error can't find the container with id 12ef494cf3ac190fec42db75767cb4715c040fb3616e02200c156248dfcc0a0b Jan 22 05:52:20 crc kubenswrapper[4982]: I0122 05:52:20.993318 4982 generic.go:334] "Generic (PLEG): container finished" podID="5afe272a-253c-4286-b86b-fc78a1cbb21d" containerID="b2c4aee486ece5927491b5b3762a73e4004c2578b28fae105a249eaa9023fd26" exitCode=0 Jan 22 05:52:20 crc kubenswrapper[4982]: I0122 05:52:20.993419 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-85lr9" event={"ID":"5afe272a-253c-4286-b86b-fc78a1cbb21d","Type":"ContainerDied","Data":"b2c4aee486ece5927491b5b3762a73e4004c2578b28fae105a249eaa9023fd26"} Jan 22 05:52:20 crc kubenswrapper[4982]: I0122 05:52:20.993798 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-85lr9" event={"ID":"5afe272a-253c-4286-b86b-fc78a1cbb21d","Type":"ContainerStarted","Data":"12ef494cf3ac190fec42db75767cb4715c040fb3616e02200c156248dfcc0a0b"} Jan 22 05:52:21 crc kubenswrapper[4982]: I0122 05:52:21.001190 4982 generic.go:334] "Generic (PLEG): container finished" podID="702f20a9-dc10-4e71-8ba8-1f8f180af1be" containerID="b3488b2d0232ad17dc2d8c01077145d701ec0827e22f1fae6a5441b60c54cefe" exitCode=0 Jan 22 05:52:21 crc kubenswrapper[4982]: I0122 05:52:21.001304 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nh679" event={"ID":"702f20a9-dc10-4e71-8ba8-1f8f180af1be","Type":"ContainerDied","Data":"b3488b2d0232ad17dc2d8c01077145d701ec0827e22f1fae6a5441b60c54cefe"} Jan 22 05:52:21 crc kubenswrapper[4982]: I0122 05:52:21.002974 4982 generic.go:334] "Generic (PLEG): container finished" podID="1b6817e1-b502-4ace-aa86-0de7561ae865" containerID="adbe9fc3317a17013f224765e10d9478ac80e606182d7f802e62cc5d8693f438" exitCode=0 Jan 22 05:52:21 crc kubenswrapper[4982]: I0122 05:52:21.003055 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2jtbg" event={"ID":"1b6817e1-b502-4ace-aa86-0de7561ae865","Type":"ContainerDied","Data":"adbe9fc3317a17013f224765e10d9478ac80e606182d7f802e62cc5d8693f438"} Jan 22 05:52:21 crc kubenswrapper[4982]: I0122 05:52:21.003090 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2jtbg" event={"ID":"1b6817e1-b502-4ace-aa86-0de7561ae865","Type":"ContainerStarted","Data":"7f2efa73bc186434e42f40457a175d9980798b922645fa40ae0250c0108a57a2"} Jan 22 05:52:21 crc kubenswrapper[4982]: I0122 05:52:21.010698 4982 generic.go:334] "Generic (PLEG): container finished" podID="7bf4b1c6-51f5-483c-a175-d197751eeb6d" 
containerID="cf24cc5fc3ad35217af62acfe977d563567df38e2e1333f0496012368b37ecf9" exitCode=0 Jan 22 05:52:21 crc kubenswrapper[4982]: I0122 05:52:21.010755 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nqnzs" event={"ID":"7bf4b1c6-51f5-483c-a175-d197751eeb6d","Type":"ContainerDied","Data":"cf24cc5fc3ad35217af62acfe977d563567df38e2e1333f0496012368b37ecf9"} Jan 22 05:52:22 crc kubenswrapper[4982]: I0122 05:52:22.018636 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2jtbg" event={"ID":"1b6817e1-b502-4ace-aa86-0de7561ae865","Type":"ContainerStarted","Data":"025962aaab514773e9551b2eed9ef2bf22cc72e479ddc02b2661f83ccc18a441"} Jan 22 05:52:22 crc kubenswrapper[4982]: I0122 05:52:22.021036 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nqnzs" event={"ID":"7bf4b1c6-51f5-483c-a175-d197751eeb6d","Type":"ContainerStarted","Data":"0c4d132c1c814fa95a95562469ad8f16dccc927647d27f38d02ae0fad6fac233"} Jan 22 05:52:22 crc kubenswrapper[4982]: I0122 05:52:22.025082 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-85lr9" event={"ID":"5afe272a-253c-4286-b86b-fc78a1cbb21d","Type":"ContainerStarted","Data":"de0254e3b6399523e23737fdf1438fb129c634d87944beae16843819ea580df1"} Jan 22 05:52:22 crc kubenswrapper[4982]: I0122 05:52:22.027101 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nh679" event={"ID":"702f20a9-dc10-4e71-8ba8-1f8f180af1be","Type":"ContainerStarted","Data":"b8c731828cacdd019c96e62f39f7f650fe3655cb3fc208202473447a1b347a10"} Jan 22 05:52:22 crc kubenswrapper[4982]: I0122 05:52:22.070022 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nh679" podStartSLOduration=2.402321306 podStartE2EDuration="5.07000428s" podCreationTimestamp="2026-01-22 05:52:17 +0000 UTC" firstStartedPulling="2026-01-22 05:52:18.962150029 +0000 UTC m=+399.800788032" lastFinishedPulling="2026-01-22 05:52:21.629833003 +0000 UTC m=+402.468471006" observedRunningTime="2026-01-22 05:52:22.068439346 +0000 UTC m=+402.907077349" watchObservedRunningTime="2026-01-22 05:52:22.07000428 +0000 UTC m=+402.908642293" Jan 22 05:52:22 crc kubenswrapper[4982]: I0122 05:52:22.124152 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-nqnzs" podStartSLOduration=2.564607976 podStartE2EDuration="5.124120167s" podCreationTimestamp="2026-01-22 05:52:17 +0000 UTC" firstStartedPulling="2026-01-22 05:52:18.959054203 +0000 UTC m=+399.797692206" lastFinishedPulling="2026-01-22 05:52:21.518566394 +0000 UTC m=+402.357204397" observedRunningTime="2026-01-22 05:52:22.10035955 +0000 UTC m=+402.938997553" watchObservedRunningTime="2026-01-22 05:52:22.124120167 +0000 UTC m=+402.962758170" Jan 22 05:52:23 crc kubenswrapper[4982]: I0122 05:52:23.035550 4982 generic.go:334] "Generic (PLEG): container finished" podID="1b6817e1-b502-4ace-aa86-0de7561ae865" containerID="025962aaab514773e9551b2eed9ef2bf22cc72e479ddc02b2661f83ccc18a441" exitCode=0 Jan 22 05:52:23 crc kubenswrapper[4982]: I0122 05:52:23.035655 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2jtbg" event={"ID":"1b6817e1-b502-4ace-aa86-0de7561ae865","Type":"ContainerDied","Data":"025962aaab514773e9551b2eed9ef2bf22cc72e479ddc02b2661f83ccc18a441"} Jan 22 05:52:23 crc 
kubenswrapper[4982]: I0122 05:52:23.049148 4982 generic.go:334] "Generic (PLEG): container finished" podID="5afe272a-253c-4286-b86b-fc78a1cbb21d" containerID="de0254e3b6399523e23737fdf1438fb129c634d87944beae16843819ea580df1" exitCode=0 Jan 22 05:52:23 crc kubenswrapper[4982]: I0122 05:52:23.049429 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-85lr9" event={"ID":"5afe272a-253c-4286-b86b-fc78a1cbb21d","Type":"ContainerDied","Data":"de0254e3b6399523e23737fdf1438fb129c634d87944beae16843819ea580df1"} Jan 22 05:52:24 crc kubenswrapper[4982]: I0122 05:52:24.057819 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-85lr9" event={"ID":"5afe272a-253c-4286-b86b-fc78a1cbb21d","Type":"ContainerStarted","Data":"cb1a54248d20f69851f0b7b9fed7853d8f741c2a333beeab85d6da35edce4e8a"} Jan 22 05:52:24 crc kubenswrapper[4982]: I0122 05:52:24.060098 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2jtbg" event={"ID":"1b6817e1-b502-4ace-aa86-0de7561ae865","Type":"ContainerStarted","Data":"7cd17dcb5abfa6f14814bac23e62194f91b569634fc357f71a228e6e2bfebe0e"} Jan 22 05:52:24 crc kubenswrapper[4982]: I0122 05:52:24.083413 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-85lr9" podStartSLOduration=2.607855006 podStartE2EDuration="5.083390403s" podCreationTimestamp="2026-01-22 05:52:19 +0000 UTC" firstStartedPulling="2026-01-22 05:52:20.996204203 +0000 UTC m=+401.834842206" lastFinishedPulling="2026-01-22 05:52:23.4717396 +0000 UTC m=+404.310377603" observedRunningTime="2026-01-22 05:52:24.080908374 +0000 UTC m=+404.919546387" watchObservedRunningTime="2026-01-22 05:52:24.083390403 +0000 UTC m=+404.922028406" Jan 22 05:52:24 crc kubenswrapper[4982]: I0122 05:52:24.138390 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2jtbg" podStartSLOduration=2.662592569 podStartE2EDuration="5.138364493s" podCreationTimestamp="2026-01-22 05:52:19 +0000 UTC" firstStartedPulling="2026-01-22 05:52:21.004189074 +0000 UTC m=+401.842827077" lastFinishedPulling="2026-01-22 05:52:23.479960978 +0000 UTC m=+404.318599001" observedRunningTime="2026-01-22 05:52:24.130387892 +0000 UTC m=+404.969025895" watchObservedRunningTime="2026-01-22 05:52:24.138364493 +0000 UTC m=+404.977002496" Jan 22 05:52:25 crc kubenswrapper[4982]: I0122 05:52:25.325324 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-trfnj" Jan 22 05:52:25 crc kubenswrapper[4982]: I0122 05:52:25.397770 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-g89zh"] Jan 22 05:52:27 crc kubenswrapper[4982]: I0122 05:52:27.449742 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nqnzs" Jan 22 05:52:27 crc kubenswrapper[4982]: I0122 05:52:27.452770 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nqnzs" Jan 22 05:52:27 crc kubenswrapper[4982]: I0122 05:52:27.510777 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nqnzs" Jan 22 05:52:27 crc kubenswrapper[4982]: I0122 05:52:27.666588 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
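
The probe transitions that follow look odd at first glance, readiness status="" sitting next to startup status="unhealthy", but they follow from startup-probe semantics: while a container's startup probe has not yet succeeded, all other probes are held back, so readiness reports an empty status; once the startup probe flips to "started", readiness probing begins and shortly reports "ready". A small state sketch of that gating (the transition strings mirror the log; the type is illustrative):

package main

import "fmt"

// probeGate is an illustrative reduction of kubelet's rule that other
// probes are disabled until the startup probe succeeds.
type probeGate struct{ started bool }

func (g *probeGate) observe(startupOK, readinessOK bool) string {
	if !g.started {
		if !startupOK {
			return `startup status="unhealthy", readiness status=""` // readiness withheld
		}
		g.started = true
		return `startup status="started"`
	}
	if readinessOK {
		return `readiness status="ready"`
	}
	return `readiness status=""`
}

func main() {
	g := &probeGate{}
	fmt.Println(g.observe(false, false)) // registry-server not yet answering its probe
	fmt.Println(g.observe(true, false))  // startup passes; readiness probing begins
	fmt.Println(g.observe(true, true))   // pod goes ready
}
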
pod="openshift-marketplace/certified-operators-nh679" Jan 22 05:52:27 crc kubenswrapper[4982]: I0122 05:52:27.666679 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nh679" Jan 22 05:52:27 crc kubenswrapper[4982]: I0122 05:52:27.734350 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nh679" Jan 22 05:52:28 crc kubenswrapper[4982]: I0122 05:52:28.129600 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nh679" Jan 22 05:52:28 crc kubenswrapper[4982]: I0122 05:52:28.161101 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nqnzs" Jan 22 05:52:29 crc kubenswrapper[4982]: I0122 05:52:29.856757 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2jtbg" Jan 22 05:52:29 crc kubenswrapper[4982]: I0122 05:52:29.856831 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2jtbg" Jan 22 05:52:29 crc kubenswrapper[4982]: I0122 05:52:29.905901 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2jtbg" Jan 22 05:52:30 crc kubenswrapper[4982]: I0122 05:52:30.131150 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-85lr9" Jan 22 05:52:30 crc kubenswrapper[4982]: I0122 05:52:30.131236 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-85lr9" Jan 22 05:52:30 crc kubenswrapper[4982]: I0122 05:52:30.136429 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2jtbg" Jan 22 05:52:30 crc kubenswrapper[4982]: I0122 05:52:30.177804 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-85lr9" Jan 22 05:52:31 crc kubenswrapper[4982]: I0122 05:52:31.170925 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-85lr9" Jan 22 05:52:48 crc kubenswrapper[4982]: I0122 05:52:48.973983 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 05:52:48 crc kubenswrapper[4982]: I0122 05:52:48.974884 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 05:52:48 crc kubenswrapper[4982]: I0122 05:52:48.974962 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 05:52:48 crc kubenswrapper[4982]: I0122 05:52:48.976251 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"0e0167ccad0044847a9f8ef5cc1dd186b4b6a77a06b333f87c761d1c1a524dd9"} pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 05:52:48 crc kubenswrapper[4982]: I0122 05:52:48.976408 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" containerID="cri-o://0e0167ccad0044847a9f8ef5cc1dd186b4b6a77a06b333f87c761d1c1a524dd9" gracePeriod=600 Jan 22 05:52:49 crc kubenswrapper[4982]: I0122 05:52:49.250126 4982 generic.go:334] "Generic (PLEG): container finished" podID="2829369e-72ba-4637-853b-88f5cf242a0e" containerID="0e0167ccad0044847a9f8ef5cc1dd186b4b6a77a06b333f87c761d1c1a524dd9" exitCode=0 Jan 22 05:52:49 crc kubenswrapper[4982]: I0122 05:52:49.250215 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerDied","Data":"0e0167ccad0044847a9f8ef5cc1dd186b4b6a77a06b333f87c761d1c1a524dd9"} Jan 22 05:52:49 crc kubenswrapper[4982]: I0122 05:52:49.250796 4982 scope.go:117] "RemoveContainer" containerID="7b2101842583ccef5e3557cde4541e85536cfd2120a074c50bf93960e575e98d" Jan 22 05:52:50 crc kubenswrapper[4982]: I0122 05:52:50.262319 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"5833ee8c65cce1da38ef4e61b883dee26a7be0515be5664fa83d44be257f62b7"} Jan 22 05:52:50 crc kubenswrapper[4982]: I0122 05:52:50.448992 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" podUID="e7c4b062-c2e0-41db-81b1-d3bcda20a2c6" containerName="registry" containerID="cri-o://b7684f0177f2e1840c31f1cce8ce77df0704e9acaf5c2138426dfae1aa59238f" gracePeriod=30 Jan 22 05:52:50 crc kubenswrapper[4982]: I0122 05:52:50.953717 4982 util.go:48] "No ready sandbox for pod can be found. 
Jan 22 05:52:50 crc kubenswrapper[4982]: I0122 05:52:50.972044 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-registry-certificates\") pod \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") "
Jan 22 05:52:50 crc kubenswrapper[4982]: I0122 05:52:50.972454 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") "
Jan 22 05:52:50 crc kubenswrapper[4982]: I0122 05:52:50.972536 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-installation-pull-secrets\") pod \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") "
Jan 22 05:52:50 crc kubenswrapper[4982]: I0122 05:52:50.972624 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jrrqw\" (UniqueName: \"kubernetes.io/projected/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-kube-api-access-jrrqw\") pod \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") "
Jan 22 05:52:50 crc kubenswrapper[4982]: I0122 05:52:50.972685 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-trusted-ca\") pod \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") "
Jan 22 05:52:50 crc kubenswrapper[4982]: I0122 05:52:50.972723 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-registry-tls\") pod \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") "
Jan 22 05:52:50 crc kubenswrapper[4982]: I0122 05:52:50.972842 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-ca-trust-extracted\") pod \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") "
Jan 22 05:52:50 crc kubenswrapper[4982]: I0122 05:52:50.972895 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-bound-sa-token\") pod \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\" (UID: \"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6\") "
Jan 22 05:52:50 crc kubenswrapper[4982]: I0122 05:52:50.973466 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 05:52:50 crc kubenswrapper[4982]: I0122 05:52:50.976670 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 05:52:50 crc kubenswrapper[4982]: I0122 05:52:50.985694 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 05:52:50 crc kubenswrapper[4982]: I0122 05:52:50.989015 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 05:52:50 crc kubenswrapper[4982]: I0122 05:52:50.989223 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 05:52:50 crc kubenswrapper[4982]: I0122 05:52:50.994366 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-kube-api-access-jrrqw" (OuterVolumeSpecName: "kube-api-access-jrrqw") pod "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6"). InnerVolumeSpecName "kube-api-access-jrrqw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 05:52:50 crc kubenswrapper[4982]: I0122 05:52:50.995155 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Jan 22 05:52:50 crc kubenswrapper[4982]: I0122 05:52:50.998533 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6" (UID: "e7c4b062-c2e0-41db-81b1-d3bcda20a2c6"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 05:52:51 crc kubenswrapper[4982]: I0122 05:52:51.075226 4982 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-ca-trust-extracted\") on node \"crc\" DevicePath \"\""
Jan 22 05:52:51 crc kubenswrapper[4982]: I0122 05:52:51.075276 4982 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-bound-sa-token\") on node \"crc\" DevicePath \"\""
Jan 22 05:52:51 crc kubenswrapper[4982]: I0122 05:52:51.075286 4982 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-registry-certificates\") on node \"crc\" DevicePath \"\""
Jan 22 05:52:51 crc kubenswrapper[4982]: I0122 05:52:51.075301 4982 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-installation-pull-secrets\") on node \"crc\" DevicePath \"\""
Jan 22 05:52:51 crc kubenswrapper[4982]: I0122 05:52:51.075311 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jrrqw\" (UniqueName: \"kubernetes.io/projected/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-kube-api-access-jrrqw\") on node \"crc\" DevicePath \"\""
Jan 22 05:52:51 crc kubenswrapper[4982]: I0122 05:52:51.075323 4982 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-trusted-ca\") on node \"crc\" DevicePath \"\""
Jan 22 05:52:51 crc kubenswrapper[4982]: I0122 05:52:51.075332 4982 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6-registry-tls\") on node \"crc\" DevicePath \"\""
Jan 22 05:52:51 crc kubenswrapper[4982]: I0122 05:52:51.271057 4982 generic.go:334] "Generic (PLEG): container finished" podID="e7c4b062-c2e0-41db-81b1-d3bcda20a2c6" containerID="b7684f0177f2e1840c31f1cce8ce77df0704e9acaf5c2138426dfae1aa59238f" exitCode=0
Jan 22 05:52:51 crc kubenswrapper[4982]: I0122 05:52:51.271152 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-g89zh"
Jan 22 05:52:51 crc kubenswrapper[4982]: I0122 05:52:51.271206 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" event={"ID":"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6","Type":"ContainerDied","Data":"b7684f0177f2e1840c31f1cce8ce77df0704e9acaf5c2138426dfae1aa59238f"}
Jan 22 05:52:51 crc kubenswrapper[4982]: I0122 05:52:51.271247 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-g89zh" event={"ID":"e7c4b062-c2e0-41db-81b1-d3bcda20a2c6","Type":"ContainerDied","Data":"6b92ad6c28fdfc5ba440166f69d35f9724510eae7b7b7600d4eb86a93de7fb1a"}
Jan 22 05:52:51 crc kubenswrapper[4982]: I0122 05:52:51.271275 4982 scope.go:117] "RemoveContainer" containerID="b7684f0177f2e1840c31f1cce8ce77df0704e9acaf5c2138426dfae1aa59238f"
Jan 22 05:52:51 crc kubenswrapper[4982]: I0122 05:52:51.299521 4982 scope.go:117] "RemoveContainer" containerID="b7684f0177f2e1840c31f1cce8ce77df0704e9acaf5c2138426dfae1aa59238f"
Jan 22 05:52:51 crc kubenswrapper[4982]: E0122 05:52:51.301117 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7684f0177f2e1840c31f1cce8ce77df0704e9acaf5c2138426dfae1aa59238f\": container with ID starting with b7684f0177f2e1840c31f1cce8ce77df0704e9acaf5c2138426dfae1aa59238f not found: ID does not exist" containerID="b7684f0177f2e1840c31f1cce8ce77df0704e9acaf5c2138426dfae1aa59238f"
Jan 22 05:52:51 crc kubenswrapper[4982]: I0122 05:52:51.301155 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7684f0177f2e1840c31f1cce8ce77df0704e9acaf5c2138426dfae1aa59238f"} err="failed to get container status \"b7684f0177f2e1840c31f1cce8ce77df0704e9acaf5c2138426dfae1aa59238f\": rpc error: code = NotFound desc = could not find container \"b7684f0177f2e1840c31f1cce8ce77df0704e9acaf5c2138426dfae1aa59238f\": container with ID starting with b7684f0177f2e1840c31f1cce8ce77df0704e9acaf5c2138426dfae1aa59238f not found: ID does not exist"
Jan 22 05:52:51 crc kubenswrapper[4982]: I0122 05:52:51.317910 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-g89zh"]
Jan 22 05:52:51 crc kubenswrapper[4982]: I0122 05:52:51.322339 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-g89zh"]
Jan 22 05:52:51 crc kubenswrapper[4982]: I0122 05:52:51.732887 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7c4b062-c2e0-41db-81b1-d3bcda20a2c6" path="/var/lib/kubelet/pods/e7c4b062-c2e0-41db-81b1-d3bcda20a2c6/volumes"
Jan 22 05:55:18 crc kubenswrapper[4982]: I0122 05:55:18.973773 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 05:55:18 crc kubenswrapper[4982]: I0122 05:55:18.974504 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 05:55:48 crc kubenswrapper[4982]: I0122 05:55:48.975690 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 05:55:48 crc kubenswrapper[4982]: I0122 05:55:48.976936 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 05:56:18 crc kubenswrapper[4982]: I0122 05:56:18.974436 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 05:56:18 crc kubenswrapper[4982]: I0122 05:56:18.975315 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 05:56:18 crc kubenswrapper[4982]: I0122 05:56:18.975423 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx"
Jan 22 05:56:18 crc kubenswrapper[4982]: I0122 05:56:18.976847 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5833ee8c65cce1da38ef4e61b883dee26a7be0515be5664fa83d44be257f62b7"} pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 22 05:56:18 crc kubenswrapper[4982]: I0122 05:56:18.976978 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" containerID="cri-o://5833ee8c65cce1da38ef4e61b883dee26a7be0515be5664fa83d44be257f62b7" gracePeriod=600
Jan 22 05:56:19 crc kubenswrapper[4982]: I0122 05:56:19.488740 4982 generic.go:334] "Generic (PLEG): container finished" podID="2829369e-72ba-4637-853b-88f5cf242a0e" containerID="5833ee8c65cce1da38ef4e61b883dee26a7be0515be5664fa83d44be257f62b7" exitCode=0
Jan 22 05:56:19 crc kubenswrapper[4982]: I0122 05:56:19.488877 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerDied","Data":"5833ee8c65cce1da38ef4e61b883dee26a7be0515be5664fa83d44be257f62b7"}
Jan 22 05:56:19 crc kubenswrapper[4982]: I0122 05:56:19.489527 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"17cf6eb07357fb9bc149ba254fa2fe7cf10635a5cee0a8371d5956f5efe8c33f"}
Jan 22 05:56:19 crc kubenswrapper[4982]: I0122 05:56:19.489580 4982 scope.go:117] "RemoveContainer" containerID="0e0167ccad0044847a9f8ef5cc1dd186b4b6a77a06b333f87c761d1c1a524dd9"
Jan 22 05:58:22 crc kubenswrapper[4982]: I0122 05:58:22.050020 4982 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.324574 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-mb2qs"]
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.326649 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="ovn-controller" containerID="cri-o://046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9" gracePeriod=30
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.327296 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="northd" containerID="cri-o://8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295" gracePeriod=30
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.327536 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b" gracePeriod=30
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.327707 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="kube-rbac-proxy-node" containerID="cri-o://6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087" gracePeriod=30
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.327747 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="sbdb" containerID="cri-o://a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c" gracePeriod=30
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.327817 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="ovn-acl-logging" containerID="cri-o://77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41" gracePeriod=30
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.327893 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="nbdb" containerID="cri-o://17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1" gracePeriod=30
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.392780 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="ovnkube-controller" containerID="cri-o://ff8445b70de4abf6583e4942c976355794ca8dc1d7de89301c6fff3e50b7efb6" gracePeriod=30
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.544996 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4xrw6_f70d53ec-9c73-45bf-b6b4-ec45565ef1e6/kube-multus/2.log"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.545591 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4xrw6_f70d53ec-9c73-45bf-b6b4-ec45565ef1e6/kube-multus/1.log"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.545637 4982 generic.go:334] "Generic (PLEG): container finished" podID="f70d53ec-9c73-45bf-b6b4-ec45565ef1e6" containerID="fc395d6b6126f9c3f97817e0faf0cff772c0dd4a55c3471a807153cc002852ae" exitCode=2
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.545729 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4xrw6" event={"ID":"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6","Type":"ContainerDied","Data":"fc395d6b6126f9c3f97817e0faf0cff772c0dd4a55c3471a807153cc002852ae"}
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.545825 4982 scope.go:117] "RemoveContainer" containerID="8b107116500053caa4af8918d8569196067dea04df195595dd9876338a9c1fcc"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.546443 4982 scope.go:117] "RemoveContainer" containerID="fc395d6b6126f9c3f97817e0faf0cff772c0dd4a55c3471a807153cc002852ae"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.551960 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mb2qs_45295ff5-bb7d-450f-9ff1-eeb4edb0d705/ovnkube-controller/3.log"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.555319 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mb2qs_45295ff5-bb7d-450f-9ff1-eeb4edb0d705/ovn-acl-logging/0.log"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.556043 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mb2qs_45295ff5-bb7d-450f-9ff1-eeb4edb0d705/ovn-controller/0.log"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.557813 4982 generic.go:334] "Generic (PLEG): container finished" podID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerID="ff8445b70de4abf6583e4942c976355794ca8dc1d7de89301c6fff3e50b7efb6" exitCode=0
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.557844 4982 generic.go:334] "Generic (PLEG): container finished" podID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerID="ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b" exitCode=0
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.557878 4982 generic.go:334] "Generic (PLEG): container finished" podID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerID="6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087" exitCode=0
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.557892 4982 generic.go:334] "Generic (PLEG): container finished" podID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerID="77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41" exitCode=143
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.557904 4982 generic.go:334] "Generic (PLEG): container finished" podID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerID="046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9" exitCode=143
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.557935 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" event={"ID":"45295ff5-bb7d-450f-9ff1-eeb4edb0d705","Type":"ContainerDied","Data":"ff8445b70de4abf6583e4942c976355794ca8dc1d7de89301c6fff3e50b7efb6"}
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.557970 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" event={"ID":"45295ff5-bb7d-450f-9ff1-eeb4edb0d705","Type":"ContainerDied","Data":"ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b"}
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.557986 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" event={"ID":"45295ff5-bb7d-450f-9ff1-eeb4edb0d705","Type":"ContainerDied","Data":"6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087"}
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.558006 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" event={"ID":"45295ff5-bb7d-450f-9ff1-eeb4edb0d705","Type":"ContainerDied","Data":"77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41"}
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.558022 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" event={"ID":"45295ff5-bb7d-450f-9ff1-eeb4edb0d705","Type":"ContainerDied","Data":"046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9"}
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.669125 4982 scope.go:117] "RemoveContainer" containerID="34cb6c04bceeb41b90c18f2025140c1131ca41c0ade06c2447fefe3524744fa4"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.700369 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mb2qs_45295ff5-bb7d-450f-9ff1-eeb4edb0d705/ovn-acl-logging/0.log"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.700768 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mb2qs_45295ff5-bb7d-450f-9ff1-eeb4edb0d705/ovn-controller/0.log"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.702000 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.774465 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5h6fl"]
Jan 22 05:58:33 crc kubenswrapper[4982]: E0122 05:58:33.774734 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="kube-rbac-proxy-node"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.774762 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="kube-rbac-proxy-node"
Jan 22 05:58:33 crc kubenswrapper[4982]: E0122 05:58:33.774784 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7c4b062-c2e0-41db-81b1-d3bcda20a2c6" containerName="registry"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.774794 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7c4b062-c2e0-41db-81b1-d3bcda20a2c6" containerName="registry"
Jan 22 05:58:33 crc kubenswrapper[4982]: E0122 05:58:33.774804 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="sbdb"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.774814 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="sbdb"
Jan 22 05:58:33 crc kubenswrapper[4982]: E0122 05:58:33.774826 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="ovnkube-controller"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.774837 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="ovnkube-controller"
Jan 22 05:58:33 crc kubenswrapper[4982]: E0122 05:58:33.774884 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="ovnkube-controller"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.774894 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="ovnkube-controller"
Jan 22 05:58:33 crc kubenswrapper[4982]: E0122 05:58:33.774905 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="ovn-acl-logging"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.774913 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="ovn-acl-logging"
Jan 22 05:58:33 crc kubenswrapper[4982]: E0122 05:58:33.774924 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="kube-rbac-proxy-ovn-metrics"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.774931 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="kube-rbac-proxy-ovn-metrics"
Jan 22 05:58:33 crc kubenswrapper[4982]: E0122 05:58:33.774942 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="nbdb"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.774949 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="nbdb"
Jan 22 05:58:33 crc kubenswrapper[4982]: E0122 05:58:33.774957 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="kubecfg-setup"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.774965 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="kubecfg-setup"
Jan 22 05:58:33 crc kubenswrapper[4982]: E0122 05:58:33.774974 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="ovn-controller"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.774984 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="ovn-controller"
Jan 22 05:58:33 crc kubenswrapper[4982]: E0122 05:58:33.774996 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="northd"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.775003 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="northd"
Jan 22 05:58:33 crc kubenswrapper[4982]: E0122 05:58:33.775017 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="ovnkube-controller"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.775025 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="ovnkube-controller"
Jan 22 05:58:33 crc kubenswrapper[4982]: E0122 05:58:33.775032 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="ovnkube-controller"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.775039 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="ovnkube-controller"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.775173 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="ovnkube-controller"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.775186 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="ovn-controller"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.775194 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="ovn-acl-logging"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.775204 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="ovnkube-controller"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.775213 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="kube-rbac-proxy-node"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.775223 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="ovnkube-controller"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.775233 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="ovnkube-controller"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.775246 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7c4b062-c2e0-41db-81b1-d3bcda20a2c6" containerName="registry"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.775259 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="northd"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.775270 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="nbdb"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.775278 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="sbdb"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.775287 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="kube-rbac-proxy-ovn-metrics"
Jan 22 05:58:33 crc kubenswrapper[4982]: E0122 05:58:33.775397 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="ovnkube-controller"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.775405 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="ovnkube-controller"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.775523 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerName="ovnkube-controller"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.777154 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl"
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.789099 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-kubelet\") pod \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") "
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.789151 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-var-lib-openvswitch\") pod \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") "
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.789196 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-ovn-node-metrics-cert\") pod \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") "
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.789230 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-node-log\") pod \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") "
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.789253 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-cni-netd\") pod \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") "
Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.789283 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-log-socket\") pod \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") "
\"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.789265 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "45295ff5-bb7d-450f-9ff1-eeb4edb0d705" (UID: "45295ff5-bb7d-450f-9ff1-eeb4edb0d705"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.789327 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "45295ff5-bb7d-450f-9ff1-eeb4edb0d705" (UID: "45295ff5-bb7d-450f-9ff1-eeb4edb0d705"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.789329 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-systemd-units\") pod \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.789384 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "45295ff5-bb7d-450f-9ff1-eeb4edb0d705" (UID: "45295ff5-bb7d-450f-9ff1-eeb4edb0d705"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.789388 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "45295ff5-bb7d-450f-9ff1-eeb4edb0d705" (UID: "45295ff5-bb7d-450f-9ff1-eeb4edb0d705"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.789412 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-node-log" (OuterVolumeSpecName: "node-log") pod "45295ff5-bb7d-450f-9ff1-eeb4edb0d705" (UID: "45295ff5-bb7d-450f-9ff1-eeb4edb0d705"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.789444 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-log-socket" (OuterVolumeSpecName: "log-socket") pod "45295ff5-bb7d-450f-9ff1-eeb4edb0d705" (UID: "45295ff5-bb7d-450f-9ff1-eeb4edb0d705"). InnerVolumeSpecName "log-socket". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.789456 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fr8wb\" (UniqueName: \"kubernetes.io/projected/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-kube-api-access-fr8wb\") pod \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.789526 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-slash\") pod \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.789557 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-slash" (OuterVolumeSpecName: "host-slash") pod "45295ff5-bb7d-450f-9ff1-eeb4edb0d705" (UID: "45295ff5-bb7d-450f-9ff1-eeb4edb0d705"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.789592 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-run-netns\") pod \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.789637 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-env-overrides\") pod \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.789676 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-run-openvswitch\") pod \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.789706 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-cni-bin\") pod \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.789680 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "45295ff5-bb7d-450f-9ff1-eeb4edb0d705" (UID: "45295ff5-bb7d-450f-9ff1-eeb4edb0d705"). InnerVolumeSpecName "host-run-netns". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.789743 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-var-lib-cni-networks-ovn-kubernetes\") pod \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.789726 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "45295ff5-bb7d-450f-9ff1-eeb4edb0d705" (UID: "45295ff5-bb7d-450f-9ff1-eeb4edb0d705"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.789785 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "45295ff5-bb7d-450f-9ff1-eeb4edb0d705" (UID: "45295ff5-bb7d-450f-9ff1-eeb4edb0d705"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.789794 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-etc-openvswitch\") pod \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.789817 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "45295ff5-bb7d-450f-9ff1-eeb4edb0d705" (UID: "45295ff5-bb7d-450f-9ff1-eeb4edb0d705"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.789835 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-run-ovn\") pod \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.789888 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "45295ff5-bb7d-450f-9ff1-eeb4edb0d705" (UID: "45295ff5-bb7d-450f-9ff1-eeb4edb0d705"). InnerVolumeSpecName "etc-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.789905 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-run-ovn-kubernetes\") pod \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.789922 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "45295ff5-bb7d-450f-9ff1-eeb4edb0d705" (UID: "45295ff5-bb7d-450f-9ff1-eeb4edb0d705"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.789964 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-ovnkube-config\") pod \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.790029 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-ovnkube-script-lib\") pod \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.790062 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-run-systemd\") pod \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\" (UID: \"45295ff5-bb7d-450f-9ff1-eeb4edb0d705\") " Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.790349 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-etc-openvswitch\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.790405 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e3ad852a-b27c-41c8-9c05-3165e4751755-ovn-node-metrics-cert\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.790452 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-run-openvswitch\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.790518 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-var-lib-openvswitch\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 
22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.790561 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-host-slash\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.790591 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-node-log\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.790645 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-host-kubelet\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.790722 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.790773 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e3ad852a-b27c-41c8-9c05-3165e4751755-env-overrides\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.790873 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-run-systemd\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.790923 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-host-cni-netd\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.790976 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-host-run-netns\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.791038 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-systemd-units\") pod \"ovnkube-node-5h6fl\" (UID: 
\"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.791140 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-host-cni-bin\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.791193 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-log-socket\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.791227 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e3ad852a-b27c-41c8-9c05-3165e4751755-ovnkube-script-lib\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.791283 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwk66\" (UniqueName: \"kubernetes.io/projected/e3ad852a-b27c-41c8-9c05-3165e4751755-kube-api-access-kwk66\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.791317 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-host-run-ovn-kubernetes\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.791376 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-run-ovn\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.791419 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e3ad852a-b27c-41c8-9c05-3165e4751755-ovnkube-config\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.791520 4982 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-kubelet\") on node \"crc\" DevicePath \"\"" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.791550 4982 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.791572 4982 
reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-node-log\") on node \"crc\" DevicePath \"\"" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.791593 4982 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-cni-netd\") on node \"crc\" DevicePath \"\"" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.791613 4982 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-log-socket\") on node \"crc\" DevicePath \"\"" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.791633 4982 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-systemd-units\") on node \"crc\" DevicePath \"\"" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.791652 4982 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-slash\") on node \"crc\" DevicePath \"\"" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.791671 4982 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-run-netns\") on node \"crc\" DevicePath \"\"" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.791690 4982 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-run-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.791709 4982 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-cni-bin\") on node \"crc\" DevicePath \"\"" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.791730 4982 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.791752 4982 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.791772 4982 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.790031 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "45295ff5-bb7d-450f-9ff1-eeb4edb0d705" (UID: "45295ff5-bb7d-450f-9ff1-eeb4edb0d705"). InnerVolumeSpecName "host-run-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.790280 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "45295ff5-bb7d-450f-9ff1-eeb4edb0d705" (UID: "45295ff5-bb7d-450f-9ff1-eeb4edb0d705"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.790576 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "45295ff5-bb7d-450f-9ff1-eeb4edb0d705" (UID: "45295ff5-bb7d-450f-9ff1-eeb4edb0d705"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.793405 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "45295ff5-bb7d-450f-9ff1-eeb4edb0d705" (UID: "45295ff5-bb7d-450f-9ff1-eeb4edb0d705"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.797880 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-kube-api-access-fr8wb" (OuterVolumeSpecName: "kube-api-access-fr8wb") pod "45295ff5-bb7d-450f-9ff1-eeb4edb0d705" (UID: "45295ff5-bb7d-450f-9ff1-eeb4edb0d705"). InnerVolumeSpecName "kube-api-access-fr8wb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.798989 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "45295ff5-bb7d-450f-9ff1-eeb4edb0d705" (UID: "45295ff5-bb7d-450f-9ff1-eeb4edb0d705"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.817035 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "45295ff5-bb7d-450f-9ff1-eeb4edb0d705" (UID: "45295ff5-bb7d-450f-9ff1-eeb4edb0d705"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.893400 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-run-systemd\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.893500 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-run-systemd\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.893502 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-host-cni-netd\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.893551 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-host-cni-netd\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.893589 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-host-run-netns\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.893640 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-systemd-units\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.893681 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-host-run-netns\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.893717 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-host-cni-bin\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.893768 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-log-socket\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.893809 4982 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-log-socket\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.893833 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e3ad852a-b27c-41c8-9c05-3165e4751755-ovnkube-script-lib\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.893937 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-systemd-units\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.894046 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-host-cni-bin\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.894144 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwk66\" (UniqueName: \"kubernetes.io/projected/e3ad852a-b27c-41c8-9c05-3165e4751755-kube-api-access-kwk66\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.894179 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-host-run-ovn-kubernetes\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.894223 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-run-ovn\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.894259 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e3ad852a-b27c-41c8-9c05-3165e4751755-ovnkube-config\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.894298 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-etc-openvswitch\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.894334 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/e3ad852a-b27c-41c8-9c05-3165e4751755-ovn-node-metrics-cert\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.894367 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-run-openvswitch\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.894408 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-var-lib-openvswitch\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.894439 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-host-slash\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.894471 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-node-log\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.894510 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-host-kubelet\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.894562 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.894614 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e3ad852a-b27c-41c8-9c05-3165e4751755-env-overrides\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.894707 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fr8wb\" (UniqueName: \"kubernetes.io/projected/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-kube-api-access-fr8wb\") on node \"crc\" DevicePath \"\"" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.894738 4982 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.894759 4982 
reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.894781 4982 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.894790 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e3ad852a-b27c-41c8-9c05-3165e4751755-ovnkube-script-lib\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.894800 4982 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.894819 4982 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-run-systemd\") on node \"crc\" DevicePath \"\"" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.894837 4982 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/45295ff5-bb7d-450f-9ff1-eeb4edb0d705-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.895304 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-run-ovn\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.895409 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-host-run-ovn-kubernetes\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.895450 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-etc-openvswitch\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.895488 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-host-slash\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.895524 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5h6fl\" (UID: 
\"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.895539 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-run-openvswitch\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.895622 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-host-kubelet\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.895696 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-node-log\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.895683 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e3ad852a-b27c-41c8-9c05-3165e4751755-var-lib-openvswitch\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.895982 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e3ad852a-b27c-41c8-9c05-3165e4751755-env-overrides\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.896065 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e3ad852a-b27c-41c8-9c05-3165e4751755-ovnkube-config\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.899069 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e3ad852a-b27c-41c8-9c05-3165e4751755-ovn-node-metrics-cert\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:33 crc kubenswrapper[4982]: I0122 05:58:33.922061 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwk66\" (UniqueName: \"kubernetes.io/projected/e3ad852a-b27c-41c8-9c05-3165e4751755-kube-api-access-kwk66\") pod \"ovnkube-node-5h6fl\" (UID: \"e3ad852a-b27c-41c8-9c05-3165e4751755\") " pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.094838 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.567985 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4xrw6_f70d53ec-9c73-45bf-b6b4-ec45565ef1e6/kube-multus/2.log" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.568300 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4xrw6" event={"ID":"f70d53ec-9c73-45bf-b6b4-ec45565ef1e6","Type":"ContainerStarted","Data":"1ca452451a8701551aa7790bba90c2de6c23084da1b99c3bc543209e13f45497"} Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.574909 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mb2qs_45295ff5-bb7d-450f-9ff1-eeb4edb0d705/ovn-acl-logging/0.log" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.575683 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mb2qs_45295ff5-bb7d-450f-9ff1-eeb4edb0d705/ovn-controller/0.log" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.576325 4982 generic.go:334] "Generic (PLEG): container finished" podID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerID="a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c" exitCode=0 Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.576399 4982 generic.go:334] "Generic (PLEG): container finished" podID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerID="17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1" exitCode=0 Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.576423 4982 generic.go:334] "Generic (PLEG): container finished" podID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" containerID="8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295" exitCode=0 Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.576541 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" event={"ID":"45295ff5-bb7d-450f-9ff1-eeb4edb0d705","Type":"ContainerDied","Data":"a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c"} Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.576597 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" event={"ID":"45295ff5-bb7d-450f-9ff1-eeb4edb0d705","Type":"ContainerDied","Data":"17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1"} Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.576628 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" event={"ID":"45295ff5-bb7d-450f-9ff1-eeb4edb0d705","Type":"ContainerDied","Data":"8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295"} Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.576657 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" event={"ID":"45295ff5-bb7d-450f-9ff1-eeb4edb0d705","Type":"ContainerDied","Data":"5e8c8e68d30551f0db062125d393170b625603591825be844bd20c1ca9c55e6a"} Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.576703 4982 scope.go:117] "RemoveContainer" containerID="ff8445b70de4abf6583e4942c976355794ca8dc1d7de89301c6fff3e50b7efb6" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.577018 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-mb2qs" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.583972 4982 generic.go:334] "Generic (PLEG): container finished" podID="e3ad852a-b27c-41c8-9c05-3165e4751755" containerID="bc9ca2b6b516ea6a109a9f9af8de1026c274ddcf56c705a6aa872d95b1c6cddc" exitCode=0 Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.584031 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" event={"ID":"e3ad852a-b27c-41c8-9c05-3165e4751755","Type":"ContainerDied","Data":"bc9ca2b6b516ea6a109a9f9af8de1026c274ddcf56c705a6aa872d95b1c6cddc"} Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.584069 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" event={"ID":"e3ad852a-b27c-41c8-9c05-3165e4751755","Type":"ContainerStarted","Data":"c1251390d139dce4c6ebc5c9bcb0ac6996e60da6fd03edf9f23b2e39c6ffa81e"} Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.632917 4982 scope.go:117] "RemoveContainer" containerID="a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.663168 4982 scope.go:117] "RemoveContainer" containerID="17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.721373 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-mb2qs"] Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.721830 4982 scope.go:117] "RemoveContainer" containerID="8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.731944 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-mb2qs"] Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.750018 4982 scope.go:117] "RemoveContainer" containerID="ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.771016 4982 scope.go:117] "RemoveContainer" containerID="6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.788878 4982 scope.go:117] "RemoveContainer" containerID="77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.809263 4982 scope.go:117] "RemoveContainer" containerID="046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.832005 4982 scope.go:117] "RemoveContainer" containerID="70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.860662 4982 scope.go:117] "RemoveContainer" containerID="ff8445b70de4abf6583e4942c976355794ca8dc1d7de89301c6fff3e50b7efb6" Jan 22 05:58:34 crc kubenswrapper[4982]: E0122 05:58:34.862143 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff8445b70de4abf6583e4942c976355794ca8dc1d7de89301c6fff3e50b7efb6\": container with ID starting with ff8445b70de4abf6583e4942c976355794ca8dc1d7de89301c6fff3e50b7efb6 not found: ID does not exist" containerID="ff8445b70de4abf6583e4942c976355794ca8dc1d7de89301c6fff3e50b7efb6" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.862204 4982 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"ff8445b70de4abf6583e4942c976355794ca8dc1d7de89301c6fff3e50b7efb6"} err="failed to get container status \"ff8445b70de4abf6583e4942c976355794ca8dc1d7de89301c6fff3e50b7efb6\": rpc error: code = NotFound desc = could not find container \"ff8445b70de4abf6583e4942c976355794ca8dc1d7de89301c6fff3e50b7efb6\": container with ID starting with ff8445b70de4abf6583e4942c976355794ca8dc1d7de89301c6fff3e50b7efb6 not found: ID does not exist" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.862250 4982 scope.go:117] "RemoveContainer" containerID="a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c" Jan 22 05:58:34 crc kubenswrapper[4982]: E0122 05:58:34.862883 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\": container with ID starting with a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c not found: ID does not exist" containerID="a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.862934 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c"} err="failed to get container status \"a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\": rpc error: code = NotFound desc = could not find container \"a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\": container with ID starting with a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c not found: ID does not exist" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.862981 4982 scope.go:117] "RemoveContainer" containerID="17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1" Jan 22 05:58:34 crc kubenswrapper[4982]: E0122 05:58:34.863420 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\": container with ID starting with 17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1 not found: ID does not exist" containerID="17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.863477 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1"} err="failed to get container status \"17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\": rpc error: code = NotFound desc = could not find container \"17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\": container with ID starting with 17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1 not found: ID does not exist" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.863509 4982 scope.go:117] "RemoveContainer" containerID="8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295" Jan 22 05:58:34 crc kubenswrapper[4982]: E0122 05:58:34.864298 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\": container with ID starting with 8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295 not found: ID does not exist" 
containerID="8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.864325 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295"} err="failed to get container status \"8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\": rpc error: code = NotFound desc = could not find container \"8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\": container with ID starting with 8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295 not found: ID does not exist" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.864341 4982 scope.go:117] "RemoveContainer" containerID="ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b" Jan 22 05:58:34 crc kubenswrapper[4982]: E0122 05:58:34.864781 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\": container with ID starting with ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b not found: ID does not exist" containerID="ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.864839 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b"} err="failed to get container status \"ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\": rpc error: code = NotFound desc = could not find container \"ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\": container with ID starting with ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b not found: ID does not exist" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.864902 4982 scope.go:117] "RemoveContainer" containerID="6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087" Jan 22 05:58:34 crc kubenswrapper[4982]: E0122 05:58:34.865743 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\": container with ID starting with 6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087 not found: ID does not exist" containerID="6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.865770 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087"} err="failed to get container status \"6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\": rpc error: code = NotFound desc = could not find container \"6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\": container with ID starting with 6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087 not found: ID does not exist" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.865787 4982 scope.go:117] "RemoveContainer" containerID="77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41" Jan 22 05:58:34 crc kubenswrapper[4982]: E0122 05:58:34.866261 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\": container with ID starting with 77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41 not found: ID does not exist" containerID="77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.866288 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41"} err="failed to get container status \"77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\": rpc error: code = NotFound desc = could not find container \"77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\": container with ID starting with 77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41 not found: ID does not exist" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.866313 4982 scope.go:117] "RemoveContainer" containerID="046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9" Jan 22 05:58:34 crc kubenswrapper[4982]: E0122 05:58:34.866722 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\": container with ID starting with 046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9 not found: ID does not exist" containerID="046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.866749 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9"} err="failed to get container status \"046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\": rpc error: code = NotFound desc = could not find container \"046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\": container with ID starting with 046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9 not found: ID does not exist" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.866763 4982 scope.go:117] "RemoveContainer" containerID="70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa" Jan 22 05:58:34 crc kubenswrapper[4982]: E0122 05:58:34.867071 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\": container with ID starting with 70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa not found: ID does not exist" containerID="70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.867091 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa"} err="failed to get container status \"70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\": rpc error: code = NotFound desc = could not find container \"70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\": container with ID starting with 70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa not found: ID does not exist" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.867103 4982 scope.go:117] "RemoveContainer" containerID="ff8445b70de4abf6583e4942c976355794ca8dc1d7de89301c6fff3e50b7efb6" Jan 22 05:58:34 crc 
kubenswrapper[4982]: I0122 05:58:34.867401 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff8445b70de4abf6583e4942c976355794ca8dc1d7de89301c6fff3e50b7efb6"} err="failed to get container status \"ff8445b70de4abf6583e4942c976355794ca8dc1d7de89301c6fff3e50b7efb6\": rpc error: code = NotFound desc = could not find container \"ff8445b70de4abf6583e4942c976355794ca8dc1d7de89301c6fff3e50b7efb6\": container with ID starting with ff8445b70de4abf6583e4942c976355794ca8dc1d7de89301c6fff3e50b7efb6 not found: ID does not exist" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.867419 4982 scope.go:117] "RemoveContainer" containerID="a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.868081 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c"} err="failed to get container status \"a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\": rpc error: code = NotFound desc = could not find container \"a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\": container with ID starting with a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c not found: ID does not exist" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.868098 4982 scope.go:117] "RemoveContainer" containerID="17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.868568 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1"} err="failed to get container status \"17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\": rpc error: code = NotFound desc = could not find container \"17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\": container with ID starting with 17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1 not found: ID does not exist" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.868586 4982 scope.go:117] "RemoveContainer" containerID="8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.870265 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295"} err="failed to get container status \"8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\": rpc error: code = NotFound desc = could not find container \"8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\": container with ID starting with 8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295 not found: ID does not exist" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.870284 4982 scope.go:117] "RemoveContainer" containerID="ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.870892 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b"} err="failed to get container status \"ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\": rpc error: code = NotFound desc = could not find container \"ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\": container with ID 
starting with ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b not found: ID does not exist" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.870939 4982 scope.go:117] "RemoveContainer" containerID="6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.871403 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087"} err="failed to get container status \"6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\": rpc error: code = NotFound desc = could not find container \"6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\": container with ID starting with 6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087 not found: ID does not exist" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.871446 4982 scope.go:117] "RemoveContainer" containerID="77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.871987 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41"} err="failed to get container status \"77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\": rpc error: code = NotFound desc = could not find container \"77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\": container with ID starting with 77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41 not found: ID does not exist" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.872035 4982 scope.go:117] "RemoveContainer" containerID="046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.872483 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9"} err="failed to get container status \"046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\": rpc error: code = NotFound desc = could not find container \"046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\": container with ID starting with 046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9 not found: ID does not exist" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.872507 4982 scope.go:117] "RemoveContainer" containerID="70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.872949 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa"} err="failed to get container status \"70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\": rpc error: code = NotFound desc = could not find container \"70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\": container with ID starting with 70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa not found: ID does not exist" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.872991 4982 scope.go:117] "RemoveContainer" containerID="ff8445b70de4abf6583e4942c976355794ca8dc1d7de89301c6fff3e50b7efb6" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.874241 4982 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"ff8445b70de4abf6583e4942c976355794ca8dc1d7de89301c6fff3e50b7efb6"} err="failed to get container status \"ff8445b70de4abf6583e4942c976355794ca8dc1d7de89301c6fff3e50b7efb6\": rpc error: code = NotFound desc = could not find container \"ff8445b70de4abf6583e4942c976355794ca8dc1d7de89301c6fff3e50b7efb6\": container with ID starting with ff8445b70de4abf6583e4942c976355794ca8dc1d7de89301c6fff3e50b7efb6 not found: ID does not exist" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.874288 4982 scope.go:117] "RemoveContainer" containerID="a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.874626 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c"} err="failed to get container status \"a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\": rpc error: code = NotFound desc = could not find container \"a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c\": container with ID starting with a732e8418f5b72978be2f401c9e1017eb5c911bdfc6bfa4d7bc67ca4420c296c not found: ID does not exist" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.874655 4982 scope.go:117] "RemoveContainer" containerID="17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.875016 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1"} err="failed to get container status \"17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\": rpc error: code = NotFound desc = could not find container \"17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1\": container with ID starting with 17d534f837e4b33b201cad3588b13d1d1b310e24554e21a5f1f1539e8400a5a1 not found: ID does not exist" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.875043 4982 scope.go:117] "RemoveContainer" containerID="8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.875469 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295"} err="failed to get container status \"8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\": rpc error: code = NotFound desc = could not find container \"8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295\": container with ID starting with 8a303ec59c57267a984b2f5a9eed025d971bd8c3946077eef8fff09b22652295 not found: ID does not exist" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.875512 4982 scope.go:117] "RemoveContainer" containerID="ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.875983 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b"} err="failed to get container status \"ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\": rpc error: code = NotFound desc = could not find container \"ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b\": container with ID starting with ddc5894c3785f0a4b01bbb5107e845cf30ec8137093a9314eba056b49fd1a31b not found: ID does not exist" Jan 
22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.876018 4982 scope.go:117] "RemoveContainer" containerID="6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.880112 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087"} err="failed to get container status \"6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\": rpc error: code = NotFound desc = could not find container \"6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087\": container with ID starting with 6d3f98c8734b9e7e18798d0a7cef18c0c88e5ac7e8c3d385fb0ba5b99aca0087 not found: ID does not exist" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.880205 4982 scope.go:117] "RemoveContainer" containerID="77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.882391 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41"} err="failed to get container status \"77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\": rpc error: code = NotFound desc = could not find container \"77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41\": container with ID starting with 77a43a647a8794245abd6b13da80083939f8a9c3197072bc5236c3f59de75f41 not found: ID does not exist" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.882416 4982 scope.go:117] "RemoveContainer" containerID="046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.882837 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9"} err="failed to get container status \"046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\": rpc error: code = NotFound desc = could not find container \"046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9\": container with ID starting with 046db0a9bee7a8de7e4c260d0a479de29e643a17de89ae0be0da7afa368dbef9 not found: ID does not exist" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.882904 4982 scope.go:117] "RemoveContainer" containerID="70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa" Jan 22 05:58:34 crc kubenswrapper[4982]: I0122 05:58:34.883437 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa"} err="failed to get container status \"70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\": rpc error: code = NotFound desc = could not find container \"70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa\": container with ID starting with 70baf7c58489facb56f1f9357d0d1af2771abf03be134d4aa46cdd9249b246fa not found: ID does not exist" Jan 22 05:58:35 crc kubenswrapper[4982]: I0122 05:58:35.597244 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" event={"ID":"e3ad852a-b27c-41c8-9c05-3165e4751755","Type":"ContainerStarted","Data":"dee6d60f89d8a0156c43ed5cc2051ea58966effdddaff327b3dd9c1f1fbc89ca"} Jan 22 05:58:35 crc kubenswrapper[4982]: I0122 05:58:35.597299 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
Jan 22 05:58:35 crc kubenswrapper[4982]: I0122 05:58:35.597319 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" event={"ID":"e3ad852a-b27c-41c8-9c05-3165e4751755","Type":"ContainerStarted","Data":"426d28d1d84d6086e761a8692524502290c1f950e4e96691c4b97f3d7e763c61"}
Jan 22 05:58:35 crc kubenswrapper[4982]: I0122 05:58:35.597333 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" event={"ID":"e3ad852a-b27c-41c8-9c05-3165e4751755","Type":"ContainerStarted","Data":"78aa413da8d4cb4c42053305b8e7173136d5f21fc01052fda87aeeff8d03d964"}
Jan 22 05:58:35 crc kubenswrapper[4982]: I0122 05:58:35.597346 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" event={"ID":"e3ad852a-b27c-41c8-9c05-3165e4751755","Type":"ContainerStarted","Data":"2c24ea2a4d45c40920f1da7ab1f77b013c6553cbffbee1203a87e555c8aee05a"}
Jan 22 05:58:35 crc kubenswrapper[4982]: I0122 05:58:35.597359 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" event={"ID":"e3ad852a-b27c-41c8-9c05-3165e4751755","Type":"ContainerStarted","Data":"b6a3fed41c536ed8b900b66fb6dcb12e44301e5ffbdce486fdd35366e229a124"}
Jan 22 05:58:35 crc kubenswrapper[4982]: I0122 05:58:35.748562 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45295ff5-bb7d-450f-9ff1-eeb4edb0d705" path="/var/lib/kubelet/pods/45295ff5-bb7d-450f-9ff1-eeb4edb0d705/volumes"
Jan 22 05:58:38 crc kubenswrapper[4982]: I0122 05:58:38.626219 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" event={"ID":"e3ad852a-b27c-41c8-9c05-3165e4751755","Type":"ContainerStarted","Data":"7f3a9ef4b7343d64215d7776b402750bc77f94eec6c23a3a503c0d6401281925"}
Jan 22 05:58:41 crc kubenswrapper[4982]: I0122 05:58:41.657818 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" event={"ID":"e3ad852a-b27c-41c8-9c05-3165e4751755","Type":"ContainerStarted","Data":"5949cc6deeb20b6495dcab4d911ed41748447f65af471a656ab93c0b3c5eaa7b"}
Jan 22 05:58:41 crc kubenswrapper[4982]: I0122 05:58:41.658878 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl"
Jan 22 05:58:41 crc kubenswrapper[4982]: I0122 05:58:41.658902 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl"
Jan 22 05:58:41 crc kubenswrapper[4982]: I0122 05:58:41.704525 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl" podStartSLOduration=8.704501323 podStartE2EDuration="8.704501323s" podCreationTimestamp="2026-01-22 05:58:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:58:41.699979451 +0000 UTC m=+782.538617464" watchObservedRunningTime="2026-01-22 05:58:41.704501323 +0000 UTC m=+782.543139326"
Jan 22 05:58:41 crc kubenswrapper[4982]: I0122 05:58:41.705265 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl"
Jan 22 05:58:42 crc kubenswrapper[4982]: I0122 05:58:42.451625 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-hvxgx"]
Jan 22 05:58:42 crc kubenswrapper[4982]: I0122 05:58:42.452995 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-hvxgx"
Jan 22 05:58:42 crc kubenswrapper[4982]: I0122 05:58:42.455308 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt"
Jan 22 05:58:42 crc kubenswrapper[4982]: I0122 05:58:42.456894 4982 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-s9zg9"
Jan 22 05:58:42 crc kubenswrapper[4982]: I0122 05:58:42.457307 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage"
Jan 22 05:58:42 crc kubenswrapper[4982]: I0122 05:58:42.457394 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt"
Jan 22 05:58:42 crc kubenswrapper[4982]: I0122 05:58:42.458204 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-hvxgx"]
Jan 22 05:58:42 crc kubenswrapper[4982]: I0122 05:58:42.552132 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2skw\" (UniqueName: \"kubernetes.io/projected/1ba6c4ea-fd99-43b6-84a9-26bb22ae800e-kube-api-access-d2skw\") pod \"crc-storage-crc-hvxgx\" (UID: \"1ba6c4ea-fd99-43b6-84a9-26bb22ae800e\") " pod="crc-storage/crc-storage-crc-hvxgx"
Jan 22 05:58:42 crc kubenswrapper[4982]: I0122 05:58:42.552293 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/1ba6c4ea-fd99-43b6-84a9-26bb22ae800e-node-mnt\") pod \"crc-storage-crc-hvxgx\" (UID: \"1ba6c4ea-fd99-43b6-84a9-26bb22ae800e\") " pod="crc-storage/crc-storage-crc-hvxgx"
Jan 22 05:58:42 crc kubenswrapper[4982]: I0122 05:58:42.552716 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/1ba6c4ea-fd99-43b6-84a9-26bb22ae800e-crc-storage\") pod \"crc-storage-crc-hvxgx\" (UID: \"1ba6c4ea-fd99-43b6-84a9-26bb22ae800e\") " pod="crc-storage/crc-storage-crc-hvxgx"
Jan 22 05:58:42 crc kubenswrapper[4982]: I0122 05:58:42.654952 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/1ba6c4ea-fd99-43b6-84a9-26bb22ae800e-node-mnt\") pod \"crc-storage-crc-hvxgx\" (UID: \"1ba6c4ea-fd99-43b6-84a9-26bb22ae800e\") " pod="crc-storage/crc-storage-crc-hvxgx"
Jan 22 05:58:42 crc kubenswrapper[4982]: I0122 05:58:42.655116 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/1ba6c4ea-fd99-43b6-84a9-26bb22ae800e-crc-storage\") pod \"crc-storage-crc-hvxgx\" (UID: \"1ba6c4ea-fd99-43b6-84a9-26bb22ae800e\") " pod="crc-storage/crc-storage-crc-hvxgx"
Jan 22 05:58:42 crc kubenswrapper[4982]: I0122 05:58:42.655245 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2skw\" (UniqueName: \"kubernetes.io/projected/1ba6c4ea-fd99-43b6-84a9-26bb22ae800e-kube-api-access-d2skw\") pod \"crc-storage-crc-hvxgx\" (UID: \"1ba6c4ea-fd99-43b6-84a9-26bb22ae800e\") " pod="crc-storage/crc-storage-crc-hvxgx"
Jan 22 05:58:42 crc kubenswrapper[4982]: I0122 05:58:42.655207 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/1ba6c4ea-fd99-43b6-84a9-26bb22ae800e-node-mnt\") pod \"crc-storage-crc-hvxgx\" (UID: \"1ba6c4ea-fd99-43b6-84a9-26bb22ae800e\") " pod="crc-storage/crc-storage-crc-hvxgx"
Jan 22 05:58:42 crc kubenswrapper[4982]: I0122 05:58:42.656625 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/1ba6c4ea-fd99-43b6-84a9-26bb22ae800e-crc-storage\") pod \"crc-storage-crc-hvxgx\" (UID: \"1ba6c4ea-fd99-43b6-84a9-26bb22ae800e\") " pod="crc-storage/crc-storage-crc-hvxgx"
Jan 22 05:58:42 crc kubenswrapper[4982]: I0122 05:58:42.685971 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl"
Jan 22 05:58:42 crc kubenswrapper[4982]: I0122 05:58:42.696542 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2skw\" (UniqueName: \"kubernetes.io/projected/1ba6c4ea-fd99-43b6-84a9-26bb22ae800e-kube-api-access-d2skw\") pod \"crc-storage-crc-hvxgx\" (UID: \"1ba6c4ea-fd99-43b6-84a9-26bb22ae800e\") " pod="crc-storage/crc-storage-crc-hvxgx"
Jan 22 05:58:42 crc kubenswrapper[4982]: I0122 05:58:42.728061 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl"
Jan 22 05:58:42 crc kubenswrapper[4982]: I0122 05:58:42.781489 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-hvxgx"
Jan 22 05:58:42 crc kubenswrapper[4982]: E0122 05:58:42.831452 4982 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-hvxgx_crc-storage_1ba6c4ea-fd99-43b6-84a9-26bb22ae800e_0(4e3e03e87538bbec806dbc8ecd9f1803c92f1addd74515ab676d01c1453f29a3): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Jan 22 05:58:42 crc kubenswrapper[4982]: E0122 05:58:42.831566 4982 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-hvxgx_crc-storage_1ba6c4ea-fd99-43b6-84a9-26bb22ae800e_0(4e3e03e87538bbec806dbc8ecd9f1803c92f1addd74515ab676d01c1453f29a3): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-hvxgx"
Jan 22 05:58:42 crc kubenswrapper[4982]: E0122 05:58:42.831594 4982 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-hvxgx_crc-storage_1ba6c4ea-fd99-43b6-84a9-26bb22ae800e_0(4e3e03e87538bbec806dbc8ecd9f1803c92f1addd74515ab676d01c1453f29a3): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-hvxgx"
Jan 22 05:58:42 crc kubenswrapper[4982]: E0122 05:58:42.831661 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-hvxgx_crc-storage(1ba6c4ea-fd99-43b6-84a9-26bb22ae800e)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-hvxgx_crc-storage(1ba6c4ea-fd99-43b6-84a9-26bb22ae800e)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-hvxgx_crc-storage_1ba6c4ea-fd99-43b6-84a9-26bb22ae800e_0(4e3e03e87538bbec806dbc8ecd9f1803c92f1addd74515ab676d01c1453f29a3): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-hvxgx" podUID="1ba6c4ea-fd99-43b6-84a9-26bb22ae800e"
Jan 22 05:58:43 crc kubenswrapper[4982]: I0122 05:58:43.692602 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-hvxgx"
Jan 22 05:58:43 crc kubenswrapper[4982]: I0122 05:58:43.693356 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-hvxgx"
Jan 22 05:58:43 crc kubenswrapper[4982]: E0122 05:58:43.739206 4982 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-hvxgx_crc-storage_1ba6c4ea-fd99-43b6-84a9-26bb22ae800e_0(aa6c72e5d2201ee03eaa2c024a4a74519ea9d92ba91fd74d696bef5de66a7876): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Jan 22 05:58:43 crc kubenswrapper[4982]: E0122 05:58:43.739315 4982 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-hvxgx_crc-storage_1ba6c4ea-fd99-43b6-84a9-26bb22ae800e_0(aa6c72e5d2201ee03eaa2c024a4a74519ea9d92ba91fd74d696bef5de66a7876): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-hvxgx"
Jan 22 05:58:43 crc kubenswrapper[4982]: E0122 05:58:43.739362 4982 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-hvxgx_crc-storage_1ba6c4ea-fd99-43b6-84a9-26bb22ae800e_0(aa6c72e5d2201ee03eaa2c024a4a74519ea9d92ba91fd74d696bef5de66a7876): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-hvxgx"
Jan 22 05:58:43 crc kubenswrapper[4982]: E0122 05:58:43.739466 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-hvxgx_crc-storage(1ba6c4ea-fd99-43b6-84a9-26bb22ae800e)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-hvxgx_crc-storage(1ba6c4ea-fd99-43b6-84a9-26bb22ae800e)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-hvxgx_crc-storage_1ba6c4ea-fd99-43b6-84a9-26bb22ae800e_0(aa6c72e5d2201ee03eaa2c024a4a74519ea9d92ba91fd74d696bef5de66a7876): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-hvxgx" podUID="1ba6c4ea-fd99-43b6-84a9-26bb22ae800e"
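Both sandbox attempts above (4e3e03e8… and aa6c72e5…) fail for the same reason: CRI-O finds no network config in /etc/kubernetes/cni/net.d/ because ovn-kubernetes, still starting in the earlier entries, has not yet written its CNI config there. The kubelet simply backs off and retries; the retry at 05:58:58 below succeeds and the sandbox 60ed7c09… starts at 05:58:59. A small Go sketch of the presence check implied by the error message, scanning the directory named in the log for common CNI config extensions (a debugging aid under those assumptions, not CRI-O's actual discovery code):

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// cniConfDir is the directory named in the errors above; CRI-O on this node
// looks there instead of the more common default /etc/cni/net.d.
const cniConfDir = "/etc/kubernetes/cni/net.d"

// hasCNIConfig reports whether any CNI network config file is present,
// roughly the condition that stays false until the network plugin writes
// its config and sandbox creation can proceed.
func hasCNIConfig(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := hasCNIConfig(cniConfDir)
	fmt.Printf("cni config present: %v (err: %v)\n", ok, err)
}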
Jan 22 05:58:48 crc kubenswrapper[4982]: I0122 05:58:48.974016 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 05:58:48 crc kubenswrapper[4982]: I0122 05:58:48.974430 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 05:58:58 crc kubenswrapper[4982]: I0122 05:58:58.718791 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-hvxgx"
Jan 22 05:58:58 crc kubenswrapper[4982]: I0122 05:58:58.719937 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-hvxgx"
Jan 22 05:58:59 crc kubenswrapper[4982]: I0122 05:58:59.050354 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-hvxgx"]
Jan 22 05:58:59 crc kubenswrapper[4982]: W0122 05:58:59.056691 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ba6c4ea_fd99_43b6_84a9_26bb22ae800e.slice/crio-60ed7c0909f41d8714f16a2020afc00e963734f956b46c9b31c56e2061859a4a WatchSource:0}: Error finding container 60ed7c0909f41d8714f16a2020afc00e963734f956b46c9b31c56e2061859a4a: Status 404 returned error can't find the container with id 60ed7c0909f41d8714f16a2020afc00e963734f956b46c9b31c56e2061859a4a
Jan 22 05:58:59 crc kubenswrapper[4982]: I0122 05:58:59.059789 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 22 05:58:59 crc kubenswrapper[4982]: I0122 05:58:59.838144 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-hvxgx" event={"ID":"1ba6c4ea-fd99-43b6-84a9-26bb22ae800e","Type":"ContainerStarted","Data":"60ed7c0909f41d8714f16a2020afc00e963734f956b46c9b31c56e2061859a4a"}
Jan 22 05:59:00 crc kubenswrapper[4982]: I0122 05:59:00.848357 4982 generic.go:334] "Generic (PLEG): container finished" podID="1ba6c4ea-fd99-43b6-84a9-26bb22ae800e" containerID="20a205c4d63be561d27569fcb88388244ef62aa7090609a26178e1437adb91e5" exitCode=0
Jan 22 05:59:00 crc kubenswrapper[4982]: I0122 05:59:00.848481 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-hvxgx" event={"ID":"1ba6c4ea-fd99-43b6-84a9-26bb22ae800e","Type":"ContainerDied","Data":"20a205c4d63be561d27569fcb88388244ef62aa7090609a26178e1437adb91e5"}
Jan 22 05:59:02 crc kubenswrapper[4982]: I0122 05:59:02.204665 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-hvxgx"
Jan 22 05:59:02 crc kubenswrapper[4982]: I0122 05:59:02.280610 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d2skw\" (UniqueName: \"kubernetes.io/projected/1ba6c4ea-fd99-43b6-84a9-26bb22ae800e-kube-api-access-d2skw\") pod \"1ba6c4ea-fd99-43b6-84a9-26bb22ae800e\" (UID: \"1ba6c4ea-fd99-43b6-84a9-26bb22ae800e\") "
Jan 22 05:59:02 crc kubenswrapper[4982]: I0122 05:59:02.280772 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/1ba6c4ea-fd99-43b6-84a9-26bb22ae800e-node-mnt\") pod \"1ba6c4ea-fd99-43b6-84a9-26bb22ae800e\" (UID: \"1ba6c4ea-fd99-43b6-84a9-26bb22ae800e\") "
Jan 22 05:59:02 crc kubenswrapper[4982]: I0122 05:59:02.280869 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/1ba6c4ea-fd99-43b6-84a9-26bb22ae800e-crc-storage\") pod \"1ba6c4ea-fd99-43b6-84a9-26bb22ae800e\" (UID: \"1ba6c4ea-fd99-43b6-84a9-26bb22ae800e\") "
Jan 22 05:59:02 crc kubenswrapper[4982]: I0122 05:59:02.281168 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1ba6c4ea-fd99-43b6-84a9-26bb22ae800e-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "1ba6c4ea-fd99-43b6-84a9-26bb22ae800e" (UID: "1ba6c4ea-fd99-43b6-84a9-26bb22ae800e"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 22 05:59:02 crc kubenswrapper[4982]: I0122 05:59:02.281511 4982 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/1ba6c4ea-fd99-43b6-84a9-26bb22ae800e-node-mnt\") on node \"crc\" DevicePath \"\""
Jan 22 05:59:02 crc kubenswrapper[4982]: I0122 05:59:02.288231 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ba6c4ea-fd99-43b6-84a9-26bb22ae800e-kube-api-access-d2skw" (OuterVolumeSpecName: "kube-api-access-d2skw") pod "1ba6c4ea-fd99-43b6-84a9-26bb22ae800e" (UID: "1ba6c4ea-fd99-43b6-84a9-26bb22ae800e"). InnerVolumeSpecName "kube-api-access-d2skw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 05:59:02 crc kubenswrapper[4982]: I0122 05:59:02.298496 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ba6c4ea-fd99-43b6-84a9-26bb22ae800e-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "1ba6c4ea-fd99-43b6-84a9-26bb22ae800e" (UID: "1ba6c4ea-fd99-43b6-84a9-26bb22ae800e"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 05:59:02 crc kubenswrapper[4982]: I0122 05:59:02.382036 4982 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/1ba6c4ea-fd99-43b6-84a9-26bb22ae800e-crc-storage\") on node \"crc\" DevicePath \"\""
Jan 22 05:59:02 crc kubenswrapper[4982]: I0122 05:59:02.382081 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d2skw\" (UniqueName: \"kubernetes.io/projected/1ba6c4ea-fd99-43b6-84a9-26bb22ae800e-kube-api-access-d2skw\") on node \"crc\" DevicePath \"\""
Jan 22 05:59:02 crc kubenswrapper[4982]: I0122 05:59:02.868640 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-hvxgx" event={"ID":"1ba6c4ea-fd99-43b6-84a9-26bb22ae800e","Type":"ContainerDied","Data":"60ed7c0909f41d8714f16a2020afc00e963734f956b46c9b31c56e2061859a4a"}
Jan 22 05:59:02 crc kubenswrapper[4982]: I0122 05:59:02.868713 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="60ed7c0909f41d8714f16a2020afc00e963734f956b46c9b31c56e2061859a4a"
Jan 22 05:59:02 crc kubenswrapper[4982]: I0122 05:59:02.868752 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-hvxgx"
Jan 22 05:59:04 crc kubenswrapper[4982]: I0122 05:59:04.138115 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5h6fl"
Jan 22 05:59:11 crc kubenswrapper[4982]: I0122 05:59:11.241675 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f"]
Jan 22 05:59:11 crc kubenswrapper[4982]: E0122 05:59:11.242829 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ba6c4ea-fd99-43b6-84a9-26bb22ae800e" containerName="storage"
Jan 22 05:59:11 crc kubenswrapper[4982]: I0122 05:59:11.242913 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ba6c4ea-fd99-43b6-84a9-26bb22ae800e" containerName="storage"
Jan 22 05:59:11 crc kubenswrapper[4982]: I0122 05:59:11.243065 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ba6c4ea-fd99-43b6-84a9-26bb22ae800e" containerName="storage"
Jan 22 05:59:11 crc kubenswrapper[4982]: I0122 05:59:11.244137 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f"
Jan 22 05:59:11 crc kubenswrapper[4982]: I0122 05:59:11.249841 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f"]
Jan 22 05:59:11 crc kubenswrapper[4982]: I0122 05:59:11.253668 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Jan 22 05:59:11 crc kubenswrapper[4982]: I0122 05:59:11.324714 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxd4t\" (UniqueName: \"kubernetes.io/projected/b2fc3062-08c7-4f2f-afda-05b7402cd2dd-kube-api-access-rxd4t\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f\" (UID: \"b2fc3062-08c7-4f2f-afda-05b7402cd2dd\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f"
Jan 22 05:59:11 crc kubenswrapper[4982]: I0122 05:59:11.324878 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b2fc3062-08c7-4f2f-afda-05b7402cd2dd-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f\" (UID: \"b2fc3062-08c7-4f2f-afda-05b7402cd2dd\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f"
Jan 22 05:59:11 crc kubenswrapper[4982]: I0122 05:59:11.324965 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b2fc3062-08c7-4f2f-afda-05b7402cd2dd-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f\" (UID: \"b2fc3062-08c7-4f2f-afda-05b7402cd2dd\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f"
Jan 22 05:59:11 crc kubenswrapper[4982]: I0122 05:59:11.426911 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b2fc3062-08c7-4f2f-afda-05b7402cd2dd-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f\" (UID: \"b2fc3062-08c7-4f2f-afda-05b7402cd2dd\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f"
Jan 22 05:59:11 crc kubenswrapper[4982]: I0122 05:59:11.426999 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b2fc3062-08c7-4f2f-afda-05b7402cd2dd-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f\" (UID: \"b2fc3062-08c7-4f2f-afda-05b7402cd2dd\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f"
Jan 22 05:59:11 crc kubenswrapper[4982]: I0122 05:59:11.427057 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxd4t\" (UniqueName: \"kubernetes.io/projected/b2fc3062-08c7-4f2f-afda-05b7402cd2dd-kube-api-access-rxd4t\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f\" (UID: \"b2fc3062-08c7-4f2f-afda-05b7402cd2dd\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f"
Jan 22 05:59:11 crc kubenswrapper[4982]: I0122 05:59:11.427785 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b2fc3062-08c7-4f2f-afda-05b7402cd2dd-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f\" (UID: \"b2fc3062-08c7-4f2f-afda-05b7402cd2dd\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f"
Jan 22 05:59:11 crc kubenswrapper[4982]: I0122 05:59:11.427785 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b2fc3062-08c7-4f2f-afda-05b7402cd2dd-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f\" (UID: \"b2fc3062-08c7-4f2f-afda-05b7402cd2dd\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f"
Jan 22 05:59:11 crc kubenswrapper[4982]: I0122 05:59:11.463018 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxd4t\" (UniqueName: \"kubernetes.io/projected/b2fc3062-08c7-4f2f-afda-05b7402cd2dd-kube-api-access-rxd4t\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f\" (UID: \"b2fc3062-08c7-4f2f-afda-05b7402cd2dd\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f"
Jan 22 05:59:11 crc kubenswrapper[4982]: I0122 05:59:11.566624 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f"
Jan 22 05:59:11 crc kubenswrapper[4982]: I0122 05:59:11.888600 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f"]
Jan 22 05:59:11 crc kubenswrapper[4982]: I0122 05:59:11.938450 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f" event={"ID":"b2fc3062-08c7-4f2f-afda-05b7402cd2dd","Type":"ContainerStarted","Data":"9db6f0a488bf5a3b7eaa1147eb127c67734f293645d03676e7f73d074887fac0"}
Jan 22 05:59:12 crc kubenswrapper[4982]: I0122 05:59:12.952052 4982 generic.go:334] "Generic (PLEG): container finished" podID="b2fc3062-08c7-4f2f-afda-05b7402cd2dd" containerID="debf785f374097d5fac51b8e188f557f3dce3f4be6b597d71136ea506585febd" exitCode=0
Jan 22 05:59:12 crc kubenswrapper[4982]: I0122 05:59:12.952550 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f" event={"ID":"b2fc3062-08c7-4f2f-afda-05b7402cd2dd","Type":"ContainerDied","Data":"debf785f374097d5fac51b8e188f557f3dce3f4be6b597d71136ea506585febd"}
Jan 22 05:59:13 crc kubenswrapper[4982]: I0122 05:59:13.437189 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-prs28"]
Jan 22 05:59:13 crc kubenswrapper[4982]: I0122 05:59:13.441720 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-prs28"
Jan 22 05:59:13 crc kubenswrapper[4982]: I0122 05:59:13.446602 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-prs28"]
Jan 22 05:59:13 crc kubenswrapper[4982]: I0122 05:59:13.480103 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qz27r\" (UniqueName: \"kubernetes.io/projected/5a4d246c-8a4e-475d-ae5a-d4347ddd5764-kube-api-access-qz27r\") pod \"redhat-operators-prs28\" (UID: \"5a4d246c-8a4e-475d-ae5a-d4347ddd5764\") " pod="openshift-marketplace/redhat-operators-prs28"
Jan 22 05:59:13 crc kubenswrapper[4982]: I0122 05:59:13.480521 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a4d246c-8a4e-475d-ae5a-d4347ddd5764-catalog-content\") pod \"redhat-operators-prs28\" (UID: \"5a4d246c-8a4e-475d-ae5a-d4347ddd5764\") " pod="openshift-marketplace/redhat-operators-prs28"
Jan 22 05:59:13 crc kubenswrapper[4982]: I0122 05:59:13.480632 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a4d246c-8a4e-475d-ae5a-d4347ddd5764-utilities\") pod \"redhat-operators-prs28\" (UID: \"5a4d246c-8a4e-475d-ae5a-d4347ddd5764\") " pod="openshift-marketplace/redhat-operators-prs28"
Jan 22 05:59:13 crc kubenswrapper[4982]: I0122 05:59:13.585290 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a4d246c-8a4e-475d-ae5a-d4347ddd5764-catalog-content\") pod \"redhat-operators-prs28\" (UID: \"5a4d246c-8a4e-475d-ae5a-d4347ddd5764\") " pod="openshift-marketplace/redhat-operators-prs28"
Jan 22 05:59:13 crc kubenswrapper[4982]: I0122 05:59:13.585369 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a4d246c-8a4e-475d-ae5a-d4347ddd5764-utilities\") pod \"redhat-operators-prs28\" (UID: \"5a4d246c-8a4e-475d-ae5a-d4347ddd5764\") " pod="openshift-marketplace/redhat-operators-prs28"
Jan 22 05:59:13 crc kubenswrapper[4982]: I0122 05:59:13.585398 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qz27r\" (UniqueName: \"kubernetes.io/projected/5a4d246c-8a4e-475d-ae5a-d4347ddd5764-kube-api-access-qz27r\") pod \"redhat-operators-prs28\" (UID: \"5a4d246c-8a4e-475d-ae5a-d4347ddd5764\") " pod="openshift-marketplace/redhat-operators-prs28"
Jan 22 05:59:13 crc kubenswrapper[4982]: I0122 05:59:13.586325 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a4d246c-8a4e-475d-ae5a-d4347ddd5764-catalog-content\") pod \"redhat-operators-prs28\" (UID: \"5a4d246c-8a4e-475d-ae5a-d4347ddd5764\") " pod="openshift-marketplace/redhat-operators-prs28"
Jan 22 05:59:13 crc kubenswrapper[4982]: I0122 05:59:13.586329 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a4d246c-8a4e-475d-ae5a-d4347ddd5764-utilities\") pod \"redhat-operators-prs28\" (UID: \"5a4d246c-8a4e-475d-ae5a-d4347ddd5764\") " pod="openshift-marketplace/redhat-operators-prs28"
Jan 22 05:59:13 crc kubenswrapper[4982]: I0122 05:59:13.626043 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qz27r\" (UniqueName: \"kubernetes.io/projected/5a4d246c-8a4e-475d-ae5a-d4347ddd5764-kube-api-access-qz27r\") pod \"redhat-operators-prs28\" (UID: \"5a4d246c-8a4e-475d-ae5a-d4347ddd5764\") " pod="openshift-marketplace/redhat-operators-prs28"
Jan 22 05:59:13 crc kubenswrapper[4982]: I0122 05:59:13.781242 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-prs28"
Jan 22 05:59:14 crc kubenswrapper[4982]: I0122 05:59:14.015039 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-prs28"]
Jan 22 05:59:14 crc kubenswrapper[4982]: W0122 05:59:14.028728 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5a4d246c_8a4e_475d_ae5a_d4347ddd5764.slice/crio-402c15c13e95c772c117deb030a50d259199b30aca4059d99f365f144e290ecc WatchSource:0}: Error finding container 402c15c13e95c772c117deb030a50d259199b30aca4059d99f365f144e290ecc: Status 404 returned error can't find the container with id 402c15c13e95c772c117deb030a50d259199b30aca4059d99f365f144e290ecc
Jan 22 05:59:14 crc kubenswrapper[4982]: I0122 05:59:14.968787 4982 generic.go:334] "Generic (PLEG): container finished" podID="5a4d246c-8a4e-475d-ae5a-d4347ddd5764" containerID="9415a8636b0bf01aa12cac5d69052b0fd773d0d6103002afc1cdc0f7df4bbc9e" exitCode=0
Jan 22 05:59:14 crc kubenswrapper[4982]: I0122 05:59:14.968913 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-prs28" event={"ID":"5a4d246c-8a4e-475d-ae5a-d4347ddd5764","Type":"ContainerDied","Data":"9415a8636b0bf01aa12cac5d69052b0fd773d0d6103002afc1cdc0f7df4bbc9e"}
Jan 22 05:59:14 crc kubenswrapper[4982]: I0122 05:59:14.969317 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-prs28" event={"ID":"5a4d246c-8a4e-475d-ae5a-d4347ddd5764","Type":"ContainerStarted","Data":"402c15c13e95c772c117deb030a50d259199b30aca4059d99f365f144e290ecc"}
Jan 22 05:59:15 crc kubenswrapper[4982]: I0122 05:59:15.984883 4982 generic.go:334] "Generic (PLEG): container finished" podID="b2fc3062-08c7-4f2f-afda-05b7402cd2dd" containerID="a9bf977fc510740b5d2567bcd6f8726e0c1795f544bd9baa80d65a2867410187" exitCode=0
Jan 22 05:59:15 crc kubenswrapper[4982]: I0122 05:59:15.985054 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f" event={"ID":"b2fc3062-08c7-4f2f-afda-05b7402cd2dd","Type":"ContainerDied","Data":"a9bf977fc510740b5d2567bcd6f8726e0c1795f544bd9baa80d65a2867410187"}
Jan 22 05:59:15 crc kubenswrapper[4982]: I0122 05:59:15.991601 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-prs28" event={"ID":"5a4d246c-8a4e-475d-ae5a-d4347ddd5764","Type":"ContainerStarted","Data":"d0a2765475fc7bc94664dbf63d1b92bea1a40cf85078f22778706c1e22507140"}
Jan 22 05:59:17 crc kubenswrapper[4982]: I0122 05:59:17.008092 4982 generic.go:334] "Generic (PLEG): container finished" podID="5a4d246c-8a4e-475d-ae5a-d4347ddd5764" containerID="d0a2765475fc7bc94664dbf63d1b92bea1a40cf85078f22778706c1e22507140" exitCode=0
Jan 22 05:59:17 crc kubenswrapper[4982]: I0122 05:59:17.008591 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-prs28" event={"ID":"5a4d246c-8a4e-475d-ae5a-d4347ddd5764","Type":"ContainerDied","Data":"d0a2765475fc7bc94664dbf63d1b92bea1a40cf85078f22778706c1e22507140"}
Jan 22 05:59:17 crc kubenswrapper[4982]: I0122 05:59:17.016899 4982 generic.go:334] "Generic (PLEG): container finished" podID="b2fc3062-08c7-4f2f-afda-05b7402cd2dd" containerID="d6905d7b63b9197a751eaf288db3924af0b3329eb40e981a5ca5bbd34d3b4b48" exitCode=0
Jan 22 05:59:17 crc kubenswrapper[4982]: I0122 05:59:17.016977 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f" event={"ID":"b2fc3062-08c7-4f2f-afda-05b7402cd2dd","Type":"ContainerDied","Data":"d6905d7b63b9197a751eaf288db3924af0b3329eb40e981a5ca5bbd34d3b4b48"}
Jan 22 05:59:18 crc kubenswrapper[4982]: I0122 05:59:18.027567 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-prs28" event={"ID":"5a4d246c-8a4e-475d-ae5a-d4347ddd5764","Type":"ContainerStarted","Data":"a411d58138263c10aae4801c8d504dc71e0f1d7b83cf2a36a49b79f3f3623546"}
Jan 22 05:59:18 crc kubenswrapper[4982]: I0122 05:59:18.066776 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-prs28" podStartSLOduration=2.365066891 podStartE2EDuration="5.066740049s" podCreationTimestamp="2026-01-22 05:59:13 +0000 UTC" firstStartedPulling="2026-01-22 05:59:14.971555601 +0000 UTC m=+815.810193614" lastFinishedPulling="2026-01-22 05:59:17.673228739 +0000 UTC m=+818.511866772" observedRunningTime="2026-01-22 05:59:18.057719852 +0000 UTC m=+818.896357885" watchObservedRunningTime="2026-01-22 05:59:18.066740049 +0000 UTC m=+818.905378062"
Jan 22 05:59:18 crc kubenswrapper[4982]: I0122 05:59:18.350465 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f"
Jan 22 05:59:18 crc kubenswrapper[4982]: I0122 05:59:18.467253 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b2fc3062-08c7-4f2f-afda-05b7402cd2dd-bundle\") pod \"b2fc3062-08c7-4f2f-afda-05b7402cd2dd\" (UID: \"b2fc3062-08c7-4f2f-afda-05b7402cd2dd\") "
Jan 22 05:59:18 crc kubenswrapper[4982]: I0122 05:59:18.467350 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rxd4t\" (UniqueName: \"kubernetes.io/projected/b2fc3062-08c7-4f2f-afda-05b7402cd2dd-kube-api-access-rxd4t\") pod \"b2fc3062-08c7-4f2f-afda-05b7402cd2dd\" (UID: \"b2fc3062-08c7-4f2f-afda-05b7402cd2dd\") "
Jan 22 05:59:18 crc kubenswrapper[4982]: I0122 05:59:18.467529 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b2fc3062-08c7-4f2f-afda-05b7402cd2dd-util\") pod \"b2fc3062-08c7-4f2f-afda-05b7402cd2dd\" (UID: \"b2fc3062-08c7-4f2f-afda-05b7402cd2dd\") "
Jan 22 05:59:18 crc kubenswrapper[4982]: I0122 05:59:18.468156 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2fc3062-08c7-4f2f-afda-05b7402cd2dd-bundle" (OuterVolumeSpecName: "bundle") pod "b2fc3062-08c7-4f2f-afda-05b7402cd2dd" (UID: "b2fc3062-08c7-4f2f-afda-05b7402cd2dd"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 05:59:18 crc kubenswrapper[4982]: I0122 05:59:18.480083 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2fc3062-08c7-4f2f-afda-05b7402cd2dd-kube-api-access-rxd4t" (OuterVolumeSpecName: "kube-api-access-rxd4t") pod "b2fc3062-08c7-4f2f-afda-05b7402cd2dd" (UID: "b2fc3062-08c7-4f2f-afda-05b7402cd2dd"). InnerVolumeSpecName "kube-api-access-rxd4t". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 05:59:18 crc kubenswrapper[4982]: I0122 05:59:18.492308 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2fc3062-08c7-4f2f-afda-05b7402cd2dd-util" (OuterVolumeSpecName: "util") pod "b2fc3062-08c7-4f2f-afda-05b7402cd2dd" (UID: "b2fc3062-08c7-4f2f-afda-05b7402cd2dd"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 05:59:18 crc kubenswrapper[4982]: I0122 05:59:18.569530 4982 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b2fc3062-08c7-4f2f-afda-05b7402cd2dd-util\") on node \"crc\" DevicePath \"\""
Jan 22 05:59:18 crc kubenswrapper[4982]: I0122 05:59:18.569597 4982 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b2fc3062-08c7-4f2f-afda-05b7402cd2dd-bundle\") on node \"crc\" DevicePath \"\""
Jan 22 05:59:18 crc kubenswrapper[4982]: I0122 05:59:18.569618 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rxd4t\" (UniqueName: \"kubernetes.io/projected/b2fc3062-08c7-4f2f-afda-05b7402cd2dd-kube-api-access-rxd4t\") on node \"crc\" DevicePath \"\""
Jan 22 05:59:18 crc kubenswrapper[4982]: I0122 05:59:18.974050 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 05:59:18 crc kubenswrapper[4982]: I0122 05:59:18.974402 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 05:59:19 crc kubenswrapper[4982]: I0122 05:59:19.037345 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f"
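The machine-config-daemon liveness failures that recur here (05:58:48 and 05:59:18) are plain TCP refusals: nothing is listening on 127.0.0.1:8798 at those moments, so the kubelet's HTTP prober fails immediately rather than timing out. A rough Go stand-in for what such an HTTP GET probe does, with the endpoint taken from the log and the helper itself purely illustrative (not kubelet code):

package main

import (
	"fmt"
	"net/http"
	"time"
)

// probeHTTP performs a GET and treats any status below 400 as success,
// approximating the kubelet's HTTP liveness check semantics.
func probeHTTP(url string, timeout time.Duration) error {
	client := &http.Client{Timeout: timeout}
	resp, err := client.Get(url)
	if err != nil {
		// A down endpoint surfaces here, e.g.
		// "dial tcp 127.0.0.1:8798: connect: connection refused".
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode >= 400 {
		return fmt.Errorf("unhealthy: HTTP %d", resp.StatusCode)
	}
	return nil
}

func main() {
	if err := probeHTTP("http://127.0.0.1:8798/health", time.Second); err != nil {
		fmt.Println("probe failed:", err)
	} else {
		fmt.Println("probe ok")
	}
}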
Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f" Jan 22 05:59:19 crc kubenswrapper[4982]: I0122 05:59:19.037283 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f" event={"ID":"b2fc3062-08c7-4f2f-afda-05b7402cd2dd","Type":"ContainerDied","Data":"9db6f0a488bf5a3b7eaa1147eb127c67734f293645d03676e7f73d074887fac0"} Jan 22 05:59:19 crc kubenswrapper[4982]: I0122 05:59:19.037435 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9db6f0a488bf5a3b7eaa1147eb127c67734f293645d03676e7f73d074887fac0" Jan 22 05:59:21 crc kubenswrapper[4982]: I0122 05:59:21.786535 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-nv5l8"] Jan 22 05:59:21 crc kubenswrapper[4982]: E0122 05:59:21.787179 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2fc3062-08c7-4f2f-afda-05b7402cd2dd" containerName="util" Jan 22 05:59:21 crc kubenswrapper[4982]: I0122 05:59:21.787197 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2fc3062-08c7-4f2f-afda-05b7402cd2dd" containerName="util" Jan 22 05:59:21 crc kubenswrapper[4982]: E0122 05:59:21.787214 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2fc3062-08c7-4f2f-afda-05b7402cd2dd" containerName="extract" Jan 22 05:59:21 crc kubenswrapper[4982]: I0122 05:59:21.787222 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2fc3062-08c7-4f2f-afda-05b7402cd2dd" containerName="extract" Jan 22 05:59:21 crc kubenswrapper[4982]: E0122 05:59:21.787239 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2fc3062-08c7-4f2f-afda-05b7402cd2dd" containerName="pull" Jan 22 05:59:21 crc kubenswrapper[4982]: I0122 05:59:21.787247 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2fc3062-08c7-4f2f-afda-05b7402cd2dd" containerName="pull" Jan 22 05:59:21 crc kubenswrapper[4982]: I0122 05:59:21.787349 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2fc3062-08c7-4f2f-afda-05b7402cd2dd" containerName="extract" Jan 22 05:59:21 crc kubenswrapper[4982]: I0122 05:59:21.787797 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-nv5l8" Jan 22 05:59:21 crc kubenswrapper[4982]: I0122 05:59:21.790648 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Jan 22 05:59:21 crc kubenswrapper[4982]: I0122 05:59:21.790718 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Jan 22 05:59:21 crc kubenswrapper[4982]: I0122 05:59:21.790967 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-znxgb" Jan 22 05:59:21 crc kubenswrapper[4982]: I0122 05:59:21.801977 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-nv5l8"] Jan 22 05:59:21 crc kubenswrapper[4982]: I0122 05:59:21.933669 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-plvqg\" (UniqueName: \"kubernetes.io/projected/a58871fe-63e9-47ab-8f67-b9ec61f7b14a-kube-api-access-plvqg\") pod \"nmstate-operator-646758c888-nv5l8\" (UID: \"a58871fe-63e9-47ab-8f67-b9ec61f7b14a\") " pod="openshift-nmstate/nmstate-operator-646758c888-nv5l8" Jan 22 05:59:22 crc kubenswrapper[4982]: I0122 05:59:22.035029 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-plvqg\" (UniqueName: \"kubernetes.io/projected/a58871fe-63e9-47ab-8f67-b9ec61f7b14a-kube-api-access-plvqg\") pod \"nmstate-operator-646758c888-nv5l8\" (UID: \"a58871fe-63e9-47ab-8f67-b9ec61f7b14a\") " pod="openshift-nmstate/nmstate-operator-646758c888-nv5l8" Jan 22 05:59:22 crc kubenswrapper[4982]: I0122 05:59:22.054308 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-plvqg\" (UniqueName: \"kubernetes.io/projected/a58871fe-63e9-47ab-8f67-b9ec61f7b14a-kube-api-access-plvqg\") pod \"nmstate-operator-646758c888-nv5l8\" (UID: \"a58871fe-63e9-47ab-8f67-b9ec61f7b14a\") " pod="openshift-nmstate/nmstate-operator-646758c888-nv5l8" Jan 22 05:59:22 crc kubenswrapper[4982]: I0122 05:59:22.114240 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-nv5l8" Jan 22 05:59:22 crc kubenswrapper[4982]: I0122 05:59:22.355414 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-nv5l8"] Jan 22 05:59:22 crc kubenswrapper[4982]: W0122 05:59:22.385262 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda58871fe_63e9_47ab_8f67_b9ec61f7b14a.slice/crio-123ecd04075180b2ca56f36488284982a1f1337790d28c68752ddf38b161637c WatchSource:0}: Error finding container 123ecd04075180b2ca56f36488284982a1f1337790d28c68752ddf38b161637c: Status 404 returned error can't find the container with id 123ecd04075180b2ca56f36488284982a1f1337790d28c68752ddf38b161637c Jan 22 05:59:23 crc kubenswrapper[4982]: I0122 05:59:23.068925 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-nv5l8" event={"ID":"a58871fe-63e9-47ab-8f67-b9ec61f7b14a","Type":"ContainerStarted","Data":"123ecd04075180b2ca56f36488284982a1f1337790d28c68752ddf38b161637c"} Jan 22 05:59:23 crc kubenswrapper[4982]: I0122 05:59:23.781577 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-prs28" Jan 22 05:59:23 crc kubenswrapper[4982]: I0122 05:59:23.782066 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-prs28" Jan 22 05:59:24 crc kubenswrapper[4982]: I0122 05:59:24.867636 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-prs28" podUID="5a4d246c-8a4e-475d-ae5a-d4347ddd5764" containerName="registry-server" probeResult="failure" output=< Jan 22 05:59:24 crc kubenswrapper[4982]: timeout: failed to connect service ":50051" within 1s Jan 22 05:59:24 crc kubenswrapper[4982]: > Jan 22 05:59:26 crc kubenswrapper[4982]: I0122 05:59:26.119942 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-nv5l8" event={"ID":"a58871fe-63e9-47ab-8f67-b9ec61f7b14a","Type":"ContainerStarted","Data":"73291d8b257d6d14675b795e5bb4236b4442f1c04ce5e0bd35e6b2e90ca971fb"} Jan 22 05:59:26 crc kubenswrapper[4982]: I0122 05:59:26.150133 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-646758c888-nv5l8" podStartSLOduration=2.588189688 podStartE2EDuration="5.150098506s" podCreationTimestamp="2026-01-22 05:59:21 +0000 UTC" firstStartedPulling="2026-01-22 05:59:22.388462569 +0000 UTC m=+823.227100572" lastFinishedPulling="2026-01-22 05:59:24.950371377 +0000 UTC m=+825.789009390" observedRunningTime="2026-01-22 05:59:26.144807915 +0000 UTC m=+826.983446008" watchObservedRunningTime="2026-01-22 05:59:26.150098506 +0000 UTC m=+826.988736549" Jan 22 05:59:31 crc kubenswrapper[4982]: I0122 05:59:31.668579 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-gmdj6"] Jan 22 05:59:31 crc kubenswrapper[4982]: I0122 05:59:31.670361 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-gmdj6" Jan 22 05:59:31 crc kubenswrapper[4982]: I0122 05:59:31.673084 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-7d6tx" Jan 22 05:59:31 crc kubenswrapper[4982]: I0122 05:59:31.679480 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-gmdj6"] Jan 22 05:59:31 crc kubenswrapper[4982]: I0122 05:59:31.702277 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-d5tbt"] Jan 22 05:59:31 crc kubenswrapper[4982]: I0122 05:59:31.703739 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-d5tbt" Jan 22 05:59:31 crc kubenswrapper[4982]: I0122 05:59:31.712004 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Jan 22 05:59:31 crc kubenswrapper[4982]: I0122 05:59:31.754991 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-d5tbt"] Jan 22 05:59:31 crc kubenswrapper[4982]: I0122 05:59:31.784543 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-q2hd9"] Jan 22 05:59:31 crc kubenswrapper[4982]: I0122 05:59:31.785400 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-q2hd9" Jan 22 05:59:31 crc kubenswrapper[4982]: I0122 05:59:31.800097 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mh5ch\" (UniqueName: \"kubernetes.io/projected/d7966747-6b58-4d2a-b9c6-f85627f187d3-kube-api-access-mh5ch\") pod \"nmstate-metrics-54757c584b-gmdj6\" (UID: \"d7966747-6b58-4d2a-b9c6-f85627f187d3\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-gmdj6" Jan 22 05:59:31 crc kubenswrapper[4982]: I0122 05:59:31.901359 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7z2wx\" (UniqueName: \"kubernetes.io/projected/2d9b1a0a-d7ca-4e76-ab0d-40cc990e4fb8-kube-api-access-7z2wx\") pod \"nmstate-handler-q2hd9\" (UID: \"2d9b1a0a-d7ca-4e76-ab0d-40cc990e4fb8\") " pod="openshift-nmstate/nmstate-handler-q2hd9" Jan 22 05:59:31 crc kubenswrapper[4982]: I0122 05:59:31.901415 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llfdq\" (UniqueName: \"kubernetes.io/projected/695d8ee6-67a2-42de-a169-5b823f7dce2b-kube-api-access-llfdq\") pod \"nmstate-webhook-8474b5b9d8-d5tbt\" (UID: \"695d8ee6-67a2-42de-a169-5b823f7dce2b\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-d5tbt" Jan 22 05:59:31 crc kubenswrapper[4982]: I0122 05:59:31.901466 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/2d9b1a0a-d7ca-4e76-ab0d-40cc990e4fb8-dbus-socket\") pod \"nmstate-handler-q2hd9\" (UID: \"2d9b1a0a-d7ca-4e76-ab0d-40cc990e4fb8\") " pod="openshift-nmstate/nmstate-handler-q2hd9" Jan 22 05:59:31 crc kubenswrapper[4982]: I0122 05:59:31.901488 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/2d9b1a0a-d7ca-4e76-ab0d-40cc990e4fb8-nmstate-lock\") pod \"nmstate-handler-q2hd9\" (UID: \"2d9b1a0a-d7ca-4e76-ab0d-40cc990e4fb8\") " 
pod="openshift-nmstate/nmstate-handler-q2hd9" Jan 22 05:59:31 crc kubenswrapper[4982]: I0122 05:59:31.901506 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/2d9b1a0a-d7ca-4e76-ab0d-40cc990e4fb8-ovs-socket\") pod \"nmstate-handler-q2hd9\" (UID: \"2d9b1a0a-d7ca-4e76-ab0d-40cc990e4fb8\") " pod="openshift-nmstate/nmstate-handler-q2hd9" Jan 22 05:59:31 crc kubenswrapper[4982]: I0122 05:59:31.901535 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/695d8ee6-67a2-42de-a169-5b823f7dce2b-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-d5tbt\" (UID: \"695d8ee6-67a2-42de-a169-5b823f7dce2b\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-d5tbt" Jan 22 05:59:31 crc kubenswrapper[4982]: I0122 05:59:31.901753 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mh5ch\" (UniqueName: \"kubernetes.io/projected/d7966747-6b58-4d2a-b9c6-f85627f187d3-kube-api-access-mh5ch\") pod \"nmstate-metrics-54757c584b-gmdj6\" (UID: \"d7966747-6b58-4d2a-b9c6-f85627f187d3\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-gmdj6" Jan 22 05:59:31 crc kubenswrapper[4982]: I0122 05:59:31.929060 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-kg7hd"] Jan 22 05:59:31 crc kubenswrapper[4982]: I0122 05:59:31.929909 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-kg7hd" Jan 22 05:59:31 crc kubenswrapper[4982]: I0122 05:59:31.932435 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Jan 22 05:59:31 crc kubenswrapper[4982]: I0122 05:59:31.932612 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-jj4mh" Jan 22 05:59:31 crc kubenswrapper[4982]: I0122 05:59:31.933790 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mh5ch\" (UniqueName: \"kubernetes.io/projected/d7966747-6b58-4d2a-b9c6-f85627f187d3-kube-api-access-mh5ch\") pod \"nmstate-metrics-54757c584b-gmdj6\" (UID: \"d7966747-6b58-4d2a-b9c6-f85627f187d3\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-gmdj6" Jan 22 05:59:31 crc kubenswrapper[4982]: I0122 05:59:31.938667 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Jan 22 05:59:31 crc kubenswrapper[4982]: I0122 05:59:31.941558 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-kg7hd"] Jan 22 05:59:31 crc kubenswrapper[4982]: I0122 05:59:31.995435 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-gmdj6" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.003491 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7z2wx\" (UniqueName: \"kubernetes.io/projected/2d9b1a0a-d7ca-4e76-ab0d-40cc990e4fb8-kube-api-access-7z2wx\") pod \"nmstate-handler-q2hd9\" (UID: \"2d9b1a0a-d7ca-4e76-ab0d-40cc990e4fb8\") " pod="openshift-nmstate/nmstate-handler-q2hd9" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.003540 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llfdq\" (UniqueName: \"kubernetes.io/projected/695d8ee6-67a2-42de-a169-5b823f7dce2b-kube-api-access-llfdq\") pod \"nmstate-webhook-8474b5b9d8-d5tbt\" (UID: \"695d8ee6-67a2-42de-a169-5b823f7dce2b\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-d5tbt" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.003572 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/2d9b1a0a-d7ca-4e76-ab0d-40cc990e4fb8-nmstate-lock\") pod \"nmstate-handler-q2hd9\" (UID: \"2d9b1a0a-d7ca-4e76-ab0d-40cc990e4fb8\") " pod="openshift-nmstate/nmstate-handler-q2hd9" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.003589 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/2d9b1a0a-d7ca-4e76-ab0d-40cc990e4fb8-dbus-socket\") pod \"nmstate-handler-q2hd9\" (UID: \"2d9b1a0a-d7ca-4e76-ab0d-40cc990e4fb8\") " pod="openshift-nmstate/nmstate-handler-q2hd9" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.003605 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/2d9b1a0a-d7ca-4e76-ab0d-40cc990e4fb8-ovs-socket\") pod \"nmstate-handler-q2hd9\" (UID: \"2d9b1a0a-d7ca-4e76-ab0d-40cc990e4fb8\") " pod="openshift-nmstate/nmstate-handler-q2hd9" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.003626 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/695d8ee6-67a2-42de-a169-5b823f7dce2b-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-d5tbt\" (UID: \"695d8ee6-67a2-42de-a169-5b823f7dce2b\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-d5tbt" Jan 22 05:59:32 crc kubenswrapper[4982]: E0122 05:59:32.003787 4982 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Jan 22 05:59:32 crc kubenswrapper[4982]: E0122 05:59:32.003882 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/695d8ee6-67a2-42de-a169-5b823f7dce2b-tls-key-pair podName:695d8ee6-67a2-42de-a169-5b823f7dce2b nodeName:}" failed. No retries permitted until 2026-01-22 05:59:32.503832895 +0000 UTC m=+833.342470898 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/695d8ee6-67a2-42de-a169-5b823f7dce2b-tls-key-pair") pod "nmstate-webhook-8474b5b9d8-d5tbt" (UID: "695d8ee6-67a2-42de-a169-5b823f7dce2b") : secret "openshift-nmstate-webhook" not found Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.005031 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/2d9b1a0a-d7ca-4e76-ab0d-40cc990e4fb8-nmstate-lock\") pod \"nmstate-handler-q2hd9\" (UID: \"2d9b1a0a-d7ca-4e76-ab0d-40cc990e4fb8\") " pod="openshift-nmstate/nmstate-handler-q2hd9" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.005300 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/2d9b1a0a-d7ca-4e76-ab0d-40cc990e4fb8-dbus-socket\") pod \"nmstate-handler-q2hd9\" (UID: \"2d9b1a0a-d7ca-4e76-ab0d-40cc990e4fb8\") " pod="openshift-nmstate/nmstate-handler-q2hd9" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.005336 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/2d9b1a0a-d7ca-4e76-ab0d-40cc990e4fb8-ovs-socket\") pod \"nmstate-handler-q2hd9\" (UID: \"2d9b1a0a-d7ca-4e76-ab0d-40cc990e4fb8\") " pod="openshift-nmstate/nmstate-handler-q2hd9" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.023548 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7z2wx\" (UniqueName: \"kubernetes.io/projected/2d9b1a0a-d7ca-4e76-ab0d-40cc990e4fb8-kube-api-access-7z2wx\") pod \"nmstate-handler-q2hd9\" (UID: \"2d9b1a0a-d7ca-4e76-ab0d-40cc990e4fb8\") " pod="openshift-nmstate/nmstate-handler-q2hd9" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.024516 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llfdq\" (UniqueName: \"kubernetes.io/projected/695d8ee6-67a2-42de-a169-5b823f7dce2b-kube-api-access-llfdq\") pod \"nmstate-webhook-8474b5b9d8-d5tbt\" (UID: \"695d8ee6-67a2-42de-a169-5b823f7dce2b\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-d5tbt" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.104481 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/7c97357b-daac-4b3b-836c-ccd1710cb6bd-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-kg7hd\" (UID: \"7c97357b-daac-4b3b-836c-ccd1710cb6bd\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-kg7hd" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.105086 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fss7d\" (UniqueName: \"kubernetes.io/projected/7c97357b-daac-4b3b-836c-ccd1710cb6bd-kube-api-access-fss7d\") pod \"nmstate-console-plugin-7754f76f8b-kg7hd\" (UID: \"7c97357b-daac-4b3b-836c-ccd1710cb6bd\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-kg7hd" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.105135 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/7c97357b-daac-4b3b-836c-ccd1710cb6bd-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-kg7hd\" (UID: \"7c97357b-daac-4b3b-836c-ccd1710cb6bd\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-kg7hd" Jan 22 05:59:32 crc kubenswrapper[4982]: 
Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.108435 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-q2hd9"
Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.132012 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-5c5576b4f5-jmbgc"]
Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.132900 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-5c5576b4f5-jmbgc"
Jan 22 05:59:32 crc kubenswrapper[4982]: W0122 05:59:32.136804 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2d9b1a0a_d7ca_4e76_ab0d_40cc990e4fb8.slice/crio-b0f710b0405d33968b6fe9d96fcebf150f252356d0ddd074fff6a032f5a4d81f WatchSource:0}: Error finding container b0f710b0405d33968b6fe9d96fcebf150f252356d0ddd074fff6a032f5a4d81f: Status 404 returned error can't find the container with id b0f710b0405d33968b6fe9d96fcebf150f252356d0ddd074fff6a032f5a4d81f
Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.149329 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-5c5576b4f5-jmbgc"]
Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.210756 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fss7d\" (UniqueName: \"kubernetes.io/projected/7c97357b-daac-4b3b-836c-ccd1710cb6bd-kube-api-access-fss7d\") pod \"nmstate-console-plugin-7754f76f8b-kg7hd\" (UID: \"7c97357b-daac-4b3b-836c-ccd1710cb6bd\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-kg7hd"
Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.210840 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0cf038d5-f119-47ff-a257-88ace06c9fe5-console-oauth-config\") pod \"console-5c5576b4f5-jmbgc\" (UID: \"0cf038d5-f119-47ff-a257-88ace06c9fe5\") " pod="openshift-console/console-5c5576b4f5-jmbgc"
Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.210891 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzb8c\" (UniqueName: \"kubernetes.io/projected/0cf038d5-f119-47ff-a257-88ace06c9fe5-kube-api-access-nzb8c\") pod \"console-5c5576b4f5-jmbgc\" (UID: \"0cf038d5-f119-47ff-a257-88ace06c9fe5\") " pod="openshift-console/console-5c5576b4f5-jmbgc"
Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.210929 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/7c97357b-daac-4b3b-836c-ccd1710cb6bd-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-kg7hd\" (UID: \"7c97357b-daac-4b3b-836c-ccd1710cb6bd\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-kg7hd"
Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.210955 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0cf038d5-f119-47ff-a257-88ace06c9fe5-service-ca\") pod \"console-5c5576b4f5-jmbgc\" (UID: \"0cf038d5-f119-47ff-a257-88ace06c9fe5\") " pod="openshift-console/console-5c5576b4f5-jmbgc"
Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.210978 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName:
\"kubernetes.io/configmap/0cf038d5-f119-47ff-a257-88ace06c9fe5-oauth-serving-cert\") pod \"console-5c5576b4f5-jmbgc\" (UID: \"0cf038d5-f119-47ff-a257-88ace06c9fe5\") " pod="openshift-console/console-5c5576b4f5-jmbgc" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.211007 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0cf038d5-f119-47ff-a257-88ace06c9fe5-trusted-ca-bundle\") pod \"console-5c5576b4f5-jmbgc\" (UID: \"0cf038d5-f119-47ff-a257-88ace06c9fe5\") " pod="openshift-console/console-5c5576b4f5-jmbgc" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.211036 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0cf038d5-f119-47ff-a257-88ace06c9fe5-console-serving-cert\") pod \"console-5c5576b4f5-jmbgc\" (UID: \"0cf038d5-f119-47ff-a257-88ace06c9fe5\") " pod="openshift-console/console-5c5576b4f5-jmbgc" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.211081 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/7c97357b-daac-4b3b-836c-ccd1710cb6bd-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-kg7hd\" (UID: \"7c97357b-daac-4b3b-836c-ccd1710cb6bd\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-kg7hd" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.211104 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0cf038d5-f119-47ff-a257-88ace06c9fe5-console-config\") pod \"console-5c5576b4f5-jmbgc\" (UID: \"0cf038d5-f119-47ff-a257-88ace06c9fe5\") " pod="openshift-console/console-5c5576b4f5-jmbgc" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.211895 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/7c97357b-daac-4b3b-836c-ccd1710cb6bd-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-kg7hd\" (UID: \"7c97357b-daac-4b3b-836c-ccd1710cb6bd\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-kg7hd" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.213709 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-q2hd9" event={"ID":"2d9b1a0a-d7ca-4e76-ab0d-40cc990e4fb8","Type":"ContainerStarted","Data":"b0f710b0405d33968b6fe9d96fcebf150f252356d0ddd074fff6a032f5a4d81f"} Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.221483 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/7c97357b-daac-4b3b-836c-ccd1710cb6bd-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-kg7hd\" (UID: \"7c97357b-daac-4b3b-836c-ccd1710cb6bd\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-kg7hd" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.237552 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fss7d\" (UniqueName: \"kubernetes.io/projected/7c97357b-daac-4b3b-836c-ccd1710cb6bd-kube-api-access-fss7d\") pod \"nmstate-console-plugin-7754f76f8b-kg7hd\" (UID: \"7c97357b-daac-4b3b-836c-ccd1710cb6bd\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-kg7hd" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.267530 4982 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-kg7hd" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.312719 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0cf038d5-f119-47ff-a257-88ace06c9fe5-oauth-serving-cert\") pod \"console-5c5576b4f5-jmbgc\" (UID: \"0cf038d5-f119-47ff-a257-88ace06c9fe5\") " pod="openshift-console/console-5c5576b4f5-jmbgc" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.313145 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0cf038d5-f119-47ff-a257-88ace06c9fe5-trusted-ca-bundle\") pod \"console-5c5576b4f5-jmbgc\" (UID: \"0cf038d5-f119-47ff-a257-88ace06c9fe5\") " pod="openshift-console/console-5c5576b4f5-jmbgc" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.313199 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0cf038d5-f119-47ff-a257-88ace06c9fe5-console-serving-cert\") pod \"console-5c5576b4f5-jmbgc\" (UID: \"0cf038d5-f119-47ff-a257-88ace06c9fe5\") " pod="openshift-console/console-5c5576b4f5-jmbgc" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.313257 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0cf038d5-f119-47ff-a257-88ace06c9fe5-console-config\") pod \"console-5c5576b4f5-jmbgc\" (UID: \"0cf038d5-f119-47ff-a257-88ace06c9fe5\") " pod="openshift-console/console-5c5576b4f5-jmbgc" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.313304 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0cf038d5-f119-47ff-a257-88ace06c9fe5-console-oauth-config\") pod \"console-5c5576b4f5-jmbgc\" (UID: \"0cf038d5-f119-47ff-a257-88ace06c9fe5\") " pod="openshift-console/console-5c5576b4f5-jmbgc" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.313327 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzb8c\" (UniqueName: \"kubernetes.io/projected/0cf038d5-f119-47ff-a257-88ace06c9fe5-kube-api-access-nzb8c\") pod \"console-5c5576b4f5-jmbgc\" (UID: \"0cf038d5-f119-47ff-a257-88ace06c9fe5\") " pod="openshift-console/console-5c5576b4f5-jmbgc" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.313361 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0cf038d5-f119-47ff-a257-88ace06c9fe5-service-ca\") pod \"console-5c5576b4f5-jmbgc\" (UID: \"0cf038d5-f119-47ff-a257-88ace06c9fe5\") " pod="openshift-console/console-5c5576b4f5-jmbgc" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.313801 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0cf038d5-f119-47ff-a257-88ace06c9fe5-oauth-serving-cert\") pod \"console-5c5576b4f5-jmbgc\" (UID: \"0cf038d5-f119-47ff-a257-88ace06c9fe5\") " pod="openshift-console/console-5c5576b4f5-jmbgc" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.314130 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0cf038d5-f119-47ff-a257-88ace06c9fe5-console-config\") pod \"console-5c5576b4f5-jmbgc\" (UID: 
\"0cf038d5-f119-47ff-a257-88ace06c9fe5\") " pod="openshift-console/console-5c5576b4f5-jmbgc" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.314572 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0cf038d5-f119-47ff-a257-88ace06c9fe5-service-ca\") pod \"console-5c5576b4f5-jmbgc\" (UID: \"0cf038d5-f119-47ff-a257-88ace06c9fe5\") " pod="openshift-console/console-5c5576b4f5-jmbgc" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.315168 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0cf038d5-f119-47ff-a257-88ace06c9fe5-trusted-ca-bundle\") pod \"console-5c5576b4f5-jmbgc\" (UID: \"0cf038d5-f119-47ff-a257-88ace06c9fe5\") " pod="openshift-console/console-5c5576b4f5-jmbgc" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.320217 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0cf038d5-f119-47ff-a257-88ace06c9fe5-console-oauth-config\") pod \"console-5c5576b4f5-jmbgc\" (UID: \"0cf038d5-f119-47ff-a257-88ace06c9fe5\") " pod="openshift-console/console-5c5576b4f5-jmbgc" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.321215 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0cf038d5-f119-47ff-a257-88ace06c9fe5-console-serving-cert\") pod \"console-5c5576b4f5-jmbgc\" (UID: \"0cf038d5-f119-47ff-a257-88ace06c9fe5\") " pod="openshift-console/console-5c5576b4f5-jmbgc" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.331132 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzb8c\" (UniqueName: \"kubernetes.io/projected/0cf038d5-f119-47ff-a257-88ace06c9fe5-kube-api-access-nzb8c\") pod \"console-5c5576b4f5-jmbgc\" (UID: \"0cf038d5-f119-47ff-a257-88ace06c9fe5\") " pod="openshift-console/console-5c5576b4f5-jmbgc" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.381519 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-gmdj6"] Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.468818 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-kg7hd"] Jan 22 05:59:32 crc kubenswrapper[4982]: W0122 05:59:32.473392 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7c97357b_daac_4b3b_836c_ccd1710cb6bd.slice/crio-2c67214745145ad87c2d2dc75840c62fe1ba1486b6d6c42cbee44f89cff76510 WatchSource:0}: Error finding container 2c67214745145ad87c2d2dc75840c62fe1ba1486b6d6c42cbee44f89cff76510: Status 404 returned error can't find the container with id 2c67214745145ad87c2d2dc75840c62fe1ba1486b6d6c42cbee44f89cff76510 Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.516176 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/695d8ee6-67a2-42de-a169-5b823f7dce2b-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-d5tbt\" (UID: \"695d8ee6-67a2-42de-a169-5b823f7dce2b\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-d5tbt" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.524030 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: 
\"kubernetes.io/secret/695d8ee6-67a2-42de-a169-5b823f7dce2b-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-d5tbt\" (UID: \"695d8ee6-67a2-42de-a169-5b823f7dce2b\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-d5tbt" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.538433 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-5c5576b4f5-jmbgc" Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.645621 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-d5tbt" Jan 22 05:59:32 crc kubenswrapper[4982]: W0122 05:59:32.977114 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0cf038d5_f119_47ff_a257_88ace06c9fe5.slice/crio-87daeda5da6e0c2f5873bf9d1bd49b4fb415813c2de2ea4f473a61d9f9e8d6a6 WatchSource:0}: Error finding container 87daeda5da6e0c2f5873bf9d1bd49b4fb415813c2de2ea4f473a61d9f9e8d6a6: Status 404 returned error can't find the container with id 87daeda5da6e0c2f5873bf9d1bd49b4fb415813c2de2ea4f473a61d9f9e8d6a6 Jan 22 05:59:32 crc kubenswrapper[4982]: I0122 05:59:32.977583 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-5c5576b4f5-jmbgc"] Jan 22 05:59:33 crc kubenswrapper[4982]: I0122 05:59:33.110652 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-d5tbt"] Jan 22 05:59:33 crc kubenswrapper[4982]: W0122 05:59:33.123481 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod695d8ee6_67a2_42de_a169_5b823f7dce2b.slice/crio-ed0d70edb45be5c351b97dc45456b59489a8f4e9db3c11d681697ae8061015fc WatchSource:0}: Error finding container ed0d70edb45be5c351b97dc45456b59489a8f4e9db3c11d681697ae8061015fc: Status 404 returned error can't find the container with id ed0d70edb45be5c351b97dc45456b59489a8f4e9db3c11d681697ae8061015fc Jan 22 05:59:33 crc kubenswrapper[4982]: I0122 05:59:33.224246 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-gmdj6" event={"ID":"d7966747-6b58-4d2a-b9c6-f85627f187d3","Type":"ContainerStarted","Data":"8a1825f6318f51f906e4ed7c6e8a9b0cc1d913cb5e71db3b47c0cb201a426d6e"} Jan 22 05:59:33 crc kubenswrapper[4982]: I0122 05:59:33.228074 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-kg7hd" event={"ID":"7c97357b-daac-4b3b-836c-ccd1710cb6bd","Type":"ContainerStarted","Data":"2c67214745145ad87c2d2dc75840c62fe1ba1486b6d6c42cbee44f89cff76510"} Jan 22 05:59:33 crc kubenswrapper[4982]: I0122 05:59:33.229960 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-d5tbt" event={"ID":"695d8ee6-67a2-42de-a169-5b823f7dce2b","Type":"ContainerStarted","Data":"ed0d70edb45be5c351b97dc45456b59489a8f4e9db3c11d681697ae8061015fc"} Jan 22 05:59:33 crc kubenswrapper[4982]: I0122 05:59:33.231809 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5c5576b4f5-jmbgc" event={"ID":"0cf038d5-f119-47ff-a257-88ace06c9fe5","Type":"ContainerStarted","Data":"87daeda5da6e0c2f5873bf9d1bd49b4fb415813c2de2ea4f473a61d9f9e8d6a6"} Jan 22 05:59:33 crc kubenswrapper[4982]: I0122 05:59:33.867111 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-prs28" Jan 22 05:59:33 crc 
kubenswrapper[4982]: I0122 05:59:33.944402 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-prs28" Jan 22 05:59:34 crc kubenswrapper[4982]: I0122 05:59:34.127169 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-prs28"] Jan 22 05:59:34 crc kubenswrapper[4982]: I0122 05:59:34.247096 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5c5576b4f5-jmbgc" event={"ID":"0cf038d5-f119-47ff-a257-88ace06c9fe5","Type":"ContainerStarted","Data":"1c70efc783f2dc8cd4e8c27ab24b2d31f75197251cd91933ead74b4b0baa1555"} Jan 22 05:59:34 crc kubenswrapper[4982]: I0122 05:59:34.278156 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-5c5576b4f5-jmbgc" podStartSLOduration=2.278082995 podStartE2EDuration="2.278082995s" podCreationTimestamp="2026-01-22 05:59:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 05:59:34.270493622 +0000 UTC m=+835.109131655" watchObservedRunningTime="2026-01-22 05:59:34.278082995 +0000 UTC m=+835.116720998" Jan 22 05:59:35 crc kubenswrapper[4982]: I0122 05:59:35.255723 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-prs28" podUID="5a4d246c-8a4e-475d-ae5a-d4347ddd5764" containerName="registry-server" containerID="cri-o://a411d58138263c10aae4801c8d504dc71e0f1d7b83cf2a36a49b79f3f3623546" gracePeriod=2 Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.001002 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-prs28" Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.176245 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qz27r\" (UniqueName: \"kubernetes.io/projected/5a4d246c-8a4e-475d-ae5a-d4347ddd5764-kube-api-access-qz27r\") pod \"5a4d246c-8a4e-475d-ae5a-d4347ddd5764\" (UID: \"5a4d246c-8a4e-475d-ae5a-d4347ddd5764\") " Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.176316 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a4d246c-8a4e-475d-ae5a-d4347ddd5764-catalog-content\") pod \"5a4d246c-8a4e-475d-ae5a-d4347ddd5764\" (UID: \"5a4d246c-8a4e-475d-ae5a-d4347ddd5764\") " Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.177415 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a4d246c-8a4e-475d-ae5a-d4347ddd5764-utilities\") pod \"5a4d246c-8a4e-475d-ae5a-d4347ddd5764\" (UID: \"5a4d246c-8a4e-475d-ae5a-d4347ddd5764\") " Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.178603 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a4d246c-8a4e-475d-ae5a-d4347ddd5764-utilities" (OuterVolumeSpecName: "utilities") pod "5a4d246c-8a4e-475d-ae5a-d4347ddd5764" (UID: "5a4d246c-8a4e-475d-ae5a-d4347ddd5764"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.184791 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a4d246c-8a4e-475d-ae5a-d4347ddd5764-kube-api-access-qz27r" (OuterVolumeSpecName: "kube-api-access-qz27r") pod "5a4d246c-8a4e-475d-ae5a-d4347ddd5764" (UID: "5a4d246c-8a4e-475d-ae5a-d4347ddd5764"). InnerVolumeSpecName "kube-api-access-qz27r". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.264785 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-gmdj6" event={"ID":"d7966747-6b58-4d2a-b9c6-f85627f187d3","Type":"ContainerStarted","Data":"21e848bfb197bc33c5d7fbf26b49e3da65beacbf671c6de92707a1f1937b5856"} Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.266957 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-kg7hd" event={"ID":"7c97357b-daac-4b3b-836c-ccd1710cb6bd","Type":"ContainerStarted","Data":"ba85b3e4073eebb4b88f00b3d7b45d3ac4d646e1ba65af2af0eb05bba8d64ee1"} Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.268991 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-d5tbt" event={"ID":"695d8ee6-67a2-42de-a169-5b823f7dce2b","Type":"ContainerStarted","Data":"f51167b36e073759e955f3596b1d05a5caa842343a15637db357e1fa25904fc2"} Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.269933 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-d5tbt" Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.274520 4982 generic.go:334] "Generic (PLEG): container finished" podID="5a4d246c-8a4e-475d-ae5a-d4347ddd5764" containerID="a411d58138263c10aae4801c8d504dc71e0f1d7b83cf2a36a49b79f3f3623546" exitCode=0 Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.274607 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-prs28" Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.274620 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-prs28" event={"ID":"5a4d246c-8a4e-475d-ae5a-d4347ddd5764","Type":"ContainerDied","Data":"a411d58138263c10aae4801c8d504dc71e0f1d7b83cf2a36a49b79f3f3623546"} Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.274660 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-prs28" event={"ID":"5a4d246c-8a4e-475d-ae5a-d4347ddd5764","Type":"ContainerDied","Data":"402c15c13e95c772c117deb030a50d259199b30aca4059d99f365f144e290ecc"} Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.274690 4982 scope.go:117] "RemoveContainer" containerID="a411d58138263c10aae4801c8d504dc71e0f1d7b83cf2a36a49b79f3f3623546" Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.278779 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a4d246c-8a4e-475d-ae5a-d4347ddd5764-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.278908 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qz27r\" (UniqueName: \"kubernetes.io/projected/5a4d246c-8a4e-475d-ae5a-d4347ddd5764-kube-api-access-qz27r\") on node \"crc\" DevicePath \"\"" Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.279092 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-q2hd9" event={"ID":"2d9b1a0a-d7ca-4e76-ab0d-40cc990e4fb8","Type":"ContainerStarted","Data":"3a2e2f077b76974ff4f332b53361f988592cb85eb87cb6fd6a6e7d61d64ad424"} Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.279271 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-q2hd9" Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.308387 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-kg7hd" podStartSLOduration=1.977512037 podStartE2EDuration="5.308362051s" podCreationTimestamp="2026-01-22 05:59:31 +0000 UTC" firstStartedPulling="2026-01-22 05:59:32.476204619 +0000 UTC m=+833.314842622" lastFinishedPulling="2026-01-22 05:59:35.807054623 +0000 UTC m=+836.645692636" observedRunningTime="2026-01-22 05:59:36.294259188 +0000 UTC m=+837.132897221" watchObservedRunningTime="2026-01-22 05:59:36.308362051 +0000 UTC m=+837.147000254" Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.309711 4982 scope.go:117] "RemoveContainer" containerID="d0a2765475fc7bc94664dbf63d1b92bea1a40cf85078f22778706c1e22507140" Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.325963 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-d5tbt" podStartSLOduration=2.6344496299999998 podStartE2EDuration="5.325934164s" podCreationTimestamp="2026-01-22 05:59:31 +0000 UTC" firstStartedPulling="2026-01-22 05:59:33.129644611 +0000 UTC m=+833.968282624" lastFinishedPulling="2026-01-22 05:59:35.821129145 +0000 UTC m=+836.659767158" observedRunningTime="2026-01-22 05:59:36.322845562 +0000 UTC m=+837.161483575" watchObservedRunningTime="2026-01-22 05:59:36.325934164 +0000 UTC m=+837.164572167" Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.345803 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-nmstate/nmstate-handler-q2hd9" podStartSLOduration=1.672197267 podStartE2EDuration="5.345781968s" podCreationTimestamp="2026-01-22 05:59:31 +0000 UTC" firstStartedPulling="2026-01-22 05:59:32.14516929 +0000 UTC m=+832.983807293" lastFinishedPulling="2026-01-22 05:59:35.818753981 +0000 UTC m=+836.657391994" observedRunningTime="2026-01-22 05:59:36.345353428 +0000 UTC m=+837.183991441" watchObservedRunningTime="2026-01-22 05:59:36.345781968 +0000 UTC m=+837.184419991" Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.346751 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a4d246c-8a4e-475d-ae5a-d4347ddd5764-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5a4d246c-8a4e-475d-ae5a-d4347ddd5764" (UID: "5a4d246c-8a4e-475d-ae5a-d4347ddd5764"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.347315 4982 scope.go:117] "RemoveContainer" containerID="9415a8636b0bf01aa12cac5d69052b0fd773d0d6103002afc1cdc0f7df4bbc9e" Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.368342 4982 scope.go:117] "RemoveContainer" containerID="a411d58138263c10aae4801c8d504dc71e0f1d7b83cf2a36a49b79f3f3623546" Jan 22 05:59:36 crc kubenswrapper[4982]: E0122 05:59:36.368976 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a411d58138263c10aae4801c8d504dc71e0f1d7b83cf2a36a49b79f3f3623546\": container with ID starting with a411d58138263c10aae4801c8d504dc71e0f1d7b83cf2a36a49b79f3f3623546 not found: ID does not exist" containerID="a411d58138263c10aae4801c8d504dc71e0f1d7b83cf2a36a49b79f3f3623546" Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.369047 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a411d58138263c10aae4801c8d504dc71e0f1d7b83cf2a36a49b79f3f3623546"} err="failed to get container status \"a411d58138263c10aae4801c8d504dc71e0f1d7b83cf2a36a49b79f3f3623546\": rpc error: code = NotFound desc = could not find container \"a411d58138263c10aae4801c8d504dc71e0f1d7b83cf2a36a49b79f3f3623546\": container with ID starting with a411d58138263c10aae4801c8d504dc71e0f1d7b83cf2a36a49b79f3f3623546 not found: ID does not exist" Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.369081 4982 scope.go:117] "RemoveContainer" containerID="d0a2765475fc7bc94664dbf63d1b92bea1a40cf85078f22778706c1e22507140" Jan 22 05:59:36 crc kubenswrapper[4982]: E0122 05:59:36.369909 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0a2765475fc7bc94664dbf63d1b92bea1a40cf85078f22778706c1e22507140\": container with ID starting with d0a2765475fc7bc94664dbf63d1b92bea1a40cf85078f22778706c1e22507140 not found: ID does not exist" containerID="d0a2765475fc7bc94664dbf63d1b92bea1a40cf85078f22778706c1e22507140" Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.369935 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0a2765475fc7bc94664dbf63d1b92bea1a40cf85078f22778706c1e22507140"} err="failed to get container status \"d0a2765475fc7bc94664dbf63d1b92bea1a40cf85078f22778706c1e22507140\": rpc error: code = NotFound desc = could not find container \"d0a2765475fc7bc94664dbf63d1b92bea1a40cf85078f22778706c1e22507140\": container with ID starting with d0a2765475fc7bc94664dbf63d1b92bea1a40cf85078f22778706c1e22507140 
not found: ID does not exist" Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.369951 4982 scope.go:117] "RemoveContainer" containerID="9415a8636b0bf01aa12cac5d69052b0fd773d0d6103002afc1cdc0f7df4bbc9e" Jan 22 05:59:36 crc kubenswrapper[4982]: E0122 05:59:36.370358 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9415a8636b0bf01aa12cac5d69052b0fd773d0d6103002afc1cdc0f7df4bbc9e\": container with ID starting with 9415a8636b0bf01aa12cac5d69052b0fd773d0d6103002afc1cdc0f7df4bbc9e not found: ID does not exist" containerID="9415a8636b0bf01aa12cac5d69052b0fd773d0d6103002afc1cdc0f7df4bbc9e" Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.370414 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9415a8636b0bf01aa12cac5d69052b0fd773d0d6103002afc1cdc0f7df4bbc9e"} err="failed to get container status \"9415a8636b0bf01aa12cac5d69052b0fd773d0d6103002afc1cdc0f7df4bbc9e\": rpc error: code = NotFound desc = could not find container \"9415a8636b0bf01aa12cac5d69052b0fd773d0d6103002afc1cdc0f7df4bbc9e\": container with ID starting with 9415a8636b0bf01aa12cac5d69052b0fd773d0d6103002afc1cdc0f7df4bbc9e not found: ID does not exist" Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.381267 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a4d246c-8a4e-475d-ae5a-d4347ddd5764-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.630513 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-prs28"] Jan 22 05:59:36 crc kubenswrapper[4982]: I0122 05:59:36.638037 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-prs28"] Jan 22 05:59:37 crc kubenswrapper[4982]: I0122 05:59:37.730786 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a4d246c-8a4e-475d-ae5a-d4347ddd5764" path="/var/lib/kubelet/pods/5a4d246c-8a4e-475d-ae5a-d4347ddd5764/volumes" Jan 22 05:59:39 crc kubenswrapper[4982]: I0122 05:59:39.308359 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-gmdj6" event={"ID":"d7966747-6b58-4d2a-b9c6-f85627f187d3","Type":"ContainerStarted","Data":"f2cec68786be43e79f912da71147bbcb6934cc6a408a47a29ae05250a36e1a7c"} Jan 22 05:59:39 crc kubenswrapper[4982]: I0122 05:59:39.336993 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-54757c584b-gmdj6" podStartSLOduration=2.187509386 podStartE2EDuration="8.336957744s" podCreationTimestamp="2026-01-22 05:59:31 +0000 UTC" firstStartedPulling="2026-01-22 05:59:32.394316985 +0000 UTC m=+833.232954988" lastFinishedPulling="2026-01-22 05:59:38.543765343 +0000 UTC m=+839.382403346" observedRunningTime="2026-01-22 05:59:39.333009964 +0000 UTC m=+840.171648037" watchObservedRunningTime="2026-01-22 05:59:39.336957744 +0000 UTC m=+840.175595807" Jan 22 05:59:42 crc kubenswrapper[4982]: I0122 05:59:42.148136 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-q2hd9" Jan 22 05:59:42 crc kubenswrapper[4982]: I0122 05:59:42.539716 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-5c5576b4f5-jmbgc" Jan 22 05:59:42 crc kubenswrapper[4982]: I0122 05:59:42.540341 4982 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="unhealthy" pod="openshift-console/console-5c5576b4f5-jmbgc" Jan 22 05:59:42 crc kubenswrapper[4982]: I0122 05:59:42.548318 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-5c5576b4f5-jmbgc" Jan 22 05:59:43 crc kubenswrapper[4982]: I0122 05:59:43.348671 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-5c5576b4f5-jmbgc" Jan 22 05:59:43 crc kubenswrapper[4982]: I0122 05:59:43.447942 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-2rfct"] Jan 22 05:59:48 crc kubenswrapper[4982]: I0122 05:59:48.974036 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 05:59:48 crc kubenswrapper[4982]: I0122 05:59:48.974626 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 05:59:48 crc kubenswrapper[4982]: I0122 05:59:48.974724 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 05:59:48 crc kubenswrapper[4982]: I0122 05:59:48.975902 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"17cf6eb07357fb9bc149ba254fa2fe7cf10635a5cee0a8371d5956f5efe8c33f"} pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 05:59:48 crc kubenswrapper[4982]: I0122 05:59:48.976015 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" containerID="cri-o://17cf6eb07357fb9bc149ba254fa2fe7cf10635a5cee0a8371d5956f5efe8c33f" gracePeriod=600 Jan 22 05:59:49 crc kubenswrapper[4982]: I0122 05:59:49.396972 4982 generic.go:334] "Generic (PLEG): container finished" podID="2829369e-72ba-4637-853b-88f5cf242a0e" containerID="17cf6eb07357fb9bc149ba254fa2fe7cf10635a5cee0a8371d5956f5efe8c33f" exitCode=0 Jan 22 05:59:49 crc kubenswrapper[4982]: I0122 05:59:49.397067 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerDied","Data":"17cf6eb07357fb9bc149ba254fa2fe7cf10635a5cee0a8371d5956f5efe8c33f"} Jan 22 05:59:49 crc kubenswrapper[4982]: I0122 05:59:49.397713 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"249cd03cbc96f310822cba8e3d1a005f3e86ad00b55fa38e84dd3ee093cec041"} Jan 22 05:59:49 crc kubenswrapper[4982]: I0122 05:59:49.397759 4982 scope.go:117] "RemoveContainer" containerID="5833ee8c65cce1da38ef4e61b883dee26a7be0515be5664fa83d44be257f62b7" Jan 22 05:59:52 crc 
Jan 22 05:59:52 crc kubenswrapper[4982]: I0122 05:59:52.656383 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-d5tbt"
Jan 22 06:00:00 crc kubenswrapper[4982]: I0122 06:00:00.185098 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484360-9kjlh"]
Jan 22 06:00:00 crc kubenswrapper[4982]: E0122 06:00:00.186268 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a4d246c-8a4e-475d-ae5a-d4347ddd5764" containerName="registry-server"
Jan 22 06:00:00 crc kubenswrapper[4982]: I0122 06:00:00.186287 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a4d246c-8a4e-475d-ae5a-d4347ddd5764" containerName="registry-server"
Jan 22 06:00:00 crc kubenswrapper[4982]: E0122 06:00:00.186313 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a4d246c-8a4e-475d-ae5a-d4347ddd5764" containerName="extract-content"
Jan 22 06:00:00 crc kubenswrapper[4982]: I0122 06:00:00.186322 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a4d246c-8a4e-475d-ae5a-d4347ddd5764" containerName="extract-content"
Jan 22 06:00:00 crc kubenswrapper[4982]: E0122 06:00:00.186338 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a4d246c-8a4e-475d-ae5a-d4347ddd5764" containerName="extract-utilities"
Jan 22 06:00:00 crc kubenswrapper[4982]: I0122 06:00:00.186349 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a4d246c-8a4e-475d-ae5a-d4347ddd5764" containerName="extract-utilities"
Jan 22 06:00:00 crc kubenswrapper[4982]: I0122 06:00:00.186467 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a4d246c-8a4e-475d-ae5a-d4347ddd5764" containerName="registry-server"
Jan 22 06:00:00 crc kubenswrapper[4982]: I0122 06:00:00.186985 4982 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484360-9kjlh" Jan 22 06:00:00 crc kubenswrapper[4982]: I0122 06:00:00.192236 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 22 06:00:00 crc kubenswrapper[4982]: I0122 06:00:00.193127 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 22 06:00:00 crc kubenswrapper[4982]: I0122 06:00:00.205104 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484360-9kjlh"] Jan 22 06:00:00 crc kubenswrapper[4982]: I0122 06:00:00.208079 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9kt8c\" (UniqueName: \"kubernetes.io/projected/fa8c80f1-26e6-488f-aefc-5213ad570754-kube-api-access-9kt8c\") pod \"collect-profiles-29484360-9kjlh\" (UID: \"fa8c80f1-26e6-488f-aefc-5213ad570754\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484360-9kjlh" Jan 22 06:00:00 crc kubenswrapper[4982]: I0122 06:00:00.208164 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fa8c80f1-26e6-488f-aefc-5213ad570754-secret-volume\") pod \"collect-profiles-29484360-9kjlh\" (UID: \"fa8c80f1-26e6-488f-aefc-5213ad570754\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484360-9kjlh" Jan 22 06:00:00 crc kubenswrapper[4982]: I0122 06:00:00.208245 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fa8c80f1-26e6-488f-aefc-5213ad570754-config-volume\") pod \"collect-profiles-29484360-9kjlh\" (UID: \"fa8c80f1-26e6-488f-aefc-5213ad570754\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484360-9kjlh" Jan 22 06:00:00 crc kubenswrapper[4982]: I0122 06:00:00.309374 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fa8c80f1-26e6-488f-aefc-5213ad570754-config-volume\") pod \"collect-profiles-29484360-9kjlh\" (UID: \"fa8c80f1-26e6-488f-aefc-5213ad570754\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484360-9kjlh" Jan 22 06:00:00 crc kubenswrapper[4982]: I0122 06:00:00.309516 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9kt8c\" (UniqueName: \"kubernetes.io/projected/fa8c80f1-26e6-488f-aefc-5213ad570754-kube-api-access-9kt8c\") pod \"collect-profiles-29484360-9kjlh\" (UID: \"fa8c80f1-26e6-488f-aefc-5213ad570754\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484360-9kjlh" Jan 22 06:00:00 crc kubenswrapper[4982]: I0122 06:00:00.309573 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fa8c80f1-26e6-488f-aefc-5213ad570754-secret-volume\") pod \"collect-profiles-29484360-9kjlh\" (UID: \"fa8c80f1-26e6-488f-aefc-5213ad570754\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484360-9kjlh" Jan 22 06:00:00 crc kubenswrapper[4982]: I0122 06:00:00.310710 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fa8c80f1-26e6-488f-aefc-5213ad570754-config-volume\") pod 
\"collect-profiles-29484360-9kjlh\" (UID: \"fa8c80f1-26e6-488f-aefc-5213ad570754\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484360-9kjlh" Jan 22 06:00:00 crc kubenswrapper[4982]: I0122 06:00:00.318557 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fa8c80f1-26e6-488f-aefc-5213ad570754-secret-volume\") pod \"collect-profiles-29484360-9kjlh\" (UID: \"fa8c80f1-26e6-488f-aefc-5213ad570754\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484360-9kjlh" Jan 22 06:00:00 crc kubenswrapper[4982]: I0122 06:00:00.327251 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9kt8c\" (UniqueName: \"kubernetes.io/projected/fa8c80f1-26e6-488f-aefc-5213ad570754-kube-api-access-9kt8c\") pod \"collect-profiles-29484360-9kjlh\" (UID: \"fa8c80f1-26e6-488f-aefc-5213ad570754\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484360-9kjlh" Jan 22 06:00:00 crc kubenswrapper[4982]: I0122 06:00:00.512268 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484360-9kjlh" Jan 22 06:00:01 crc kubenswrapper[4982]: I0122 06:00:01.052494 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484360-9kjlh"] Jan 22 06:00:01 crc kubenswrapper[4982]: I0122 06:00:01.505012 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484360-9kjlh" event={"ID":"fa8c80f1-26e6-488f-aefc-5213ad570754","Type":"ContainerStarted","Data":"ddd77a3e93f4b3a83945c0e1f48ddb33321fdc3509f7b7480adc59252c5e333d"} Jan 22 06:00:01 crc kubenswrapper[4982]: I0122 06:00:01.505082 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484360-9kjlh" event={"ID":"fa8c80f1-26e6-488f-aefc-5213ad570754","Type":"ContainerStarted","Data":"a5d1075f26d2b4f09b0549e752ea7c935a8926928b1ed9fa97943be0686fc37f"} Jan 22 06:00:01 crc kubenswrapper[4982]: I0122 06:00:01.530405 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29484360-9kjlh" podStartSLOduration=1.530376621 podStartE2EDuration="1.530376621s" podCreationTimestamp="2026-01-22 06:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:00:01.52560529 +0000 UTC m=+862.364243323" watchObservedRunningTime="2026-01-22 06:00:01.530376621 +0000 UTC m=+862.369014634" Jan 22 06:00:02 crc kubenswrapper[4982]: I0122 06:00:02.515027 4982 generic.go:334] "Generic (PLEG): container finished" podID="fa8c80f1-26e6-488f-aefc-5213ad570754" containerID="ddd77a3e93f4b3a83945c0e1f48ddb33321fdc3509f7b7480adc59252c5e333d" exitCode=0 Jan 22 06:00:02 crc kubenswrapper[4982]: I0122 06:00:02.515117 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484360-9kjlh" event={"ID":"fa8c80f1-26e6-488f-aefc-5213ad570754","Type":"ContainerDied","Data":"ddd77a3e93f4b3a83945c0e1f48ddb33321fdc3509f7b7480adc59252c5e333d"} Jan 22 06:00:03 crc kubenswrapper[4982]: I0122 06:00:03.835655 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484360-9kjlh" Jan 22 06:00:03 crc kubenswrapper[4982]: I0122 06:00:03.978041 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9kt8c\" (UniqueName: \"kubernetes.io/projected/fa8c80f1-26e6-488f-aefc-5213ad570754-kube-api-access-9kt8c\") pod \"fa8c80f1-26e6-488f-aefc-5213ad570754\" (UID: \"fa8c80f1-26e6-488f-aefc-5213ad570754\") " Jan 22 06:00:03 crc kubenswrapper[4982]: I0122 06:00:03.978201 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fa8c80f1-26e6-488f-aefc-5213ad570754-config-volume\") pod \"fa8c80f1-26e6-488f-aefc-5213ad570754\" (UID: \"fa8c80f1-26e6-488f-aefc-5213ad570754\") " Jan 22 06:00:03 crc kubenswrapper[4982]: I0122 06:00:03.978253 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fa8c80f1-26e6-488f-aefc-5213ad570754-secret-volume\") pod \"fa8c80f1-26e6-488f-aefc-5213ad570754\" (UID: \"fa8c80f1-26e6-488f-aefc-5213ad570754\") " Jan 22 06:00:03 crc kubenswrapper[4982]: I0122 06:00:03.979462 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa8c80f1-26e6-488f-aefc-5213ad570754-config-volume" (OuterVolumeSpecName: "config-volume") pod "fa8c80f1-26e6-488f-aefc-5213ad570754" (UID: "fa8c80f1-26e6-488f-aefc-5213ad570754"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:00:03 crc kubenswrapper[4982]: I0122 06:00:03.980189 4982 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fa8c80f1-26e6-488f-aefc-5213ad570754-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 06:00:03 crc kubenswrapper[4982]: I0122 06:00:03.987449 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa8c80f1-26e6-488f-aefc-5213ad570754-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "fa8c80f1-26e6-488f-aefc-5213ad570754" (UID: "fa8c80f1-26e6-488f-aefc-5213ad570754"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:00:03 crc kubenswrapper[4982]: I0122 06:00:03.994452 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa8c80f1-26e6-488f-aefc-5213ad570754-kube-api-access-9kt8c" (OuterVolumeSpecName: "kube-api-access-9kt8c") pod "fa8c80f1-26e6-488f-aefc-5213ad570754" (UID: "fa8c80f1-26e6-488f-aefc-5213ad570754"). InnerVolumeSpecName "kube-api-access-9kt8c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:00:04 crc kubenswrapper[4982]: I0122 06:00:04.081386 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9kt8c\" (UniqueName: \"kubernetes.io/projected/fa8c80f1-26e6-488f-aefc-5213ad570754-kube-api-access-9kt8c\") on node \"crc\" DevicePath \"\"" Jan 22 06:00:04 crc kubenswrapper[4982]: I0122 06:00:04.081427 4982 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fa8c80f1-26e6-488f-aefc-5213ad570754-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 22 06:00:04 crc kubenswrapper[4982]: I0122 06:00:04.537303 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484360-9kjlh" event={"ID":"fa8c80f1-26e6-488f-aefc-5213ad570754","Type":"ContainerDied","Data":"a5d1075f26d2b4f09b0549e752ea7c935a8926928b1ed9fa97943be0686fc37f"} Jan 22 06:00:04 crc kubenswrapper[4982]: I0122 06:00:04.537788 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a5d1075f26d2b4f09b0549e752ea7c935a8926928b1ed9fa97943be0686fc37f" Jan 22 06:00:04 crc kubenswrapper[4982]: I0122 06:00:04.537407 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484360-9kjlh" Jan 22 06:00:08 crc kubenswrapper[4982]: I0122 06:00:08.502631 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-2rfct" podUID="a9dca4eb-9076-4a32-a851-55d4649cdbf2" containerName="console" containerID="cri-o://a0232455e51d10b6d9b653a4f09498a4df9a02bc2dc7d298c914fae0c6ecb737" gracePeriod=15 Jan 22 06:00:08 crc kubenswrapper[4982]: I0122 06:00:08.971166 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-2rfct_a9dca4eb-9076-4a32-a851-55d4649cdbf2/console/0.log" Jan 22 06:00:08 crc kubenswrapper[4982]: I0122 06:00:08.971550 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-2rfct" Jan 22 06:00:08 crc kubenswrapper[4982]: I0122 06:00:08.984542 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2xxqb\" (UniqueName: \"kubernetes.io/projected/a9dca4eb-9076-4a32-a851-55d4649cdbf2-kube-api-access-2xxqb\") pod \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\" (UID: \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\") " Jan 22 06:00:08 crc kubenswrapper[4982]: I0122 06:00:08.984621 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a9dca4eb-9076-4a32-a851-55d4649cdbf2-console-serving-cert\") pod \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\" (UID: \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\") " Jan 22 06:00:08 crc kubenswrapper[4982]: I0122 06:00:08.984654 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a9dca4eb-9076-4a32-a851-55d4649cdbf2-oauth-serving-cert\") pod \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\" (UID: \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\") " Jan 22 06:00:08 crc kubenswrapper[4982]: I0122 06:00:08.984685 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a9dca4eb-9076-4a32-a851-55d4649cdbf2-service-ca\") pod \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\" (UID: \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\") " Jan 22 06:00:08 crc kubenswrapper[4982]: I0122 06:00:08.984733 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a9dca4eb-9076-4a32-a851-55d4649cdbf2-console-oauth-config\") pod \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\" (UID: \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\") " Jan 22 06:00:08 crc kubenswrapper[4982]: I0122 06:00:08.984768 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a9dca4eb-9076-4a32-a851-55d4649cdbf2-trusted-ca-bundle\") pod \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\" (UID: \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\") " Jan 22 06:00:08 crc kubenswrapper[4982]: I0122 06:00:08.984817 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a9dca4eb-9076-4a32-a851-55d4649cdbf2-console-config\") pod \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\" (UID: \"a9dca4eb-9076-4a32-a851-55d4649cdbf2\") " Jan 22 06:00:08 crc kubenswrapper[4982]: I0122 06:00:08.986087 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9dca4eb-9076-4a32-a851-55d4649cdbf2-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "a9dca4eb-9076-4a32-a851-55d4649cdbf2" (UID: "a9dca4eb-9076-4a32-a851-55d4649cdbf2"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:00:08 crc kubenswrapper[4982]: I0122 06:00:08.986224 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9dca4eb-9076-4a32-a851-55d4649cdbf2-console-config" (OuterVolumeSpecName: "console-config") pod "a9dca4eb-9076-4a32-a851-55d4649cdbf2" (UID: "a9dca4eb-9076-4a32-a851-55d4649cdbf2"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:00:08 crc kubenswrapper[4982]: I0122 06:00:08.986279 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9dca4eb-9076-4a32-a851-55d4649cdbf2-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "a9dca4eb-9076-4a32-a851-55d4649cdbf2" (UID: "a9dca4eb-9076-4a32-a851-55d4649cdbf2"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:00:08 crc kubenswrapper[4982]: I0122 06:00:08.986316 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9dca4eb-9076-4a32-a851-55d4649cdbf2-service-ca" (OuterVolumeSpecName: "service-ca") pod "a9dca4eb-9076-4a32-a851-55d4649cdbf2" (UID: "a9dca4eb-9076-4a32-a851-55d4649cdbf2"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:00:08 crc kubenswrapper[4982]: I0122 06:00:08.996195 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9dca4eb-9076-4a32-a851-55d4649cdbf2-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "a9dca4eb-9076-4a32-a851-55d4649cdbf2" (UID: "a9dca4eb-9076-4a32-a851-55d4649cdbf2"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:00:09 crc kubenswrapper[4982]: I0122 06:00:09.001966 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9dca4eb-9076-4a32-a851-55d4649cdbf2-kube-api-access-2xxqb" (OuterVolumeSpecName: "kube-api-access-2xxqb") pod "a9dca4eb-9076-4a32-a851-55d4649cdbf2" (UID: "a9dca4eb-9076-4a32-a851-55d4649cdbf2"). InnerVolumeSpecName "kube-api-access-2xxqb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:00:09 crc kubenswrapper[4982]: I0122 06:00:09.002137 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9dca4eb-9076-4a32-a851-55d4649cdbf2-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "a9dca4eb-9076-4a32-a851-55d4649cdbf2" (UID: "a9dca4eb-9076-4a32-a851-55d4649cdbf2"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:00:09 crc kubenswrapper[4982]: I0122 06:00:09.086695 4982 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a9dca4eb-9076-4a32-a851-55d4649cdbf2-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:00:09 crc kubenswrapper[4982]: I0122 06:00:09.086938 4982 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a9dca4eb-9076-4a32-a851-55d4649cdbf2-console-config\") on node \"crc\" DevicePath \"\"" Jan 22 06:00:09 crc kubenswrapper[4982]: I0122 06:00:09.087060 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2xxqb\" (UniqueName: \"kubernetes.io/projected/a9dca4eb-9076-4a32-a851-55d4649cdbf2-kube-api-access-2xxqb\") on node \"crc\" DevicePath \"\"" Jan 22 06:00:09 crc kubenswrapper[4982]: I0122 06:00:09.087163 4982 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a9dca4eb-9076-4a32-a851-55d4649cdbf2-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 06:00:09 crc kubenswrapper[4982]: I0122 06:00:09.087175 4982 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a9dca4eb-9076-4a32-a851-55d4649cdbf2-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 06:00:09 crc kubenswrapper[4982]: I0122 06:00:09.087185 4982 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a9dca4eb-9076-4a32-a851-55d4649cdbf2-service-ca\") on node \"crc\" DevicePath \"\"" Jan 22 06:00:09 crc kubenswrapper[4982]: I0122 06:00:09.087197 4982 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a9dca4eb-9076-4a32-a851-55d4649cdbf2-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 22 06:00:09 crc kubenswrapper[4982]: I0122 06:00:09.578333 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-2rfct_a9dca4eb-9076-4a32-a851-55d4649cdbf2/console/0.log" Jan 22 06:00:09 crc kubenswrapper[4982]: I0122 06:00:09.578401 4982 generic.go:334] "Generic (PLEG): container finished" podID="a9dca4eb-9076-4a32-a851-55d4649cdbf2" containerID="a0232455e51d10b6d9b653a4f09498a4df9a02bc2dc7d298c914fae0c6ecb737" exitCode=2 Jan 22 06:00:09 crc kubenswrapper[4982]: I0122 06:00:09.578443 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-2rfct" event={"ID":"a9dca4eb-9076-4a32-a851-55d4649cdbf2","Type":"ContainerDied","Data":"a0232455e51d10b6d9b653a4f09498a4df9a02bc2dc7d298c914fae0c6ecb737"} Jan 22 06:00:09 crc kubenswrapper[4982]: I0122 06:00:09.578483 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-2rfct" event={"ID":"a9dca4eb-9076-4a32-a851-55d4649cdbf2","Type":"ContainerDied","Data":"362441eef66e1d7d4a3ad9f0e14e27d42c25624f62c9e85b717b4c38b9b3564f"} Jan 22 06:00:09 crc kubenswrapper[4982]: I0122 06:00:09.578508 4982 scope.go:117] "RemoveContainer" containerID="a0232455e51d10b6d9b653a4f09498a4df9a02bc2dc7d298c914fae0c6ecb737" Jan 22 06:00:09 crc kubenswrapper[4982]: I0122 06:00:09.578618 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-2rfct" Jan 22 06:00:09 crc kubenswrapper[4982]: I0122 06:00:09.612321 4982 scope.go:117] "RemoveContainer" containerID="a0232455e51d10b6d9b653a4f09498a4df9a02bc2dc7d298c914fae0c6ecb737" Jan 22 06:00:09 crc kubenswrapper[4982]: E0122 06:00:09.613136 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0232455e51d10b6d9b653a4f09498a4df9a02bc2dc7d298c914fae0c6ecb737\": container with ID starting with a0232455e51d10b6d9b653a4f09498a4df9a02bc2dc7d298c914fae0c6ecb737 not found: ID does not exist" containerID="a0232455e51d10b6d9b653a4f09498a4df9a02bc2dc7d298c914fae0c6ecb737" Jan 22 06:00:09 crc kubenswrapper[4982]: I0122 06:00:09.613191 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0232455e51d10b6d9b653a4f09498a4df9a02bc2dc7d298c914fae0c6ecb737"} err="failed to get container status \"a0232455e51d10b6d9b653a4f09498a4df9a02bc2dc7d298c914fae0c6ecb737\": rpc error: code = NotFound desc = could not find container \"a0232455e51d10b6d9b653a4f09498a4df9a02bc2dc7d298c914fae0c6ecb737\": container with ID starting with a0232455e51d10b6d9b653a4f09498a4df9a02bc2dc7d298c914fae0c6ecb737 not found: ID does not exist" Jan 22 06:00:09 crc kubenswrapper[4982]: I0122 06:00:09.633354 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-2rfct"] Jan 22 06:00:09 crc kubenswrapper[4982]: I0122 06:00:09.641527 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-2rfct"] Jan 22 06:00:09 crc kubenswrapper[4982]: I0122 06:00:09.735725 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9dca4eb-9076-4a32-a851-55d4649cdbf2" path="/var/lib/kubelet/pods/a9dca4eb-9076-4a32-a851-55d4649cdbf2/volumes" Jan 22 06:00:10 crc kubenswrapper[4982]: I0122 06:00:10.944989 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr"] Jan 22 06:00:10 crc kubenswrapper[4982]: E0122 06:00:10.945300 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9dca4eb-9076-4a32-a851-55d4649cdbf2" containerName="console" Jan 22 06:00:10 crc kubenswrapper[4982]: I0122 06:00:10.945317 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9dca4eb-9076-4a32-a851-55d4649cdbf2" containerName="console" Jan 22 06:00:10 crc kubenswrapper[4982]: E0122 06:00:10.945337 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa8c80f1-26e6-488f-aefc-5213ad570754" containerName="collect-profiles" Jan 22 06:00:10 crc kubenswrapper[4982]: I0122 06:00:10.945345 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa8c80f1-26e6-488f-aefc-5213ad570754" containerName="collect-profiles" Jan 22 06:00:10 crc kubenswrapper[4982]: I0122 06:00:10.945534 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa8c80f1-26e6-488f-aefc-5213ad570754" containerName="collect-profiles" Jan 22 06:00:10 crc kubenswrapper[4982]: I0122 06:00:10.945546 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9dca4eb-9076-4a32-a851-55d4649cdbf2" containerName="console" Jan 22 06:00:10 crc kubenswrapper[4982]: I0122 06:00:10.946606 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr" Jan 22 06:00:10 crc kubenswrapper[4982]: I0122 06:00:10.949380 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 22 06:00:10 crc kubenswrapper[4982]: I0122 06:00:10.962237 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr"] Jan 22 06:00:11 crc kubenswrapper[4982]: I0122 06:00:11.137225 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/20024115-1f58-4a08-93de-543612c86368-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr\" (UID: \"20024115-1f58-4a08-93de-543612c86368\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr" Jan 22 06:00:11 crc kubenswrapper[4982]: I0122 06:00:11.137806 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/20024115-1f58-4a08-93de-543612c86368-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr\" (UID: \"20024115-1f58-4a08-93de-543612c86368\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr" Jan 22 06:00:11 crc kubenswrapper[4982]: I0122 06:00:11.137884 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cphlp\" (UniqueName: \"kubernetes.io/projected/20024115-1f58-4a08-93de-543612c86368-kube-api-access-cphlp\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr\" (UID: \"20024115-1f58-4a08-93de-543612c86368\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr" Jan 22 06:00:11 crc kubenswrapper[4982]: I0122 06:00:11.239331 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/20024115-1f58-4a08-93de-543612c86368-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr\" (UID: \"20024115-1f58-4a08-93de-543612c86368\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr" Jan 22 06:00:11 crc kubenswrapper[4982]: I0122 06:00:11.239432 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cphlp\" (UniqueName: \"kubernetes.io/projected/20024115-1f58-4a08-93de-543612c86368-kube-api-access-cphlp\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr\" (UID: \"20024115-1f58-4a08-93de-543612c86368\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr" Jan 22 06:00:11 crc kubenswrapper[4982]: I0122 06:00:11.239480 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/20024115-1f58-4a08-93de-543612c86368-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr\" (UID: \"20024115-1f58-4a08-93de-543612c86368\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr" Jan 22 06:00:11 crc kubenswrapper[4982]: I0122 06:00:11.240033 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/20024115-1f58-4a08-93de-543612c86368-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr\" (UID: \"20024115-1f58-4a08-93de-543612c86368\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr" Jan 22 06:00:11 crc kubenswrapper[4982]: I0122 06:00:11.240144 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/20024115-1f58-4a08-93de-543612c86368-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr\" (UID: \"20024115-1f58-4a08-93de-543612c86368\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr" Jan 22 06:00:11 crc kubenswrapper[4982]: I0122 06:00:11.270151 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cphlp\" (UniqueName: \"kubernetes.io/projected/20024115-1f58-4a08-93de-543612c86368-kube-api-access-cphlp\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr\" (UID: \"20024115-1f58-4a08-93de-543612c86368\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr" Jan 22 06:00:11 crc kubenswrapper[4982]: I0122 06:00:11.564509 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr" Jan 22 06:00:11 crc kubenswrapper[4982]: I0122 06:00:11.891676 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr"] Jan 22 06:00:12 crc kubenswrapper[4982]: I0122 06:00:12.609489 4982 generic.go:334] "Generic (PLEG): container finished" podID="20024115-1f58-4a08-93de-543612c86368" containerID="5d05dc27b40e570c0168d742134e9e3c58d9e6989d5f9524fe096d553bf03533" exitCode=0 Jan 22 06:00:12 crc kubenswrapper[4982]: I0122 06:00:12.609597 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr" event={"ID":"20024115-1f58-4a08-93de-543612c86368","Type":"ContainerDied","Data":"5d05dc27b40e570c0168d742134e9e3c58d9e6989d5f9524fe096d553bf03533"} Jan 22 06:00:12 crc kubenswrapper[4982]: I0122 06:00:12.609905 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr" event={"ID":"20024115-1f58-4a08-93de-543612c86368","Type":"ContainerStarted","Data":"0c8b52602d82a3dceb2cb932ea7ec43356566d47b1ea5116575c254c783bf891"} Jan 22 06:00:14 crc kubenswrapper[4982]: I0122 06:00:14.631146 4982 generic.go:334] "Generic (PLEG): container finished" podID="20024115-1f58-4a08-93de-543612c86368" containerID="2b61e51ecd9c8df554b606998f98d778937e17da0d420dd78b167993bb64f070" exitCode=0 Jan 22 06:00:14 crc kubenswrapper[4982]: I0122 06:00:14.631327 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr" event={"ID":"20024115-1f58-4a08-93de-543612c86368","Type":"ContainerDied","Data":"2b61e51ecd9c8df554b606998f98d778937e17da0d420dd78b167993bb64f070"} Jan 22 06:00:15 crc kubenswrapper[4982]: I0122 06:00:15.643927 4982 generic.go:334] "Generic (PLEG): container finished" podID="20024115-1f58-4a08-93de-543612c86368" containerID="f9d677eb4ef052bff176352ee6598462fbc4a1d06272c95e9444a9e74647875f" exitCode=0 Jan 22 06:00:15 crc kubenswrapper[4982]: I0122 
06:00:15.644002 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr" event={"ID":"20024115-1f58-4a08-93de-543612c86368","Type":"ContainerDied","Data":"f9d677eb4ef052bff176352ee6598462fbc4a1d06272c95e9444a9e74647875f"} Jan 22 06:00:17 crc kubenswrapper[4982]: I0122 06:00:17.004584 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr" Jan 22 06:00:17 crc kubenswrapper[4982]: I0122 06:00:17.148467 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/20024115-1f58-4a08-93de-543612c86368-util\") pod \"20024115-1f58-4a08-93de-543612c86368\" (UID: \"20024115-1f58-4a08-93de-543612c86368\") " Jan 22 06:00:17 crc kubenswrapper[4982]: I0122 06:00:17.148569 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/20024115-1f58-4a08-93de-543612c86368-bundle\") pod \"20024115-1f58-4a08-93de-543612c86368\" (UID: \"20024115-1f58-4a08-93de-543612c86368\") " Jan 22 06:00:17 crc kubenswrapper[4982]: I0122 06:00:17.148685 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cphlp\" (UniqueName: \"kubernetes.io/projected/20024115-1f58-4a08-93de-543612c86368-kube-api-access-cphlp\") pod \"20024115-1f58-4a08-93de-543612c86368\" (UID: \"20024115-1f58-4a08-93de-543612c86368\") " Jan 22 06:00:17 crc kubenswrapper[4982]: I0122 06:00:17.150138 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20024115-1f58-4a08-93de-543612c86368-bundle" (OuterVolumeSpecName: "bundle") pod "20024115-1f58-4a08-93de-543612c86368" (UID: "20024115-1f58-4a08-93de-543612c86368"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:00:17 crc kubenswrapper[4982]: I0122 06:00:17.158264 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20024115-1f58-4a08-93de-543612c86368-kube-api-access-cphlp" (OuterVolumeSpecName: "kube-api-access-cphlp") pod "20024115-1f58-4a08-93de-543612c86368" (UID: "20024115-1f58-4a08-93de-543612c86368"). InnerVolumeSpecName "kube-api-access-cphlp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:00:17 crc kubenswrapper[4982]: I0122 06:00:17.169088 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20024115-1f58-4a08-93de-543612c86368-util" (OuterVolumeSpecName: "util") pod "20024115-1f58-4a08-93de-543612c86368" (UID: "20024115-1f58-4a08-93de-543612c86368"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:00:17 crc kubenswrapper[4982]: I0122 06:00:17.250770 4982 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/20024115-1f58-4a08-93de-543612c86368-util\") on node \"crc\" DevicePath \"\"" Jan 22 06:00:17 crc kubenswrapper[4982]: I0122 06:00:17.251323 4982 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/20024115-1f58-4a08-93de-543612c86368-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:00:17 crc kubenswrapper[4982]: I0122 06:00:17.251338 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cphlp\" (UniqueName: \"kubernetes.io/projected/20024115-1f58-4a08-93de-543612c86368-kube-api-access-cphlp\") on node \"crc\" DevicePath \"\"" Jan 22 06:00:17 crc kubenswrapper[4982]: I0122 06:00:17.664258 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr" event={"ID":"20024115-1f58-4a08-93de-543612c86368","Type":"ContainerDied","Data":"0c8b52602d82a3dceb2cb932ea7ec43356566d47b1ea5116575c254c783bf891"} Jan 22 06:00:17 crc kubenswrapper[4982]: I0122 06:00:17.664327 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0c8b52602d82a3dceb2cb932ea7ec43356566d47b1ea5116575c254c783bf891" Jan 22 06:00:17 crc kubenswrapper[4982]: I0122 06:00:17.664330 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr" Jan 22 06:00:26 crc kubenswrapper[4982]: I0122 06:00:26.595430 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-5bc8cc58c6-wjczj"] Jan 22 06:00:26 crc kubenswrapper[4982]: E0122 06:00:26.596166 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20024115-1f58-4a08-93de-543612c86368" containerName="pull" Jan 22 06:00:26 crc kubenswrapper[4982]: I0122 06:00:26.596179 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="20024115-1f58-4a08-93de-543612c86368" containerName="pull" Jan 22 06:00:26 crc kubenswrapper[4982]: E0122 06:00:26.596192 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20024115-1f58-4a08-93de-543612c86368" containerName="extract" Jan 22 06:00:26 crc kubenswrapper[4982]: I0122 06:00:26.596198 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="20024115-1f58-4a08-93de-543612c86368" containerName="extract" Jan 22 06:00:26 crc kubenswrapper[4982]: E0122 06:00:26.596209 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20024115-1f58-4a08-93de-543612c86368" containerName="util" Jan 22 06:00:26 crc kubenswrapper[4982]: I0122 06:00:26.596215 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="20024115-1f58-4a08-93de-543612c86368" containerName="util" Jan 22 06:00:26 crc kubenswrapper[4982]: I0122 06:00:26.596308 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="20024115-1f58-4a08-93de-543612c86368" containerName="extract" Jan 22 06:00:26 crc kubenswrapper[4982]: I0122 06:00:26.596718 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-5bc8cc58c6-wjczj" Jan 22 06:00:26 crc kubenswrapper[4982]: I0122 06:00:26.599731 4982 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Jan 22 06:00:26 crc kubenswrapper[4982]: I0122 06:00:26.599755 4982 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Jan 22 06:00:26 crc kubenswrapper[4982]: I0122 06:00:26.600011 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Jan 22 06:00:26 crc kubenswrapper[4982]: I0122 06:00:26.600388 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Jan 22 06:00:26 crc kubenswrapper[4982]: I0122 06:00:26.600668 4982 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-lz9xv" Jan 22 06:00:26 crc kubenswrapper[4982]: I0122 06:00:26.646665 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-5bc8cc58c6-wjczj"] Jan 22 06:00:26 crc kubenswrapper[4982]: I0122 06:00:26.649144 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/44543400-72e1-4ae7-976e-901b6ebac4b2-webhook-cert\") pod \"metallb-operator-controller-manager-5bc8cc58c6-wjczj\" (UID: \"44543400-72e1-4ae7-976e-901b6ebac4b2\") " pod="metallb-system/metallb-operator-controller-manager-5bc8cc58c6-wjczj" Jan 22 06:00:26 crc kubenswrapper[4982]: I0122 06:00:26.649218 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xm9bp\" (UniqueName: \"kubernetes.io/projected/44543400-72e1-4ae7-976e-901b6ebac4b2-kube-api-access-xm9bp\") pod \"metallb-operator-controller-manager-5bc8cc58c6-wjczj\" (UID: \"44543400-72e1-4ae7-976e-901b6ebac4b2\") " pod="metallb-system/metallb-operator-controller-manager-5bc8cc58c6-wjczj" Jan 22 06:00:26 crc kubenswrapper[4982]: I0122 06:00:26.649293 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/44543400-72e1-4ae7-976e-901b6ebac4b2-apiservice-cert\") pod \"metallb-operator-controller-manager-5bc8cc58c6-wjczj\" (UID: \"44543400-72e1-4ae7-976e-901b6ebac4b2\") " pod="metallb-system/metallb-operator-controller-manager-5bc8cc58c6-wjczj" Jan 22 06:00:26 crc kubenswrapper[4982]: I0122 06:00:26.749792 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/44543400-72e1-4ae7-976e-901b6ebac4b2-apiservice-cert\") pod \"metallb-operator-controller-manager-5bc8cc58c6-wjczj\" (UID: \"44543400-72e1-4ae7-976e-901b6ebac4b2\") " pod="metallb-system/metallb-operator-controller-manager-5bc8cc58c6-wjczj" Jan 22 06:00:26 crc kubenswrapper[4982]: I0122 06:00:26.749841 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/44543400-72e1-4ae7-976e-901b6ebac4b2-webhook-cert\") pod \"metallb-operator-controller-manager-5bc8cc58c6-wjczj\" (UID: \"44543400-72e1-4ae7-976e-901b6ebac4b2\") " pod="metallb-system/metallb-operator-controller-manager-5bc8cc58c6-wjczj" Jan 22 06:00:26 crc kubenswrapper[4982]: I0122 06:00:26.749906 4982 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xm9bp\" (UniqueName: \"kubernetes.io/projected/44543400-72e1-4ae7-976e-901b6ebac4b2-kube-api-access-xm9bp\") pod \"metallb-operator-controller-manager-5bc8cc58c6-wjczj\" (UID: \"44543400-72e1-4ae7-976e-901b6ebac4b2\") " pod="metallb-system/metallb-operator-controller-manager-5bc8cc58c6-wjczj" Jan 22 06:00:26 crc kubenswrapper[4982]: I0122 06:00:26.758289 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/44543400-72e1-4ae7-976e-901b6ebac4b2-apiservice-cert\") pod \"metallb-operator-controller-manager-5bc8cc58c6-wjczj\" (UID: \"44543400-72e1-4ae7-976e-901b6ebac4b2\") " pod="metallb-system/metallb-operator-controller-manager-5bc8cc58c6-wjczj" Jan 22 06:00:26 crc kubenswrapper[4982]: I0122 06:00:26.772319 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xm9bp\" (UniqueName: \"kubernetes.io/projected/44543400-72e1-4ae7-976e-901b6ebac4b2-kube-api-access-xm9bp\") pod \"metallb-operator-controller-manager-5bc8cc58c6-wjczj\" (UID: \"44543400-72e1-4ae7-976e-901b6ebac4b2\") " pod="metallb-system/metallb-operator-controller-manager-5bc8cc58c6-wjczj" Jan 22 06:00:26 crc kubenswrapper[4982]: I0122 06:00:26.773326 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/44543400-72e1-4ae7-976e-901b6ebac4b2-webhook-cert\") pod \"metallb-operator-controller-manager-5bc8cc58c6-wjczj\" (UID: \"44543400-72e1-4ae7-976e-901b6ebac4b2\") " pod="metallb-system/metallb-operator-controller-manager-5bc8cc58c6-wjczj" Jan 22 06:00:26 crc kubenswrapper[4982]: I0122 06:00:26.857619 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-579c7888b9-9pz77"] Jan 22 06:00:26 crc kubenswrapper[4982]: I0122 06:00:26.858346 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-579c7888b9-9pz77" Jan 22 06:00:26 crc kubenswrapper[4982]: I0122 06:00:26.864687 4982 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Jan 22 06:00:26 crc kubenswrapper[4982]: I0122 06:00:26.864777 4982 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Jan 22 06:00:26 crc kubenswrapper[4982]: I0122 06:00:26.864993 4982 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-vnkgk" Jan 22 06:00:26 crc kubenswrapper[4982]: I0122 06:00:26.872193 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-579c7888b9-9pz77"] Jan 22 06:00:26 crc kubenswrapper[4982]: I0122 06:00:26.914949 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-5bc8cc58c6-wjczj" Jan 22 06:00:27 crc kubenswrapper[4982]: I0122 06:00:27.063759 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rg6q4\" (UniqueName: \"kubernetes.io/projected/2b28eec2-730e-422f-b384-8b60cb00a45f-kube-api-access-rg6q4\") pod \"metallb-operator-webhook-server-579c7888b9-9pz77\" (UID: \"2b28eec2-730e-422f-b384-8b60cb00a45f\") " pod="metallb-system/metallb-operator-webhook-server-579c7888b9-9pz77" Jan 22 06:00:27 crc kubenswrapper[4982]: I0122 06:00:27.064164 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2b28eec2-730e-422f-b384-8b60cb00a45f-apiservice-cert\") pod \"metallb-operator-webhook-server-579c7888b9-9pz77\" (UID: \"2b28eec2-730e-422f-b384-8b60cb00a45f\") " pod="metallb-system/metallb-operator-webhook-server-579c7888b9-9pz77" Jan 22 06:00:27 crc kubenswrapper[4982]: I0122 06:00:27.064191 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2b28eec2-730e-422f-b384-8b60cb00a45f-webhook-cert\") pod \"metallb-operator-webhook-server-579c7888b9-9pz77\" (UID: \"2b28eec2-730e-422f-b384-8b60cb00a45f\") " pod="metallb-system/metallb-operator-webhook-server-579c7888b9-9pz77" Jan 22 06:00:27 crc kubenswrapper[4982]: I0122 06:00:27.151010 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-5bc8cc58c6-wjczj"] Jan 22 06:00:27 crc kubenswrapper[4982]: I0122 06:00:27.165175 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rg6q4\" (UniqueName: \"kubernetes.io/projected/2b28eec2-730e-422f-b384-8b60cb00a45f-kube-api-access-rg6q4\") pod \"metallb-operator-webhook-server-579c7888b9-9pz77\" (UID: \"2b28eec2-730e-422f-b384-8b60cb00a45f\") " pod="metallb-system/metallb-operator-webhook-server-579c7888b9-9pz77" Jan 22 06:00:27 crc kubenswrapper[4982]: I0122 06:00:27.165209 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2b28eec2-730e-422f-b384-8b60cb00a45f-apiservice-cert\") pod \"metallb-operator-webhook-server-579c7888b9-9pz77\" (UID: \"2b28eec2-730e-422f-b384-8b60cb00a45f\") " pod="metallb-system/metallb-operator-webhook-server-579c7888b9-9pz77" Jan 22 06:00:27 crc kubenswrapper[4982]: I0122 06:00:27.165232 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2b28eec2-730e-422f-b384-8b60cb00a45f-webhook-cert\") pod \"metallb-operator-webhook-server-579c7888b9-9pz77\" (UID: \"2b28eec2-730e-422f-b384-8b60cb00a45f\") " pod="metallb-system/metallb-operator-webhook-server-579c7888b9-9pz77" Jan 22 06:00:27 crc kubenswrapper[4982]: I0122 06:00:27.185262 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2b28eec2-730e-422f-b384-8b60cb00a45f-webhook-cert\") pod \"metallb-operator-webhook-server-579c7888b9-9pz77\" (UID: \"2b28eec2-730e-422f-b384-8b60cb00a45f\") " pod="metallb-system/metallb-operator-webhook-server-579c7888b9-9pz77" Jan 22 06:00:27 crc kubenswrapper[4982]: I0122 06:00:27.187222 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rg6q4\" 
(UniqueName: \"kubernetes.io/projected/2b28eec2-730e-422f-b384-8b60cb00a45f-kube-api-access-rg6q4\") pod \"metallb-operator-webhook-server-579c7888b9-9pz77\" (UID: \"2b28eec2-730e-422f-b384-8b60cb00a45f\") " pod="metallb-system/metallb-operator-webhook-server-579c7888b9-9pz77" Jan 22 06:00:27 crc kubenswrapper[4982]: I0122 06:00:27.190181 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2b28eec2-730e-422f-b384-8b60cb00a45f-apiservice-cert\") pod \"metallb-operator-webhook-server-579c7888b9-9pz77\" (UID: \"2b28eec2-730e-422f-b384-8b60cb00a45f\") " pod="metallb-system/metallb-operator-webhook-server-579c7888b9-9pz77" Jan 22 06:00:27 crc kubenswrapper[4982]: I0122 06:00:27.476233 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-579c7888b9-9pz77" Jan 22 06:00:27 crc kubenswrapper[4982]: I0122 06:00:27.734593 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-5bc8cc58c6-wjczj" event={"ID":"44543400-72e1-4ae7-976e-901b6ebac4b2","Type":"ContainerStarted","Data":"8620dfde4f775eefad74f66b6e8245020e2b72954978054abea36f2b69df88a0"} Jan 22 06:00:27 crc kubenswrapper[4982]: I0122 06:00:27.740657 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-579c7888b9-9pz77"] Jan 22 06:00:27 crc kubenswrapper[4982]: W0122 06:00:27.743102 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2b28eec2_730e_422f_b384_8b60cb00a45f.slice/crio-84a5d8d7d676f4ea750c84ae6e5e266c1b94d8abfc69a218a365c24692420c7a WatchSource:0}: Error finding container 84a5d8d7d676f4ea750c84ae6e5e266c1b94d8abfc69a218a365c24692420c7a: Status 404 returned error can't find the container with id 84a5d8d7d676f4ea750c84ae6e5e266c1b94d8abfc69a218a365c24692420c7a Jan 22 06:00:28 crc kubenswrapper[4982]: I0122 06:00:28.739518 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-579c7888b9-9pz77" event={"ID":"2b28eec2-730e-422f-b384-8b60cb00a45f","Type":"ContainerStarted","Data":"84a5d8d7d676f4ea750c84ae6e5e266c1b94d8abfc69a218a365c24692420c7a"} Jan 22 06:00:30 crc kubenswrapper[4982]: I0122 06:00:30.781579 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-5bc8cc58c6-wjczj" event={"ID":"44543400-72e1-4ae7-976e-901b6ebac4b2","Type":"ContainerStarted","Data":"495cfb91dcf7146d70f86f93dfe49b0623af01f098288049c847f1807a3238a2"} Jan 22 06:00:30 crc kubenswrapper[4982]: I0122 06:00:30.782132 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-5bc8cc58c6-wjczj" Jan 22 06:00:30 crc kubenswrapper[4982]: I0122 06:00:30.816587 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-5bc8cc58c6-wjczj" podStartSLOduration=1.704349583 podStartE2EDuration="4.816568041s" podCreationTimestamp="2026-01-22 06:00:26 +0000 UTC" firstStartedPulling="2026-01-22 06:00:27.16656279 +0000 UTC m=+888.005200793" lastFinishedPulling="2026-01-22 06:00:30.278781248 +0000 UTC m=+891.117419251" observedRunningTime="2026-01-22 06:00:30.815225433 +0000 UTC m=+891.653863436" watchObservedRunningTime="2026-01-22 06:00:30.816568041 +0000 UTC m=+891.655206044" Jan 22 06:00:32 crc 
kubenswrapper[4982]: I0122 06:00:32.798675 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-579c7888b9-9pz77" event={"ID":"2b28eec2-730e-422f-b384-8b60cb00a45f","Type":"ContainerStarted","Data":"fa2cc0961589956c4478d185b64f3bea76fbcbd31036a5b50337ae2fa8718ad2"} Jan 22 06:00:32 crc kubenswrapper[4982]: I0122 06:00:32.799207 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-579c7888b9-9pz77" Jan 22 06:00:32 crc kubenswrapper[4982]: I0122 06:00:32.818183 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-579c7888b9-9pz77" podStartSLOduration=2.496927407 podStartE2EDuration="6.818163255s" podCreationTimestamp="2026-01-22 06:00:26 +0000 UTC" firstStartedPulling="2026-01-22 06:00:27.746153535 +0000 UTC m=+888.584791548" lastFinishedPulling="2026-01-22 06:00:32.067389383 +0000 UTC m=+892.906027396" observedRunningTime="2026-01-22 06:00:32.816184062 +0000 UTC m=+893.654822165" watchObservedRunningTime="2026-01-22 06:00:32.818163255 +0000 UTC m=+893.656801268" Jan 22 06:00:41 crc kubenswrapper[4982]: I0122 06:00:41.831937 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4pmq9"] Jan 22 06:00:41 crc kubenswrapper[4982]: I0122 06:00:41.834466 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4pmq9" Jan 22 06:00:41 crc kubenswrapper[4982]: I0122 06:00:41.846318 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4pmq9"] Jan 22 06:00:41 crc kubenswrapper[4982]: I0122 06:00:41.982572 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/799f3ee4-6876-461e-92d8-b36a51dd574e-catalog-content\") pod \"community-operators-4pmq9\" (UID: \"799f3ee4-6876-461e-92d8-b36a51dd574e\") " pod="openshift-marketplace/community-operators-4pmq9" Jan 22 06:00:41 crc kubenswrapper[4982]: I0122 06:00:41.982682 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/799f3ee4-6876-461e-92d8-b36a51dd574e-utilities\") pod \"community-operators-4pmq9\" (UID: \"799f3ee4-6876-461e-92d8-b36a51dd574e\") " pod="openshift-marketplace/community-operators-4pmq9" Jan 22 06:00:41 crc kubenswrapper[4982]: I0122 06:00:41.982759 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thcjh\" (UniqueName: \"kubernetes.io/projected/799f3ee4-6876-461e-92d8-b36a51dd574e-kube-api-access-thcjh\") pod \"community-operators-4pmq9\" (UID: \"799f3ee4-6876-461e-92d8-b36a51dd574e\") " pod="openshift-marketplace/community-operators-4pmq9" Jan 22 06:00:42 crc kubenswrapper[4982]: I0122 06:00:42.084245 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/799f3ee4-6876-461e-92d8-b36a51dd574e-catalog-content\") pod \"community-operators-4pmq9\" (UID: \"799f3ee4-6876-461e-92d8-b36a51dd574e\") " pod="openshift-marketplace/community-operators-4pmq9" Jan 22 06:00:42 crc kubenswrapper[4982]: I0122 06:00:42.084340 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/799f3ee4-6876-461e-92d8-b36a51dd574e-utilities\") pod \"community-operators-4pmq9\" (UID: \"799f3ee4-6876-461e-92d8-b36a51dd574e\") " pod="openshift-marketplace/community-operators-4pmq9" Jan 22 06:00:42 crc kubenswrapper[4982]: I0122 06:00:42.084390 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thcjh\" (UniqueName: \"kubernetes.io/projected/799f3ee4-6876-461e-92d8-b36a51dd574e-kube-api-access-thcjh\") pod \"community-operators-4pmq9\" (UID: \"799f3ee4-6876-461e-92d8-b36a51dd574e\") " pod="openshift-marketplace/community-operators-4pmq9" Jan 22 06:00:42 crc kubenswrapper[4982]: I0122 06:00:42.084825 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/799f3ee4-6876-461e-92d8-b36a51dd574e-catalog-content\") pod \"community-operators-4pmq9\" (UID: \"799f3ee4-6876-461e-92d8-b36a51dd574e\") " pod="openshift-marketplace/community-operators-4pmq9" Jan 22 06:00:42 crc kubenswrapper[4982]: I0122 06:00:42.084997 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/799f3ee4-6876-461e-92d8-b36a51dd574e-utilities\") pod \"community-operators-4pmq9\" (UID: \"799f3ee4-6876-461e-92d8-b36a51dd574e\") " pod="openshift-marketplace/community-operators-4pmq9" Jan 22 06:00:42 crc kubenswrapper[4982]: I0122 06:00:42.113007 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thcjh\" (UniqueName: \"kubernetes.io/projected/799f3ee4-6876-461e-92d8-b36a51dd574e-kube-api-access-thcjh\") pod \"community-operators-4pmq9\" (UID: \"799f3ee4-6876-461e-92d8-b36a51dd574e\") " pod="openshift-marketplace/community-operators-4pmq9" Jan 22 06:00:42 crc kubenswrapper[4982]: I0122 06:00:42.166944 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4pmq9" Jan 22 06:00:42 crc kubenswrapper[4982]: I0122 06:00:42.629609 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4pmq9"] Jan 22 06:00:42 crc kubenswrapper[4982]: I0122 06:00:42.862254 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4pmq9" event={"ID":"799f3ee4-6876-461e-92d8-b36a51dd574e","Type":"ContainerStarted","Data":"c2b066c0f90bd98251b4c767b49d8719343097d014aa4259519be3b87d3de57e"} Jan 22 06:00:43 crc kubenswrapper[4982]: I0122 06:00:43.869224 4982 generic.go:334] "Generic (PLEG): container finished" podID="799f3ee4-6876-461e-92d8-b36a51dd574e" containerID="f22d1a7c9b26d26cd26004e2bde7f014cbfc401742f4b10f6ba80692ba1e5f9f" exitCode=0 Jan 22 06:00:43 crc kubenswrapper[4982]: I0122 06:00:43.869296 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4pmq9" event={"ID":"799f3ee4-6876-461e-92d8-b36a51dd574e","Type":"ContainerDied","Data":"f22d1a7c9b26d26cd26004e2bde7f014cbfc401742f4b10f6ba80692ba1e5f9f"} Jan 22 06:00:44 crc kubenswrapper[4982]: I0122 06:00:44.879306 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4pmq9" event={"ID":"799f3ee4-6876-461e-92d8-b36a51dd574e","Type":"ContainerStarted","Data":"8b314360cbd45d41a25fb6a6c8e4d08ce18321fe19b792eba351a158a3fa193c"} Jan 22 06:00:45 crc kubenswrapper[4982]: I0122 06:00:45.888075 4982 generic.go:334] "Generic (PLEG): container finished" podID="799f3ee4-6876-461e-92d8-b36a51dd574e" containerID="8b314360cbd45d41a25fb6a6c8e4d08ce18321fe19b792eba351a158a3fa193c" exitCode=0 Jan 22 06:00:45 crc kubenswrapper[4982]: I0122 06:00:45.888150 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4pmq9" event={"ID":"799f3ee4-6876-461e-92d8-b36a51dd574e","Type":"ContainerDied","Data":"8b314360cbd45d41a25fb6a6c8e4d08ce18321fe19b792eba351a158a3fa193c"} Jan 22 06:00:46 crc kubenswrapper[4982]: I0122 06:00:46.897559 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4pmq9" event={"ID":"799f3ee4-6876-461e-92d8-b36a51dd574e","Type":"ContainerStarted","Data":"66659120456c903718dfc221bd80b369a09b38aa4653fde7d7f69b121097938b"} Jan 22 06:00:46 crc kubenswrapper[4982]: I0122 06:00:46.941336 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4pmq9" podStartSLOduration=3.438175602 podStartE2EDuration="5.94132043s" podCreationTimestamp="2026-01-22 06:00:41 +0000 UTC" firstStartedPulling="2026-01-22 06:00:43.871621964 +0000 UTC m=+904.710259957" lastFinishedPulling="2026-01-22 06:00:46.374766742 +0000 UTC m=+907.213404785" observedRunningTime="2026-01-22 06:00:46.940045785 +0000 UTC m=+907.778683808" watchObservedRunningTime="2026-01-22 06:00:46.94132043 +0000 UTC m=+907.779958423" Jan 22 06:00:47 crc kubenswrapper[4982]: I0122 06:00:47.483002 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-579c7888b9-9pz77" Jan 22 06:00:52 crc kubenswrapper[4982]: I0122 06:00:52.167141 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4pmq9" Jan 22 06:00:52 crc kubenswrapper[4982]: I0122 06:00:52.167630 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/community-operators-4pmq9" Jan 22 06:00:52 crc kubenswrapper[4982]: I0122 06:00:52.230802 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4pmq9" Jan 22 06:00:53 crc kubenswrapper[4982]: I0122 06:00:53.017263 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4pmq9" Jan 22 06:00:53 crc kubenswrapper[4982]: I0122 06:00:53.084101 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4pmq9"] Jan 22 06:00:54 crc kubenswrapper[4982]: I0122 06:00:54.960828 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-4pmq9" podUID="799f3ee4-6876-461e-92d8-b36a51dd574e" containerName="registry-server" containerID="cri-o://66659120456c903718dfc221bd80b369a09b38aa4653fde7d7f69b121097938b" gracePeriod=2 Jan 22 06:00:55 crc kubenswrapper[4982]: I0122 06:00:55.671534 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-lbd2v"] Jan 22 06:00:55 crc kubenswrapper[4982]: I0122 06:00:55.673196 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lbd2v" Jan 22 06:00:55 crc kubenswrapper[4982]: I0122 06:00:55.696438 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lbd2v"] Jan 22 06:00:55 crc kubenswrapper[4982]: I0122 06:00:55.771533 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4nmp\" (UniqueName: \"kubernetes.io/projected/fe800044-3586-4b5a-a90c-0f7d1005e7f5-kube-api-access-l4nmp\") pod \"certified-operators-lbd2v\" (UID: \"fe800044-3586-4b5a-a90c-0f7d1005e7f5\") " pod="openshift-marketplace/certified-operators-lbd2v" Jan 22 06:00:55 crc kubenswrapper[4982]: I0122 06:00:55.771607 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe800044-3586-4b5a-a90c-0f7d1005e7f5-catalog-content\") pod \"certified-operators-lbd2v\" (UID: \"fe800044-3586-4b5a-a90c-0f7d1005e7f5\") " pod="openshift-marketplace/certified-operators-lbd2v" Jan 22 06:00:55 crc kubenswrapper[4982]: I0122 06:00:55.771780 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe800044-3586-4b5a-a90c-0f7d1005e7f5-utilities\") pod \"certified-operators-lbd2v\" (UID: \"fe800044-3586-4b5a-a90c-0f7d1005e7f5\") " pod="openshift-marketplace/certified-operators-lbd2v" Jan 22 06:00:55 crc kubenswrapper[4982]: I0122 06:00:55.873205 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4nmp\" (UniqueName: \"kubernetes.io/projected/fe800044-3586-4b5a-a90c-0f7d1005e7f5-kube-api-access-l4nmp\") pod \"certified-operators-lbd2v\" (UID: \"fe800044-3586-4b5a-a90c-0f7d1005e7f5\") " pod="openshift-marketplace/certified-operators-lbd2v" Jan 22 06:00:55 crc kubenswrapper[4982]: I0122 06:00:55.873294 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe800044-3586-4b5a-a90c-0f7d1005e7f5-catalog-content\") pod \"certified-operators-lbd2v\" (UID: \"fe800044-3586-4b5a-a90c-0f7d1005e7f5\") " 
pod="openshift-marketplace/certified-operators-lbd2v" Jan 22 06:00:55 crc kubenswrapper[4982]: I0122 06:00:55.873348 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe800044-3586-4b5a-a90c-0f7d1005e7f5-utilities\") pod \"certified-operators-lbd2v\" (UID: \"fe800044-3586-4b5a-a90c-0f7d1005e7f5\") " pod="openshift-marketplace/certified-operators-lbd2v" Jan 22 06:00:55 crc kubenswrapper[4982]: I0122 06:00:55.874393 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe800044-3586-4b5a-a90c-0f7d1005e7f5-catalog-content\") pod \"certified-operators-lbd2v\" (UID: \"fe800044-3586-4b5a-a90c-0f7d1005e7f5\") " pod="openshift-marketplace/certified-operators-lbd2v" Jan 22 06:00:55 crc kubenswrapper[4982]: I0122 06:00:55.874467 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe800044-3586-4b5a-a90c-0f7d1005e7f5-utilities\") pod \"certified-operators-lbd2v\" (UID: \"fe800044-3586-4b5a-a90c-0f7d1005e7f5\") " pod="openshift-marketplace/certified-operators-lbd2v" Jan 22 06:00:55 crc kubenswrapper[4982]: I0122 06:00:55.898092 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4nmp\" (UniqueName: \"kubernetes.io/projected/fe800044-3586-4b5a-a90c-0f7d1005e7f5-kube-api-access-l4nmp\") pod \"certified-operators-lbd2v\" (UID: \"fe800044-3586-4b5a-a90c-0f7d1005e7f5\") " pod="openshift-marketplace/certified-operators-lbd2v" Jan 22 06:00:55 crc kubenswrapper[4982]: I0122 06:00:55.995562 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lbd2v" Jan 22 06:00:56 crc kubenswrapper[4982]: I0122 06:00:56.865239 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lbd2v"] Jan 22 06:00:56 crc kubenswrapper[4982]: I0122 06:00:56.978128 4982 generic.go:334] "Generic (PLEG): container finished" podID="799f3ee4-6876-461e-92d8-b36a51dd574e" containerID="66659120456c903718dfc221bd80b369a09b38aa4653fde7d7f69b121097938b" exitCode=0 Jan 22 06:00:56 crc kubenswrapper[4982]: I0122 06:00:56.978216 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4pmq9" event={"ID":"799f3ee4-6876-461e-92d8-b36a51dd574e","Type":"ContainerDied","Data":"66659120456c903718dfc221bd80b369a09b38aa4653fde7d7f69b121097938b"} Jan 22 06:00:56 crc kubenswrapper[4982]: I0122 06:00:56.979965 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lbd2v" event={"ID":"fe800044-3586-4b5a-a90c-0f7d1005e7f5","Type":"ContainerStarted","Data":"a7bca066e60a51c790755940da2a370158d3f9693c3d124f3f320d40ece025aa"} Jan 22 06:00:57 crc kubenswrapper[4982]: I0122 06:00:57.105770 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4pmq9" Jan 22 06:00:57 crc kubenswrapper[4982]: I0122 06:00:57.290243 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/799f3ee4-6876-461e-92d8-b36a51dd574e-catalog-content\") pod \"799f3ee4-6876-461e-92d8-b36a51dd574e\" (UID: \"799f3ee4-6876-461e-92d8-b36a51dd574e\") " Jan 22 06:00:57 crc kubenswrapper[4982]: I0122 06:00:57.290336 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thcjh\" (UniqueName: \"kubernetes.io/projected/799f3ee4-6876-461e-92d8-b36a51dd574e-kube-api-access-thcjh\") pod \"799f3ee4-6876-461e-92d8-b36a51dd574e\" (UID: \"799f3ee4-6876-461e-92d8-b36a51dd574e\") " Jan 22 06:00:57 crc kubenswrapper[4982]: I0122 06:00:57.290413 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/799f3ee4-6876-461e-92d8-b36a51dd574e-utilities\") pod \"799f3ee4-6876-461e-92d8-b36a51dd574e\" (UID: \"799f3ee4-6876-461e-92d8-b36a51dd574e\") " Jan 22 06:00:57 crc kubenswrapper[4982]: I0122 06:00:57.291745 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/799f3ee4-6876-461e-92d8-b36a51dd574e-utilities" (OuterVolumeSpecName: "utilities") pod "799f3ee4-6876-461e-92d8-b36a51dd574e" (UID: "799f3ee4-6876-461e-92d8-b36a51dd574e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:00:57 crc kubenswrapper[4982]: I0122 06:00:57.296190 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/799f3ee4-6876-461e-92d8-b36a51dd574e-kube-api-access-thcjh" (OuterVolumeSpecName: "kube-api-access-thcjh") pod "799f3ee4-6876-461e-92d8-b36a51dd574e" (UID: "799f3ee4-6876-461e-92d8-b36a51dd574e"). InnerVolumeSpecName "kube-api-access-thcjh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:00:57 crc kubenswrapper[4982]: I0122 06:00:57.339509 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/799f3ee4-6876-461e-92d8-b36a51dd574e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "799f3ee4-6876-461e-92d8-b36a51dd574e" (UID: "799f3ee4-6876-461e-92d8-b36a51dd574e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:00:57 crc kubenswrapper[4982]: I0122 06:00:57.392245 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-thcjh\" (UniqueName: \"kubernetes.io/projected/799f3ee4-6876-461e-92d8-b36a51dd574e-kube-api-access-thcjh\") on node \"crc\" DevicePath \"\"" Jan 22 06:00:57 crc kubenswrapper[4982]: I0122 06:00:57.392304 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/799f3ee4-6876-461e-92d8-b36a51dd574e-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 06:00:57 crc kubenswrapper[4982]: I0122 06:00:57.392319 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/799f3ee4-6876-461e-92d8-b36a51dd574e-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 06:00:57 crc kubenswrapper[4982]: I0122 06:00:57.991891 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4pmq9" event={"ID":"799f3ee4-6876-461e-92d8-b36a51dd574e","Type":"ContainerDied","Data":"c2b066c0f90bd98251b4c767b49d8719343097d014aa4259519be3b87d3de57e"} Jan 22 06:00:57 crc kubenswrapper[4982]: I0122 06:00:57.991917 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4pmq9" Jan 22 06:00:57 crc kubenswrapper[4982]: I0122 06:00:57.992265 4982 scope.go:117] "RemoveContainer" containerID="66659120456c903718dfc221bd80b369a09b38aa4653fde7d7f69b121097938b" Jan 22 06:00:57 crc kubenswrapper[4982]: I0122 06:00:57.994968 4982 generic.go:334] "Generic (PLEG): container finished" podID="fe800044-3586-4b5a-a90c-0f7d1005e7f5" containerID="48a28d0cd4a5c2af4d2ce1a77e7da3caa23e1a6d1f64b655279942071bbf66a7" exitCode=0 Jan 22 06:00:57 crc kubenswrapper[4982]: I0122 06:00:57.995042 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lbd2v" event={"ID":"fe800044-3586-4b5a-a90c-0f7d1005e7f5","Type":"ContainerDied","Data":"48a28d0cd4a5c2af4d2ce1a77e7da3caa23e1a6d1f64b655279942071bbf66a7"} Jan 22 06:00:58 crc kubenswrapper[4982]: I0122 06:00:58.018563 4982 scope.go:117] "RemoveContainer" containerID="8b314360cbd45d41a25fb6a6c8e4d08ce18321fe19b792eba351a158a3fa193c" Jan 22 06:00:58 crc kubenswrapper[4982]: I0122 06:00:58.054999 4982 scope.go:117] "RemoveContainer" containerID="f22d1a7c9b26d26cd26004e2bde7f014cbfc401742f4b10f6ba80692ba1e5f9f" Jan 22 06:00:58 crc kubenswrapper[4982]: I0122 06:00:58.055129 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4pmq9"] Jan 22 06:00:58 crc kubenswrapper[4982]: I0122 06:00:58.065539 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-4pmq9"] Jan 22 06:00:59 crc kubenswrapper[4982]: I0122 06:00:59.005835 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lbd2v" event={"ID":"fe800044-3586-4b5a-a90c-0f7d1005e7f5","Type":"ContainerStarted","Data":"0510e35d1c6e6a74920ef81ee2eb0e4de6386cbf1536c346ec61dbbef05bbbc3"} Jan 22 06:00:59 crc kubenswrapper[4982]: I0122 06:00:59.732113 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="799f3ee4-6876-461e-92d8-b36a51dd574e" path="/var/lib/kubelet/pods/799f3ee4-6876-461e-92d8-b36a51dd574e/volumes" Jan 22 06:01:00 crc kubenswrapper[4982]: I0122 06:01:00.017439 4982 generic.go:334] "Generic (PLEG): container finished" 
podID="fe800044-3586-4b5a-a90c-0f7d1005e7f5" containerID="0510e35d1c6e6a74920ef81ee2eb0e4de6386cbf1536c346ec61dbbef05bbbc3" exitCode=0 Jan 22 06:01:00 crc kubenswrapper[4982]: I0122 06:01:00.017469 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lbd2v" event={"ID":"fe800044-3586-4b5a-a90c-0f7d1005e7f5","Type":"ContainerDied","Data":"0510e35d1c6e6a74920ef81ee2eb0e4de6386cbf1536c346ec61dbbef05bbbc3"} Jan 22 06:01:01 crc kubenswrapper[4982]: I0122 06:01:01.031192 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lbd2v" event={"ID":"fe800044-3586-4b5a-a90c-0f7d1005e7f5","Type":"ContainerStarted","Data":"92b9cef61a25fcedc0551fed57fd1b7e53dab56bc5f870cdb879dee2a6f82e00"} Jan 22 06:01:01 crc kubenswrapper[4982]: I0122 06:01:01.052715 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-lbd2v" podStartSLOduration=3.649572348 podStartE2EDuration="6.052692465s" podCreationTimestamp="2026-01-22 06:00:55 +0000 UTC" firstStartedPulling="2026-01-22 06:00:57.997507739 +0000 UTC m=+918.836145752" lastFinishedPulling="2026-01-22 06:01:00.400627826 +0000 UTC m=+921.239265869" observedRunningTime="2026-01-22 06:01:01.052109809 +0000 UTC m=+921.890747852" watchObservedRunningTime="2026-01-22 06:01:01.052692465 +0000 UTC m=+921.891330468" Jan 22 06:01:05 crc kubenswrapper[4982]: I0122 06:01:05.995758 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-lbd2v" Jan 22 06:01:05 crc kubenswrapper[4982]: I0122 06:01:05.996347 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-lbd2v" Jan 22 06:01:06 crc kubenswrapper[4982]: I0122 06:01:06.045347 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-lbd2v" Jan 22 06:01:06 crc kubenswrapper[4982]: I0122 06:01:06.120797 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-lbd2v" Jan 22 06:01:06 crc kubenswrapper[4982]: I0122 06:01:06.290570 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lbd2v"] Jan 22 06:01:06 crc kubenswrapper[4982]: I0122 06:01:06.920715 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-5bc8cc58c6-wjczj" Jan 22 06:01:07 crc kubenswrapper[4982]: I0122 06:01:07.803951 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-4v5kf"] Jan 22 06:01:07 crc kubenswrapper[4982]: E0122 06:01:07.804606 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="799f3ee4-6876-461e-92d8-b36a51dd574e" containerName="extract-content" Jan 22 06:01:07 crc kubenswrapper[4982]: I0122 06:01:07.804629 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="799f3ee4-6876-461e-92d8-b36a51dd574e" containerName="extract-content" Jan 22 06:01:07 crc kubenswrapper[4982]: E0122 06:01:07.804660 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="799f3ee4-6876-461e-92d8-b36a51dd574e" containerName="extract-utilities" Jan 22 06:01:07 crc kubenswrapper[4982]: I0122 06:01:07.804669 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="799f3ee4-6876-461e-92d8-b36a51dd574e" containerName="extract-utilities" Jan 22 06:01:07 crc kubenswrapper[4982]: E0122 
06:01:07.804687 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="799f3ee4-6876-461e-92d8-b36a51dd574e" containerName="registry-server" Jan 22 06:01:07 crc kubenswrapper[4982]: I0122 06:01:07.804694 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="799f3ee4-6876-461e-92d8-b36a51dd574e" containerName="registry-server" Jan 22 06:01:07 crc kubenswrapper[4982]: I0122 06:01:07.804930 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="799f3ee4-6876-461e-92d8-b36a51dd574e" containerName="registry-server" Jan 22 06:01:07 crc kubenswrapper[4982]: I0122 06:01:07.814614 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-4v5kf" Jan 22 06:01:07 crc kubenswrapper[4982]: I0122 06:01:07.836078 4982 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-4ct5b" Jan 22 06:01:07 crc kubenswrapper[4982]: I0122 06:01:07.836325 4982 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Jan 22 06:01:07 crc kubenswrapper[4982]: I0122 06:01:07.837230 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Jan 22 06:01:07 crc kubenswrapper[4982]: I0122 06:01:07.837349 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-5f6hr"] Jan 22 06:01:07 crc kubenswrapper[4982]: I0122 06:01:07.838320 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-5f6hr" Jan 22 06:01:07 crc kubenswrapper[4982]: I0122 06:01:07.845752 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-5f6hr"] Jan 22 06:01:07 crc kubenswrapper[4982]: I0122 06:01:07.847992 4982 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Jan 22 06:01:07 crc kubenswrapper[4982]: I0122 06:01:07.929411 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-7w7hm"] Jan 22 06:01:07 crc kubenswrapper[4982]: I0122 06:01:07.930936 4982 util.go:30] "No sandbox for pod can be found. 
Jan 22 06:01:07 crc kubenswrapper[4982]: I0122 06:01:07.937020 4982 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist"
Jan 22 06:01:07 crc kubenswrapper[4982]: I0122 06:01:07.937258 4982 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-9qkbs"
Jan 22 06:01:07 crc kubenswrapper[4982]: I0122 06:01:07.937432 4982 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret"
Jan 22 06:01:07 crc kubenswrapper[4982]: I0122 06:01:07.937547 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2"
Jan 22 06:01:07 crc kubenswrapper[4982]: I0122 06:01:07.941407 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/8b5087c1-3eb3-46f7-8013-66ffbf2e6f69-frr-sockets\") pod \"frr-k8s-4v5kf\" (UID: \"8b5087c1-3eb3-46f7-8013-66ffbf2e6f69\") " pod="metallb-system/frr-k8s-4v5kf"
Jan 22 06:01:07 crc kubenswrapper[4982]: I0122 06:01:07.941463 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/8b5087c1-3eb3-46f7-8013-66ffbf2e6f69-frr-conf\") pod \"frr-k8s-4v5kf\" (UID: \"8b5087c1-3eb3-46f7-8013-66ffbf2e6f69\") " pod="metallb-system/frr-k8s-4v5kf"
Jan 22 06:01:07 crc kubenswrapper[4982]: I0122 06:01:07.941580 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/14e2cb27-a2f0-45fe-8d0a-3be2c34c57aa-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-5f6hr\" (UID: \"14e2cb27-a2f0-45fe-8d0a-3be2c34c57aa\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-5f6hr"
Jan 22 06:01:07 crc kubenswrapper[4982]: I0122 06:01:07.941631 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/8b5087c1-3eb3-46f7-8013-66ffbf2e6f69-reloader\") pod \"frr-k8s-4v5kf\" (UID: \"8b5087c1-3eb3-46f7-8013-66ffbf2e6f69\") " pod="metallb-system/frr-k8s-4v5kf"
Jan 22 06:01:07 crc kubenswrapper[4982]: I0122 06:01:07.941667 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/8b5087c1-3eb3-46f7-8013-66ffbf2e6f69-metrics\") pod \"frr-k8s-4v5kf\" (UID: \"8b5087c1-3eb3-46f7-8013-66ffbf2e6f69\") " pod="metallb-system/frr-k8s-4v5kf"
Jan 22 06:01:07 crc kubenswrapper[4982]: I0122 06:01:07.941691 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jg6fg\" (UniqueName: \"kubernetes.io/projected/8b5087c1-3eb3-46f7-8013-66ffbf2e6f69-kube-api-access-jg6fg\") pod \"frr-k8s-4v5kf\" (UID: \"8b5087c1-3eb3-46f7-8013-66ffbf2e6f69\") " pod="metallb-system/frr-k8s-4v5kf"
Jan 22 06:01:07 crc kubenswrapper[4982]: I0122 06:01:07.941835 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8b5087c1-3eb3-46f7-8013-66ffbf2e6f69-metrics-certs\") pod \"frr-k8s-4v5kf\" (UID: \"8b5087c1-3eb3-46f7-8013-66ffbf2e6f69\") " pod="metallb-system/frr-k8s-4v5kf"
Jan 22 06:01:07 crc kubenswrapper[4982]: I0122 06:01:07.941982 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfzw7\" (UniqueName: \"kubernetes.io/projected/14e2cb27-a2f0-45fe-8d0a-3be2c34c57aa-kube-api-access-rfzw7\") pod \"frr-k8s-webhook-server-7df86c4f6c-5f6hr\" (UID: \"14e2cb27-a2f0-45fe-8d0a-3be2c34c57aa\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-5f6hr"
Jan 22 06:01:07 crc kubenswrapper[4982]: I0122 06:01:07.942018 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/8b5087c1-3eb3-46f7-8013-66ffbf2e6f69-frr-startup\") pod \"frr-k8s-4v5kf\" (UID: \"8b5087c1-3eb3-46f7-8013-66ffbf2e6f69\") " pod="metallb-system/frr-k8s-4v5kf"
Jan 22 06:01:07 crc kubenswrapper[4982]: I0122 06:01:07.950119 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-6968d8fdc4-z65sv"]
Jan 22 06:01:07 crc kubenswrapper[4982]: I0122 06:01:07.951164 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6968d8fdc4-z65sv"
Jan 22 06:01:07 crc kubenswrapper[4982]: I0122 06:01:07.954076 4982 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret"
Jan 22 06:01:07 crc kubenswrapper[4982]: I0122 06:01:07.965444 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-z65sv"]
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.043652 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2df7325f-3ac5-49eb-9c91-adc20d7b3e80-metrics-certs\") pod \"speaker-7w7hm\" (UID: \"2df7325f-3ac5-49eb-9c91-adc20d7b3e80\") " pod="metallb-system/speaker-7w7hm"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.043719 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/8b5087c1-3eb3-46f7-8013-66ffbf2e6f69-frr-sockets\") pod \"frr-k8s-4v5kf\" (UID: \"8b5087c1-3eb3-46f7-8013-66ffbf2e6f69\") " pod="metallb-system/frr-k8s-4v5kf"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.043748 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d8de638b-d0f4-419d-8df5-d0892e3ffc6d-metrics-certs\") pod \"controller-6968d8fdc4-z65sv\" (UID: \"d8de638b-d0f4-419d-8df5-d0892e3ffc6d\") " pod="metallb-system/controller-6968d8fdc4-z65sv"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.043772 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/8b5087c1-3eb3-46f7-8013-66ffbf2e6f69-frr-conf\") pod \"frr-k8s-4v5kf\" (UID: \"8b5087c1-3eb3-46f7-8013-66ffbf2e6f69\") " pod="metallb-system/frr-k8s-4v5kf"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.043803 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/14e2cb27-a2f0-45fe-8d0a-3be2c34c57aa-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-5f6hr\" (UID: \"14e2cb27-a2f0-45fe-8d0a-3be2c34c57aa\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-5f6hr"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.043824 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/2df7325f-3ac5-49eb-9c91-adc20d7b3e80-memberlist\") pod \"speaker-7w7hm\" (UID: \"2df7325f-3ac5-49eb-9c91-adc20d7b3e80\") " pod="metallb-system/speaker-7w7hm"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.043844 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/8b5087c1-3eb3-46f7-8013-66ffbf2e6f69-reloader\") pod \"frr-k8s-4v5kf\" (UID: \"8b5087c1-3eb3-46f7-8013-66ffbf2e6f69\") " pod="metallb-system/frr-k8s-4v5kf"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.043886 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d8de638b-d0f4-419d-8df5-d0892e3ffc6d-cert\") pod \"controller-6968d8fdc4-z65sv\" (UID: \"d8de638b-d0f4-419d-8df5-d0892e3ffc6d\") " pod="metallb-system/controller-6968d8fdc4-z65sv"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.044005 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/8b5087c1-3eb3-46f7-8013-66ffbf2e6f69-metrics\") pod \"frr-k8s-4v5kf\" (UID: \"8b5087c1-3eb3-46f7-8013-66ffbf2e6f69\") " pod="metallb-system/frr-k8s-4v5kf"
Jan 22 06:01:08 crc kubenswrapper[4982]: E0122 06:01:08.044028 4982 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.044069 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jg6fg\" (UniqueName: \"kubernetes.io/projected/8b5087c1-3eb3-46f7-8013-66ffbf2e6f69-kube-api-access-jg6fg\") pod \"frr-k8s-4v5kf\" (UID: \"8b5087c1-3eb3-46f7-8013-66ffbf2e6f69\") " pod="metallb-system/frr-k8s-4v5kf"
Jan 22 06:01:08 crc kubenswrapper[4982]: E0122 06:01:08.044137 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/14e2cb27-a2f0-45fe-8d0a-3be2c34c57aa-cert podName:14e2cb27-a2f0-45fe-8d0a-3be2c34c57aa nodeName:}" failed. No retries permitted until 2026-01-22 06:01:08.544101268 +0000 UTC m=+929.382739291 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/14e2cb27-a2f0-45fe-8d0a-3be2c34c57aa-cert") pod "frr-k8s-webhook-server-7df86c4f6c-5f6hr" (UID: "14e2cb27-a2f0-45fe-8d0a-3be2c34c57aa") : secret "frr-k8s-webhook-server-cert" not found
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.044232 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8b5087c1-3eb3-46f7-8013-66ffbf2e6f69-metrics-certs\") pod \"frr-k8s-4v5kf\" (UID: \"8b5087c1-3eb3-46f7-8013-66ffbf2e6f69\") " pod="metallb-system/frr-k8s-4v5kf"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.044270 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-snsmm\" (UniqueName: \"kubernetes.io/projected/2df7325f-3ac5-49eb-9c91-adc20d7b3e80-kube-api-access-snsmm\") pod \"speaker-7w7hm\" (UID: \"2df7325f-3ac5-49eb-9c91-adc20d7b3e80\") " pod="metallb-system/speaker-7w7hm"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.044320 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/2df7325f-3ac5-49eb-9c91-adc20d7b3e80-metallb-excludel2\") pod \"speaker-7w7hm\" (UID: \"2df7325f-3ac5-49eb-9c91-adc20d7b3e80\") " pod="metallb-system/speaker-7w7hm"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.044378 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rfzw7\" (UniqueName: \"kubernetes.io/projected/14e2cb27-a2f0-45fe-8d0a-3be2c34c57aa-kube-api-access-rfzw7\") pod \"frr-k8s-webhook-server-7df86c4f6c-5f6hr\" (UID: \"14e2cb27-a2f0-45fe-8d0a-3be2c34c57aa\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-5f6hr"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.044393 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/8b5087c1-3eb3-46f7-8013-66ffbf2e6f69-reloader\") pod \"frr-k8s-4v5kf\" (UID: \"8b5087c1-3eb3-46f7-8013-66ffbf2e6f69\") " pod="metallb-system/frr-k8s-4v5kf"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.044420 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/8b5087c1-3eb3-46f7-8013-66ffbf2e6f69-frr-startup\") pod \"frr-k8s-4v5kf\" (UID: \"8b5087c1-3eb3-46f7-8013-66ffbf2e6f69\") " pod="metallb-system/frr-k8s-4v5kf"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.044486 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmwtc\" (UniqueName: \"kubernetes.io/projected/d8de638b-d0f4-419d-8df5-d0892e3ffc6d-kube-api-access-pmwtc\") pod \"controller-6968d8fdc4-z65sv\" (UID: \"d8de638b-d0f4-419d-8df5-d0892e3ffc6d\") " pod="metallb-system/controller-6968d8fdc4-z65sv"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.044597 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/8b5087c1-3eb3-46f7-8013-66ffbf2e6f69-frr-conf\") pod \"frr-k8s-4v5kf\" (UID: \"8b5087c1-3eb3-46f7-8013-66ffbf2e6f69\") " pod="metallb-system/frr-k8s-4v5kf"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.044669 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/8b5087c1-3eb3-46f7-8013-66ffbf2e6f69-metrics\") pod \"frr-k8s-4v5kf\" (UID: \"8b5087c1-3eb3-46f7-8013-66ffbf2e6f69\") " pod="metallb-system/frr-k8s-4v5kf"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.045109 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/8b5087c1-3eb3-46f7-8013-66ffbf2e6f69-frr-sockets\") pod \"frr-k8s-4v5kf\" (UID: \"8b5087c1-3eb3-46f7-8013-66ffbf2e6f69\") " pod="metallb-system/frr-k8s-4v5kf"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.045701 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/8b5087c1-3eb3-46f7-8013-66ffbf2e6f69-frr-startup\") pod \"frr-k8s-4v5kf\" (UID: \"8b5087c1-3eb3-46f7-8013-66ffbf2e6f69\") " pod="metallb-system/frr-k8s-4v5kf"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.055751 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8b5087c1-3eb3-46f7-8013-66ffbf2e6f69-metrics-certs\") pod \"frr-k8s-4v5kf\" (UID: \"8b5087c1-3eb3-46f7-8013-66ffbf2e6f69\") " pod="metallb-system/frr-k8s-4v5kf"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.067670 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jg6fg\" (UniqueName: \"kubernetes.io/projected/8b5087c1-3eb3-46f7-8013-66ffbf2e6f69-kube-api-access-jg6fg\") pod \"frr-k8s-4v5kf\" (UID: \"8b5087c1-3eb3-46f7-8013-66ffbf2e6f69\") " pod="metallb-system/frr-k8s-4v5kf"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.076952 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rfzw7\" (UniqueName: \"kubernetes.io/projected/14e2cb27-a2f0-45fe-8d0a-3be2c34c57aa-kube-api-access-rfzw7\") pod \"frr-k8s-webhook-server-7df86c4f6c-5f6hr\" (UID: \"14e2cb27-a2f0-45fe-8d0a-3be2c34c57aa\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-5f6hr"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.089780 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-lbd2v" podUID="fe800044-3586-4b5a-a90c-0f7d1005e7f5" containerName="registry-server" containerID="cri-o://92b9cef61a25fcedc0551fed57fd1b7e53dab56bc5f870cdb879dee2a6f82e00" gracePeriod=2
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.146302 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2df7325f-3ac5-49eb-9c91-adc20d7b3e80-metrics-certs\") pod \"speaker-7w7hm\" (UID: \"2df7325f-3ac5-49eb-9c91-adc20d7b3e80\") " pod="metallb-system/speaker-7w7hm"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.146379 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d8de638b-d0f4-419d-8df5-d0892e3ffc6d-metrics-certs\") pod \"controller-6968d8fdc4-z65sv\" (UID: \"d8de638b-d0f4-419d-8df5-d0892e3ffc6d\") " pod="metallb-system/controller-6968d8fdc4-z65sv"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.146418 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/2df7325f-3ac5-49eb-9c91-adc20d7b3e80-memberlist\") pod \"speaker-7w7hm\" (UID: \"2df7325f-3ac5-49eb-9c91-adc20d7b3e80\") " pod="metallb-system/speaker-7w7hm"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.146461 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d8de638b-d0f4-419d-8df5-d0892e3ffc6d-cert\") pod \"controller-6968d8fdc4-z65sv\" (UID: \"d8de638b-d0f4-419d-8df5-d0892e3ffc6d\") " pod="metallb-system/controller-6968d8fdc4-z65sv"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.146515 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-snsmm\" (UniqueName: \"kubernetes.io/projected/2df7325f-3ac5-49eb-9c91-adc20d7b3e80-kube-api-access-snsmm\") pod \"speaker-7w7hm\" (UID: \"2df7325f-3ac5-49eb-9c91-adc20d7b3e80\") " pod="metallb-system/speaker-7w7hm"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.146540 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/2df7325f-3ac5-49eb-9c91-adc20d7b3e80-metallb-excludel2\") pod \"speaker-7w7hm\" (UID: \"2df7325f-3ac5-49eb-9c91-adc20d7b3e80\") " pod="metallb-system/speaker-7w7hm"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.146575 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmwtc\" (UniqueName: \"kubernetes.io/projected/d8de638b-d0f4-419d-8df5-d0892e3ffc6d-kube-api-access-pmwtc\") pod \"controller-6968d8fdc4-z65sv\" (UID: \"d8de638b-d0f4-419d-8df5-d0892e3ffc6d\") " pod="metallb-system/controller-6968d8fdc4-z65sv"
Jan 22 06:01:08 crc kubenswrapper[4982]: E0122 06:01:08.146527 4982 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found
Jan 22 06:01:08 crc kubenswrapper[4982]: E0122 06:01:08.147051 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2df7325f-3ac5-49eb-9c91-adc20d7b3e80-metrics-certs podName:2df7325f-3ac5-49eb-9c91-adc20d7b3e80 nodeName:}" failed. No retries permitted until 2026-01-22 06:01:08.647025877 +0000 UTC m=+929.485663890 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/2df7325f-3ac5-49eb-9c91-adc20d7b3e80-metrics-certs") pod "speaker-7w7hm" (UID: "2df7325f-3ac5-49eb-9c91-adc20d7b3e80") : secret "speaker-certs-secret" not found
Jan 22 06:01:08 crc kubenswrapper[4982]: E0122 06:01:08.146653 4982 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found
Jan 22 06:01:08 crc kubenswrapper[4982]: E0122 06:01:08.147157 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d8de638b-d0f4-419d-8df5-d0892e3ffc6d-metrics-certs podName:d8de638b-d0f4-419d-8df5-d0892e3ffc6d nodeName:}" failed. No retries permitted until 2026-01-22 06:01:08.64713846 +0000 UTC m=+929.485776463 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d8de638b-d0f4-419d-8df5-d0892e3ffc6d-metrics-certs") pod "controller-6968d8fdc4-z65sv" (UID: "d8de638b-d0f4-419d-8df5-d0892e3ffc6d") : secret "controller-certs-secret" not found
Jan 22 06:01:08 crc kubenswrapper[4982]: E0122 06:01:08.146700 4982 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Jan 22 06:01:08 crc kubenswrapper[4982]: E0122 06:01:08.147201 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2df7325f-3ac5-49eb-9c91-adc20d7b3e80-memberlist podName:2df7325f-3ac5-49eb-9c91-adc20d7b3e80 nodeName:}" failed. No retries permitted until 2026-01-22 06:01:08.647194862 +0000 UTC m=+929.485832865 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/2df7325f-3ac5-49eb-9c91-adc20d7b3e80-memberlist") pod "speaker-7w7hm" (UID: "2df7325f-3ac5-49eb-9c91-adc20d7b3e80") : secret "metallb-memberlist" not found
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.147844 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/2df7325f-3ac5-49eb-9c91-adc20d7b3e80-metallb-excludel2\") pod \"speaker-7w7hm\" (UID: \"2df7325f-3ac5-49eb-9c91-adc20d7b3e80\") " pod="metallb-system/speaker-7w7hm"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.150643 4982 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.161986 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d8de638b-d0f4-419d-8df5-d0892e3ffc6d-cert\") pod \"controller-6968d8fdc4-z65sv\" (UID: \"d8de638b-d0f4-419d-8df5-d0892e3ffc6d\") " pod="metallb-system/controller-6968d8fdc4-z65sv"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.167239 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-4v5kf"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.169560 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-snsmm\" (UniqueName: \"kubernetes.io/projected/2df7325f-3ac5-49eb-9c91-adc20d7b3e80-kube-api-access-snsmm\") pod \"speaker-7w7hm\" (UID: \"2df7325f-3ac5-49eb-9c91-adc20d7b3e80\") " pod="metallb-system/speaker-7w7hm"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.188906 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmwtc\" (UniqueName: \"kubernetes.io/projected/d8de638b-d0f4-419d-8df5-d0892e3ffc6d-kube-api-access-pmwtc\") pod \"controller-6968d8fdc4-z65sv\" (UID: \"d8de638b-d0f4-419d-8df5-d0892e3ffc6d\") " pod="metallb-system/controller-6968d8fdc4-z65sv"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.445124 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lbd2v"
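Every entry in this log follows klog's header format: a severity letter fused with MMDD (I0122, E0122, W0122), the wall-clock time, the process ID, the source file:line, then the message. A small parser sketch for slicing logs like this one; the regexp is an assumption about the field layout, not an official grammar:

package main

import (
	"fmt"
	"regexp"
)

// klogHeader matches entries such as:
//   E0122 06:01:08.044028 4982 secret.go:188] Couldn't get secret ...
// Groups: severity (I/W/E/F), MMDD, time, PID, source file:line, message.
var klogHeader = regexp.MustCompile(
	`^([IWEF])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d+)\s+(\d+) ([\w./-]+:\d+)\] (.*)$`)

func main() {
	line := `E0122 06:01:08.044028 4982 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found`
	m := klogHeader.FindStringSubmatch(line)
	if m == nil {
		fmt.Println("no match")
		return
	}
	fmt.Printf("severity=%s date=%s time=%s pid=%s src=%s\nmsg=%s\n",
		m[1], m[2], m[3], m[4], m[5], m[6])
}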
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.563204 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe800044-3586-4b5a-a90c-0f7d1005e7f5-catalog-content\") pod \"fe800044-3586-4b5a-a90c-0f7d1005e7f5\" (UID: \"fe800044-3586-4b5a-a90c-0f7d1005e7f5\") "
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.563315 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l4nmp\" (UniqueName: \"kubernetes.io/projected/fe800044-3586-4b5a-a90c-0f7d1005e7f5-kube-api-access-l4nmp\") pod \"fe800044-3586-4b5a-a90c-0f7d1005e7f5\" (UID: \"fe800044-3586-4b5a-a90c-0f7d1005e7f5\") "
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.563349 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe800044-3586-4b5a-a90c-0f7d1005e7f5-utilities\") pod \"fe800044-3586-4b5a-a90c-0f7d1005e7f5\" (UID: \"fe800044-3586-4b5a-a90c-0f7d1005e7f5\") "
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.563768 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/14e2cb27-a2f0-45fe-8d0a-3be2c34c57aa-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-5f6hr\" (UID: \"14e2cb27-a2f0-45fe-8d0a-3be2c34c57aa\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-5f6hr"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.564622 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe800044-3586-4b5a-a90c-0f7d1005e7f5-utilities" (OuterVolumeSpecName: "utilities") pod "fe800044-3586-4b5a-a90c-0f7d1005e7f5" (UID: "fe800044-3586-4b5a-a90c-0f7d1005e7f5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.570083 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe800044-3586-4b5a-a90c-0f7d1005e7f5-kube-api-access-l4nmp" (OuterVolumeSpecName: "kube-api-access-l4nmp") pod "fe800044-3586-4b5a-a90c-0f7d1005e7f5" (UID: "fe800044-3586-4b5a-a90c-0f7d1005e7f5"). InnerVolumeSpecName "kube-api-access-l4nmp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.571398 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/14e2cb27-a2f0-45fe-8d0a-3be2c34c57aa-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-5f6hr\" (UID: \"14e2cb27-a2f0-45fe-8d0a-3be2c34c57aa\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-5f6hr"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.609275 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe800044-3586-4b5a-a90c-0f7d1005e7f5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fe800044-3586-4b5a-a90c-0f7d1005e7f5" (UID: "fe800044-3586-4b5a-a90c-0f7d1005e7f5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.665373 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2df7325f-3ac5-49eb-9c91-adc20d7b3e80-metrics-certs\") pod \"speaker-7w7hm\" (UID: \"2df7325f-3ac5-49eb-9c91-adc20d7b3e80\") " pod="metallb-system/speaker-7w7hm"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.665439 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d8de638b-d0f4-419d-8df5-d0892e3ffc6d-metrics-certs\") pod \"controller-6968d8fdc4-z65sv\" (UID: \"d8de638b-d0f4-419d-8df5-d0892e3ffc6d\") " pod="metallb-system/controller-6968d8fdc4-z65sv"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.665470 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/2df7325f-3ac5-49eb-9c91-adc20d7b3e80-memberlist\") pod \"speaker-7w7hm\" (UID: \"2df7325f-3ac5-49eb-9c91-adc20d7b3e80\") " pod="metallb-system/speaker-7w7hm"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.665539 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe800044-3586-4b5a-a90c-0f7d1005e7f5-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.665554 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l4nmp\" (UniqueName: \"kubernetes.io/projected/fe800044-3586-4b5a-a90c-0f7d1005e7f5-kube-api-access-l4nmp\") on node \"crc\" DevicePath \"\""
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.665567 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe800044-3586-4b5a-a90c-0f7d1005e7f5-utilities\") on node \"crc\" DevicePath \"\""
Jan 22 06:01:08 crc kubenswrapper[4982]: E0122 06:01:08.665711 4982 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Jan 22 06:01:08 crc kubenswrapper[4982]: E0122 06:01:08.665832 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2df7325f-3ac5-49eb-9c91-adc20d7b3e80-memberlist podName:2df7325f-3ac5-49eb-9c91-adc20d7b3e80 nodeName:}" failed. No retries permitted until 2026-01-22 06:01:09.665812203 +0000 UTC m=+930.504450206 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/2df7325f-3ac5-49eb-9c91-adc20d7b3e80-memberlist") pod "speaker-7w7hm" (UID: "2df7325f-3ac5-49eb-9c91-adc20d7b3e80") : secret "metallb-memberlist" not found
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.670068 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2df7325f-3ac5-49eb-9c91-adc20d7b3e80-metrics-certs\") pod \"speaker-7w7hm\" (UID: \"2df7325f-3ac5-49eb-9c91-adc20d7b3e80\") " pod="metallb-system/speaker-7w7hm"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.670570 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d8de638b-d0f4-419d-8df5-d0892e3ffc6d-metrics-certs\") pod \"controller-6968d8fdc4-z65sv\" (UID: \"d8de638b-d0f4-419d-8df5-d0892e3ffc6d\") " pod="metallb-system/controller-6968d8fdc4-z65sv"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.783469 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-5f6hr"
Jan 22 06:01:08 crc kubenswrapper[4982]: I0122 06:01:08.872246 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6968d8fdc4-z65sv"
Jan 22 06:01:09 crc kubenswrapper[4982]: I0122 06:01:09.063128 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-5f6hr"]
Jan 22 06:01:09 crc kubenswrapper[4982]: I0122 06:01:09.107780 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-5f6hr" event={"ID":"14e2cb27-a2f0-45fe-8d0a-3be2c34c57aa","Type":"ContainerStarted","Data":"780c41b87e5049cf77d59c735fed83116576f542396f69de7e253856a19cf712"}
Jan 22 06:01:09 crc kubenswrapper[4982]: I0122 06:01:09.109451 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4v5kf" event={"ID":"8b5087c1-3eb3-46f7-8013-66ffbf2e6f69","Type":"ContainerStarted","Data":"c94f59d2737e1f533c23e5aac045f2599154c67da776fca551f55d8e5b9d1fc2"}
Jan 22 06:01:09 crc kubenswrapper[4982]: I0122 06:01:09.113066 4982 generic.go:334] "Generic (PLEG): container finished" podID="fe800044-3586-4b5a-a90c-0f7d1005e7f5" containerID="92b9cef61a25fcedc0551fed57fd1b7e53dab56bc5f870cdb879dee2a6f82e00" exitCode=0
Jan 22 06:01:09 crc kubenswrapper[4982]: I0122 06:01:09.113121 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lbd2v" event={"ID":"fe800044-3586-4b5a-a90c-0f7d1005e7f5","Type":"ContainerDied","Data":"92b9cef61a25fcedc0551fed57fd1b7e53dab56bc5f870cdb879dee2a6f82e00"}
Jan 22 06:01:09 crc kubenswrapper[4982]: I0122 06:01:09.113169 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lbd2v" event={"ID":"fe800044-3586-4b5a-a90c-0f7d1005e7f5","Type":"ContainerDied","Data":"a7bca066e60a51c790755940da2a370158d3f9693c3d124f3f320d40ece025aa"}
Jan 22 06:01:09 crc kubenswrapper[4982]: I0122 06:01:09.113206 4982 scope.go:117] "RemoveContainer" containerID="92b9cef61a25fcedc0551fed57fd1b7e53dab56bc5f870cdb879dee2a6f82e00"
Jan 22 06:01:09 crc kubenswrapper[4982]: I0122 06:01:09.113250 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lbd2v"
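The "SyncLoop (PLEG): event for pod" entries carry a pod UID, an event type, and a container (or sandbox) ID, serialized as event={"ID":...,"Type":...,"Data":...}. A simplified Go mirror of that shape, for readers following the event flow (a sketch, not the kubelet's actual PLEG types):

package main

import "fmt"

// PodLifecycleEvent mirrors the event={...} values logged above.
type PodLifecycleEventType string

const (
	ContainerStarted PodLifecycleEventType = "ContainerStarted"
	ContainerDied    PodLifecycleEventType = "ContainerDied"
)

type PodLifecycleEvent struct {
	ID   string                // pod UID
	Type PodLifecycleEventType // what happened
	Data string                // container or sandbox ID
}

func main() {
	ev := PodLifecycleEvent{
		ID:   "fe800044-3586-4b5a-a90c-0f7d1005e7f5",
		Type: ContainerDied,
		Data: "92b9cef61a25fcedc0551fed57fd1b7e53dab56bc5f870cdb879dee2a6f82e00",
	}
	fmt.Printf("event=%+v\n", ev)
}

Note that a pod emits two ContainerDied events on teardown: one for the last container and one for the pod sandbox itself (the a7bca066... ID above).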
Jan 22 06:01:09 crc kubenswrapper[4982]: I0122 06:01:09.155580 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lbd2v"]
Jan 22 06:01:09 crc kubenswrapper[4982]: I0122 06:01:09.156046 4982 scope.go:117] "RemoveContainer" containerID="0510e35d1c6e6a74920ef81ee2eb0e4de6386cbf1536c346ec61dbbef05bbbc3"
Jan 22 06:01:09 crc kubenswrapper[4982]: I0122 06:01:09.159388 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-lbd2v"]
Jan 22 06:01:09 crc kubenswrapper[4982]: I0122 06:01:09.181300 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-z65sv"]
Jan 22 06:01:09 crc kubenswrapper[4982]: I0122 06:01:09.181996 4982 scope.go:117] "RemoveContainer" containerID="48a28d0cd4a5c2af4d2ce1a77e7da3caa23e1a6d1f64b655279942071bbf66a7"
Jan 22 06:01:09 crc kubenswrapper[4982]: I0122 06:01:09.213363 4982 scope.go:117] "RemoveContainer" containerID="92b9cef61a25fcedc0551fed57fd1b7e53dab56bc5f870cdb879dee2a6f82e00"
Jan 22 06:01:09 crc kubenswrapper[4982]: E0122 06:01:09.214058 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92b9cef61a25fcedc0551fed57fd1b7e53dab56bc5f870cdb879dee2a6f82e00\": container with ID starting with 92b9cef61a25fcedc0551fed57fd1b7e53dab56bc5f870cdb879dee2a6f82e00 not found: ID does not exist" containerID="92b9cef61a25fcedc0551fed57fd1b7e53dab56bc5f870cdb879dee2a6f82e00"
Jan 22 06:01:09 crc kubenswrapper[4982]: I0122 06:01:09.214091 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92b9cef61a25fcedc0551fed57fd1b7e53dab56bc5f870cdb879dee2a6f82e00"} err="failed to get container status \"92b9cef61a25fcedc0551fed57fd1b7e53dab56bc5f870cdb879dee2a6f82e00\": rpc error: code = NotFound desc = could not find container \"92b9cef61a25fcedc0551fed57fd1b7e53dab56bc5f870cdb879dee2a6f82e00\": container with ID starting with 92b9cef61a25fcedc0551fed57fd1b7e53dab56bc5f870cdb879dee2a6f82e00 not found: ID does not exist"
Jan 22 06:01:09 crc kubenswrapper[4982]: I0122 06:01:09.214115 4982 scope.go:117] "RemoveContainer" containerID="0510e35d1c6e6a74920ef81ee2eb0e4de6386cbf1536c346ec61dbbef05bbbc3"
Jan 22 06:01:09 crc kubenswrapper[4982]: E0122 06:01:09.215095 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0510e35d1c6e6a74920ef81ee2eb0e4de6386cbf1536c346ec61dbbef05bbbc3\": container with ID starting with 0510e35d1c6e6a74920ef81ee2eb0e4de6386cbf1536c346ec61dbbef05bbbc3 not found: ID does not exist" containerID="0510e35d1c6e6a74920ef81ee2eb0e4de6386cbf1536c346ec61dbbef05bbbc3"
Jan 22 06:01:09 crc kubenswrapper[4982]: I0122 06:01:09.215120 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0510e35d1c6e6a74920ef81ee2eb0e4de6386cbf1536c346ec61dbbef05bbbc3"} err="failed to get container status \"0510e35d1c6e6a74920ef81ee2eb0e4de6386cbf1536c346ec61dbbef05bbbc3\": rpc error: code = NotFound desc = could not find container \"0510e35d1c6e6a74920ef81ee2eb0e4de6386cbf1536c346ec61dbbef05bbbc3\": container with ID starting with 0510e35d1c6e6a74920ef81ee2eb0e4de6386cbf1536c346ec61dbbef05bbbc3 not found: ID does not exist"
Jan 22 06:01:09 crc kubenswrapper[4982]: I0122 06:01:09.215134 4982 scope.go:117] "RemoveContainer" containerID="48a28d0cd4a5c2af4d2ce1a77e7da3caa23e1a6d1f64b655279942071bbf66a7"
Jan 22 06:01:09 crc kubenswrapper[4982]: W0122 06:01:09.216057 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd8de638b_d0f4_419d_8df5_d0892e3ffc6d.slice/crio-28fcb73313ba5960d80bea951b03cb1ee55339c6a15e4605fe6fc03ffd9dd7d3 WatchSource:0}: Error finding container 28fcb73313ba5960d80bea951b03cb1ee55339c6a15e4605fe6fc03ffd9dd7d3: Status 404 returned error can't find the container with id 28fcb73313ba5960d80bea951b03cb1ee55339c6a15e4605fe6fc03ffd9dd7d3
Jan 22 06:01:09 crc kubenswrapper[4982]: E0122 06:01:09.216161 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48a28d0cd4a5c2af4d2ce1a77e7da3caa23e1a6d1f64b655279942071bbf66a7\": container with ID starting with 48a28d0cd4a5c2af4d2ce1a77e7da3caa23e1a6d1f64b655279942071bbf66a7 not found: ID does not exist" containerID="48a28d0cd4a5c2af4d2ce1a77e7da3caa23e1a6d1f64b655279942071bbf66a7"
Jan 22 06:01:09 crc kubenswrapper[4982]: I0122 06:01:09.216214 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48a28d0cd4a5c2af4d2ce1a77e7da3caa23e1a6d1f64b655279942071bbf66a7"} err="failed to get container status \"48a28d0cd4a5c2af4d2ce1a77e7da3caa23e1a6d1f64b655279942071bbf66a7\": rpc error: code = NotFound desc = could not find container \"48a28d0cd4a5c2af4d2ce1a77e7da3caa23e1a6d1f64b655279942071bbf66a7\": container with ID starting with 48a28d0cd4a5c2af4d2ce1a77e7da3caa23e1a6d1f64b655279942071bbf66a7 not found: ID does not exist"
Jan 22 06:01:09 crc kubenswrapper[4982]: I0122 06:01:09.683106 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/2df7325f-3ac5-49eb-9c91-adc20d7b3e80-memberlist\") pod \"speaker-7w7hm\" (UID: \"2df7325f-3ac5-49eb-9c91-adc20d7b3e80\") " pod="metallb-system/speaker-7w7hm"
Jan 22 06:01:09 crc kubenswrapper[4982]: I0122 06:01:09.691517 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/2df7325f-3ac5-49eb-9c91-adc20d7b3e80-memberlist\") pod \"speaker-7w7hm\" (UID: \"2df7325f-3ac5-49eb-9c91-adc20d7b3e80\") " pod="metallb-system/speaker-7w7hm"
Jan 22 06:01:09 crc kubenswrapper[4982]: I0122 06:01:09.728479 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe800044-3586-4b5a-a90c-0f7d1005e7f5" path="/var/lib/kubelet/pods/fe800044-3586-4b5a-a90c-0f7d1005e7f5/volumes"
Jan 22 06:01:09 crc kubenswrapper[4982]: I0122 06:01:09.751522 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-7w7hm"
Jan 22 06:01:10 crc kubenswrapper[4982]: I0122 06:01:10.154262 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-7w7hm" event={"ID":"2df7325f-3ac5-49eb-9c91-adc20d7b3e80","Type":"ContainerStarted","Data":"4edf132501d8c0c9c02384317879d855979be3d4203f476cbe3c2f8ad2aef2e9"}
Jan 22 06:01:10 crc kubenswrapper[4982]: I0122 06:01:10.154934 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-7w7hm" event={"ID":"2df7325f-3ac5-49eb-9c91-adc20d7b3e80","Type":"ContainerStarted","Data":"0f9b6cb2d8d938f921f9ae63885b0e5bdc21db779feedd37285c6dc39e16d528"}
Jan 22 06:01:10 crc kubenswrapper[4982]: I0122 06:01:10.168440 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-z65sv" event={"ID":"d8de638b-d0f4-419d-8df5-d0892e3ffc6d","Type":"ContainerStarted","Data":"bbf55d244ce68156582a4088fa65ad4e356f58706723cbe66d447c39578552fa"}
Jan 22 06:01:10 crc kubenswrapper[4982]: I0122 06:01:10.168495 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-z65sv" event={"ID":"d8de638b-d0f4-419d-8df5-d0892e3ffc6d","Type":"ContainerStarted","Data":"d3ccba3897f9aef11f87b4fb744c382066786c292a0c11db554f59d5fe76b15d"}
Jan 22 06:01:10 crc kubenswrapper[4982]: I0122 06:01:10.168509 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-z65sv" event={"ID":"d8de638b-d0f4-419d-8df5-d0892e3ffc6d","Type":"ContainerStarted","Data":"28fcb73313ba5960d80bea951b03cb1ee55339c6a15e4605fe6fc03ffd9dd7d3"}
Jan 22 06:01:10 crc kubenswrapper[4982]: I0122 06:01:10.169667 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-6968d8fdc4-z65sv"
Jan 22 06:01:10 crc kubenswrapper[4982]: I0122 06:01:10.201267 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-6968d8fdc4-z65sv" podStartSLOduration=3.201239376 podStartE2EDuration="3.201239376s" podCreationTimestamp="2026-01-22 06:01:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:01:10.195669173 +0000 UTC m=+931.034307186" watchObservedRunningTime="2026-01-22 06:01:10.201239376 +0000 UTC m=+931.039877379"
Jan 22 06:01:11 crc kubenswrapper[4982]: I0122 06:01:11.182113 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-7w7hm" event={"ID":"2df7325f-3ac5-49eb-9c91-adc20d7b3e80","Type":"ContainerStarted","Data":"325690b2c95422746800af9b072c61b23bab453aa693c1477a65d468b579df6e"}
Jan 22 06:01:11 crc kubenswrapper[4982]: I0122 06:01:11.213006 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-7w7hm" podStartSLOduration=4.212977108 podStartE2EDuration="4.212977108s" podCreationTimestamp="2026-01-22 06:01:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:01:11.209226436 +0000 UTC m=+932.047864449" watchObservedRunningTime="2026-01-22 06:01:11.212977108 +0000 UTC m=+932.051615111"
Jan 22 06:01:12 crc kubenswrapper[4982]: I0122 06:01:12.192382 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-7w7hm"
Jan 22 06:01:17 crc kubenswrapper[4982]: I0122 06:01:17.238133 4982 generic.go:334] "Generic (PLEG): container finished" podID="8b5087c1-3eb3-46f7-8013-66ffbf2e6f69" containerID="0db272fdbc55aaf58ddedc2d59ee6c4a9669b7d9939c9e7401423262f576ac38" exitCode=0
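The RemoveContainer / "ContainerStatus from runtime service failed ... NotFound" pairs above show pod deletion racing with container garbage collection: by the time the kubelet asks CRI-O for status, the container is already gone, and the error is logged at I-level and ignored. The usual way to make such cleanup idempotent is to treat a gRPC NotFound as success; a sketch under that assumption (not kubelet source):

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// ignoreNotFound makes a cleanup step idempotent: if the runtime already
// forgot the container (as in the NotFound errors logged above), deletion
// is treated as having succeeded.
func ignoreNotFound(err error) error {
	if status.Code(err) == codes.NotFound {
		return nil
	}
	return err
}

func main() {
	err := status.Error(codes.NotFound,
		`could not find container "92b9cef6...": ID does not exist`)
	if ignoreNotFound(err) == nil {
		fmt.Println("container already gone; treating RemoveContainer as done")
	}
}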
podID="8b5087c1-3eb3-46f7-8013-66ffbf2e6f69" containerID="0db272fdbc55aaf58ddedc2d59ee6c4a9669b7d9939c9e7401423262f576ac38" exitCode=0 Jan 22 06:01:17 crc kubenswrapper[4982]: I0122 06:01:17.238216 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4v5kf" event={"ID":"8b5087c1-3eb3-46f7-8013-66ffbf2e6f69","Type":"ContainerDied","Data":"0db272fdbc55aaf58ddedc2d59ee6c4a9669b7d9939c9e7401423262f576ac38"} Jan 22 06:01:17 crc kubenswrapper[4982]: I0122 06:01:17.240681 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-5f6hr" event={"ID":"14e2cb27-a2f0-45fe-8d0a-3be2c34c57aa","Type":"ContainerStarted","Data":"33e5ffe58383eb7511d7cad9c5c09d5fae07013811bc9dd68a34294132fe4ef3"} Jan 22 06:01:17 crc kubenswrapper[4982]: I0122 06:01:17.240826 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-5f6hr" Jan 22 06:01:17 crc kubenswrapper[4982]: I0122 06:01:17.302369 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-5f6hr" podStartSLOduration=2.97574299 podStartE2EDuration="10.302353836s" podCreationTimestamp="2026-01-22 06:01:07 +0000 UTC" firstStartedPulling="2026-01-22 06:01:09.091206553 +0000 UTC m=+929.929844556" lastFinishedPulling="2026-01-22 06:01:16.417817359 +0000 UTC m=+937.256455402" observedRunningTime="2026-01-22 06:01:17.298787647 +0000 UTC m=+938.137425650" watchObservedRunningTime="2026-01-22 06:01:17.302353836 +0000 UTC m=+938.140991839" Jan 22 06:01:18 crc kubenswrapper[4982]: I0122 06:01:18.258703 4982 generic.go:334] "Generic (PLEG): container finished" podID="8b5087c1-3eb3-46f7-8013-66ffbf2e6f69" containerID="de2003451d6bed805007b04198ea1800d40a7e19535ba90d64c99fe06f6320d9" exitCode=0 Jan 22 06:01:18 crc kubenswrapper[4982]: I0122 06:01:18.260187 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4v5kf" event={"ID":"8b5087c1-3eb3-46f7-8013-66ffbf2e6f69","Type":"ContainerDied","Data":"de2003451d6bed805007b04198ea1800d40a7e19535ba90d64c99fe06f6320d9"} Jan 22 06:01:19 crc kubenswrapper[4982]: I0122 06:01:19.272832 4982 generic.go:334] "Generic (PLEG): container finished" podID="8b5087c1-3eb3-46f7-8013-66ffbf2e6f69" containerID="ba112dfa6ce565bf879a249c7a726dcb2b5be81f1e279f73b8dfe1d2a8fb6ac0" exitCode=0 Jan 22 06:01:19 crc kubenswrapper[4982]: I0122 06:01:19.272912 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4v5kf" event={"ID":"8b5087c1-3eb3-46f7-8013-66ffbf2e6f69","Type":"ContainerDied","Data":"ba112dfa6ce565bf879a249c7a726dcb2b5be81f1e279f73b8dfe1d2a8fb6ac0"} Jan 22 06:01:20 crc kubenswrapper[4982]: I0122 06:01:20.286463 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4v5kf" event={"ID":"8b5087c1-3eb3-46f7-8013-66ffbf2e6f69","Type":"ContainerStarted","Data":"0d52e130f636d2d4feca938f9e853d7d6b6159eed089f0bb9f553bdc2ec699ab"} Jan 22 06:01:20 crc kubenswrapper[4982]: I0122 06:01:20.286799 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4v5kf" event={"ID":"8b5087c1-3eb3-46f7-8013-66ffbf2e6f69","Type":"ContainerStarted","Data":"b4b0b146b717f1eb25cd1fcc4d3cc41a2728a33f267a8921fc248c9261e9a10d"} Jan 22 06:01:20 crc kubenswrapper[4982]: I0122 06:01:20.286808 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4v5kf" 
event={"ID":"8b5087c1-3eb3-46f7-8013-66ffbf2e6f69","Type":"ContainerStarted","Data":"b0bf728dd3d01cd1839d41fb923d6d780d71f3ede2619f72c72345696b4964c3"} Jan 22 06:01:20 crc kubenswrapper[4982]: I0122 06:01:20.286820 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4v5kf" event={"ID":"8b5087c1-3eb3-46f7-8013-66ffbf2e6f69","Type":"ContainerStarted","Data":"c6f5032ceb8e4a02b8ce6270e3596ece8c45f761e863c4f5c0529b173fc14b15"} Jan 22 06:01:20 crc kubenswrapper[4982]: I0122 06:01:20.286827 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4v5kf" event={"ID":"8b5087c1-3eb3-46f7-8013-66ffbf2e6f69","Type":"ContainerStarted","Data":"6dfd132a35095e994f1ac75d3bac4beb1a19d6a03291565e524c2b3f1368bef3"} Jan 22 06:01:21 crc kubenswrapper[4982]: I0122 06:01:21.304140 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4v5kf" event={"ID":"8b5087c1-3eb3-46f7-8013-66ffbf2e6f69","Type":"ContainerStarted","Data":"822856e6c7ad96c9d79c237f79eee07385d81b727f47003dcbad73e8364440b1"} Jan 22 06:01:21 crc kubenswrapper[4982]: I0122 06:01:21.304439 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-4v5kf" Jan 22 06:01:21 crc kubenswrapper[4982]: I0122 06:01:21.341667 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-4v5kf" podStartSLOduration=6.246653414 podStartE2EDuration="14.341634135s" podCreationTimestamp="2026-01-22 06:01:07 +0000 UTC" firstStartedPulling="2026-01-22 06:01:08.291543048 +0000 UTC m=+929.130181051" lastFinishedPulling="2026-01-22 06:01:16.386523769 +0000 UTC m=+937.225161772" observedRunningTime="2026-01-22 06:01:21.340120843 +0000 UTC m=+942.178758916" watchObservedRunningTime="2026-01-22 06:01:21.341634135 +0000 UTC m=+942.180272178" Jan 22 06:01:23 crc kubenswrapper[4982]: I0122 06:01:23.167948 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-4v5kf" Jan 22 06:01:23 crc kubenswrapper[4982]: I0122 06:01:23.237082 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-4v5kf" Jan 22 06:01:28 crc kubenswrapper[4982]: I0122 06:01:28.795688 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-5f6hr" Jan 22 06:01:28 crc kubenswrapper[4982]: I0122 06:01:28.877438 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-6968d8fdc4-z65sv" Jan 22 06:01:29 crc kubenswrapper[4982]: I0122 06:01:29.757965 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-7w7hm" Jan 22 06:01:31 crc kubenswrapper[4982]: I0122 06:01:31.483228 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t"] Jan 22 06:01:31 crc kubenswrapper[4982]: E0122 06:01:31.483960 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe800044-3586-4b5a-a90c-0f7d1005e7f5" containerName="registry-server" Jan 22 06:01:31 crc kubenswrapper[4982]: I0122 06:01:31.483985 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe800044-3586-4b5a-a90c-0f7d1005e7f5" containerName="registry-server" Jan 22 06:01:31 crc kubenswrapper[4982]: E0122 06:01:31.484013 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe800044-3586-4b5a-a90c-0f7d1005e7f5" 
Jan 22 06:01:31 crc kubenswrapper[4982]: I0122 06:01:31.484026 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe800044-3586-4b5a-a90c-0f7d1005e7f5" containerName="extract-content"
Jan 22 06:01:31 crc kubenswrapper[4982]: E0122 06:01:31.484039 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe800044-3586-4b5a-a90c-0f7d1005e7f5" containerName="extract-utilities"
Jan 22 06:01:31 crc kubenswrapper[4982]: I0122 06:01:31.484052 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe800044-3586-4b5a-a90c-0f7d1005e7f5" containerName="extract-utilities"
Jan 22 06:01:31 crc kubenswrapper[4982]: I0122 06:01:31.484257 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe800044-3586-4b5a-a90c-0f7d1005e7f5" containerName="registry-server"
Jan 22 06:01:31 crc kubenswrapper[4982]: I0122 06:01:31.485613 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t"
Jan 22 06:01:31 crc kubenswrapper[4982]: I0122 06:01:31.489112 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Jan 22 06:01:31 crc kubenswrapper[4982]: I0122 06:01:31.501002 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t"]
Jan 22 06:01:31 crc kubenswrapper[4982]: I0122 06:01:31.644479 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e7d6e3ce-1d35-44bd-9677-001c19062a90-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t\" (UID: \"e7d6e3ce-1d35-44bd-9677-001c19062a90\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t"
Jan 22 06:01:31 crc kubenswrapper[4982]: I0122 06:01:31.644994 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e7d6e3ce-1d35-44bd-9677-001c19062a90-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t\" (UID: \"e7d6e3ce-1d35-44bd-9677-001c19062a90\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t"
Jan 22 06:01:31 crc kubenswrapper[4982]: I0122 06:01:31.645085 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxcxt\" (UniqueName: \"kubernetes.io/projected/e7d6e3ce-1d35-44bd-9677-001c19062a90-kube-api-access-jxcxt\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t\" (UID: \"e7d6e3ce-1d35-44bd-9677-001c19062a90\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t"
Jan 22 06:01:31 crc kubenswrapper[4982]: I0122 06:01:31.746745 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e7d6e3ce-1d35-44bd-9677-001c19062a90-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t\" (UID: \"e7d6e3ce-1d35-44bd-9677-001c19062a90\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t"
Jan 22 06:01:31 crc kubenswrapper[4982]: I0122 06:01:31.746814 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxcxt\" (UniqueName: \"kubernetes.io/projected/e7d6e3ce-1d35-44bd-9677-001c19062a90-kube-api-access-jxcxt\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t\" (UID: \"e7d6e3ce-1d35-44bd-9677-001c19062a90\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t"
Jan 22 06:01:31 crc kubenswrapper[4982]: I0122 06:01:31.746876 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e7d6e3ce-1d35-44bd-9677-001c19062a90-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t\" (UID: \"e7d6e3ce-1d35-44bd-9677-001c19062a90\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t"
Jan 22 06:01:31 crc kubenswrapper[4982]: I0122 06:01:31.747398 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e7d6e3ce-1d35-44bd-9677-001c19062a90-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t\" (UID: \"e7d6e3ce-1d35-44bd-9677-001c19062a90\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t"
Jan 22 06:01:31 crc kubenswrapper[4982]: I0122 06:01:31.747597 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e7d6e3ce-1d35-44bd-9677-001c19062a90-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t\" (UID: \"e7d6e3ce-1d35-44bd-9677-001c19062a90\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t"
Jan 22 06:01:31 crc kubenswrapper[4982]: I0122 06:01:31.780484 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxcxt\" (UniqueName: \"kubernetes.io/projected/e7d6e3ce-1d35-44bd-9677-001c19062a90-kube-api-access-jxcxt\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t\" (UID: \"e7d6e3ce-1d35-44bd-9677-001c19062a90\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t"
Jan 22 06:01:31 crc kubenswrapper[4982]: I0122 06:01:31.803302 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t"
Jan 22 06:01:32 crc kubenswrapper[4982]: I0122 06:01:32.066626 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t"]
Jan 22 06:01:32 crc kubenswrapper[4982]: I0122 06:01:32.394001 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t" event={"ID":"e7d6e3ce-1d35-44bd-9677-001c19062a90","Type":"ContainerStarted","Data":"dc9fe3cdf7938fb36c328d2cdf69550378239ba58b53d89d5d2259c07f950a16"}
Jan 22 06:01:33 crc kubenswrapper[4982]: I0122 06:01:33.405219 4982 generic.go:334] "Generic (PLEG): container finished" podID="e7d6e3ce-1d35-44bd-9677-001c19062a90" containerID="70aa069b2d2a94bf24c4cca1a3446146e9b6c647a2adc49993aeed8754e4ffa6" exitCode=0
Jan 22 06:01:33 crc kubenswrapper[4982]: I0122 06:01:33.405276 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t" event={"ID":"e7d6e3ce-1d35-44bd-9677-001c19062a90","Type":"ContainerDied","Data":"70aa069b2d2a94bf24c4cca1a3446146e9b6c647a2adc49993aeed8754e4ffa6"}
Jan 22 06:01:37 crc kubenswrapper[4982]: I0122 06:01:37.439824 4982 generic.go:334] "Generic (PLEG): container finished" podID="e7d6e3ce-1d35-44bd-9677-001c19062a90" containerID="a6dd4f5b90eef8045f955780775b3cd168584c8a50b1230bed157409d76d67fc" exitCode=0
Jan 22 06:01:37 crc kubenswrapper[4982]: I0122 06:01:37.439898 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t" event={"ID":"e7d6e3ce-1d35-44bd-9677-001c19062a90","Type":"ContainerDied","Data":"a6dd4f5b90eef8045f955780775b3cd168584c8a50b1230bed157409d76d67fc"}
Jan 22 06:01:38 crc kubenswrapper[4982]: I0122 06:01:38.174385 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-4v5kf"
Jan 22 06:01:38 crc kubenswrapper[4982]: I0122 06:01:38.451161 4982 generic.go:334] "Generic (PLEG): container finished" podID="e7d6e3ce-1d35-44bd-9677-001c19062a90" containerID="436e1f32094000aa78156c2534e242117005aebae5eb2727b9745ab3e5e44be3" exitCode=0
Jan 22 06:01:38 crc kubenswrapper[4982]: I0122 06:01:38.451236 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t" event={"ID":"e7d6e3ce-1d35-44bd-9677-001c19062a90","Type":"ContainerDied","Data":"436e1f32094000aa78156c2534e242117005aebae5eb2727b9745ab3e5e44be3"}
Jan 22 06:01:39 crc kubenswrapper[4982]: I0122 06:01:39.816218 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t"
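Across the section, the reconciler entries trace the same per-volume lifecycle: VerifyControllerAttachedVolume started, MountVolume started, MountVolume.SetUp succeeded, and on teardown UnmountVolume started, UnmountVolume.TearDown succeeded, "Volume detached". A tiny state-machine sketch of that progression; the enum and transition table are illustrative, not kubelet types:

package main

import "fmt"

// volumeState names the stages the reconciler logs for each volume above.
type volumeState int

const (
	verified   volumeState = iota // VerifyControllerAttachedVolume started
	mounting                      // operationExecutor.MountVolume started
	mounted                       // MountVolume.SetUp succeeded
	unmounting                    // operationExecutor.UnmountVolume started
	detached                      // "Volume detached" reported
)

var names = map[volumeState]string{
	verified: "verified", mounting: "mounting", mounted: "mounted",
	unmounting: "unmounting", detached: "detached",
}

var next = map[volumeState]volumeState{
	verified: mounting, mounting: mounted, mounted: unmounting, unmounting: detached,
}

func main() {
	for s := verified; s != detached; s = next[s] {
		fmt.Printf("%s -> %s\n", names[s], names[next[s]])
	}
}

A failed SetUp (the secret-not-found errors earlier) loops the volume back from mounting to a retry rather than advancing it.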
Jan 22 06:01:39 crc kubenswrapper[4982]: I0122 06:01:39.985202 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e7d6e3ce-1d35-44bd-9677-001c19062a90-util\") pod \"e7d6e3ce-1d35-44bd-9677-001c19062a90\" (UID: \"e7d6e3ce-1d35-44bd-9677-001c19062a90\") "
Jan 22 06:01:39 crc kubenswrapper[4982]: I0122 06:01:39.985309 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e7d6e3ce-1d35-44bd-9677-001c19062a90-bundle\") pod \"e7d6e3ce-1d35-44bd-9677-001c19062a90\" (UID: \"e7d6e3ce-1d35-44bd-9677-001c19062a90\") "
Jan 22 06:01:39 crc kubenswrapper[4982]: I0122 06:01:39.985331 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jxcxt\" (UniqueName: \"kubernetes.io/projected/e7d6e3ce-1d35-44bd-9677-001c19062a90-kube-api-access-jxcxt\") pod \"e7d6e3ce-1d35-44bd-9677-001c19062a90\" (UID: \"e7d6e3ce-1d35-44bd-9677-001c19062a90\") "
Jan 22 06:01:39 crc kubenswrapper[4982]: I0122 06:01:39.986904 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7d6e3ce-1d35-44bd-9677-001c19062a90-bundle" (OuterVolumeSpecName: "bundle") pod "e7d6e3ce-1d35-44bd-9677-001c19062a90" (UID: "e7d6e3ce-1d35-44bd-9677-001c19062a90"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 06:01:39 crc kubenswrapper[4982]: I0122 06:01:39.991054 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7d6e3ce-1d35-44bd-9677-001c19062a90-kube-api-access-jxcxt" (OuterVolumeSpecName: "kube-api-access-jxcxt") pod "e7d6e3ce-1d35-44bd-9677-001c19062a90" (UID: "e7d6e3ce-1d35-44bd-9677-001c19062a90"). InnerVolumeSpecName "kube-api-access-jxcxt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 06:01:39 crc kubenswrapper[4982]: I0122 06:01:39.994945 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7d6e3ce-1d35-44bd-9677-001c19062a90-util" (OuterVolumeSpecName: "util") pod "e7d6e3ce-1d35-44bd-9677-001c19062a90" (UID: "e7d6e3ce-1d35-44bd-9677-001c19062a90"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 06:01:40 crc kubenswrapper[4982]: I0122 06:01:40.086689 4982 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e7d6e3ce-1d35-44bd-9677-001c19062a90-util\") on node \"crc\" DevicePath \"\""
Jan 22 06:01:40 crc kubenswrapper[4982]: I0122 06:01:40.086736 4982 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e7d6e3ce-1d35-44bd-9677-001c19062a90-bundle\") on node \"crc\" DevicePath \"\""
Jan 22 06:01:40 crc kubenswrapper[4982]: I0122 06:01:40.086754 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jxcxt\" (UniqueName: \"kubernetes.io/projected/e7d6e3ce-1d35-44bd-9677-001c19062a90-kube-api-access-jxcxt\") on node \"crc\" DevicePath \"\""
Jan 22 06:01:40 crc kubenswrapper[4982]: I0122 06:01:40.471740 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t" event={"ID":"e7d6e3ce-1d35-44bd-9677-001c19062a90","Type":"ContainerDied","Data":"dc9fe3cdf7938fb36c328d2cdf69550378239ba58b53d89d5d2259c07f950a16"}
Jan 22 06:01:40 crc kubenswrapper[4982]: I0122 06:01:40.472159 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dc9fe3cdf7938fb36c328d2cdf69550378239ba58b53d89d5d2259c07f950a16"
Jan 22 06:01:40 crc kubenswrapper[4982]: I0122 06:01:40.472252 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t"
Jan 22 06:01:44 crc kubenswrapper[4982]: I0122 06:01:44.730558 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-r8g55"]
Jan 22 06:01:44 crc kubenswrapper[4982]: E0122 06:01:44.731232 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7d6e3ce-1d35-44bd-9677-001c19062a90" containerName="extract"
Jan 22 06:01:44 crc kubenswrapper[4982]: I0122 06:01:44.731251 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7d6e3ce-1d35-44bd-9677-001c19062a90" containerName="extract"
Jan 22 06:01:44 crc kubenswrapper[4982]: E0122 06:01:44.731264 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7d6e3ce-1d35-44bd-9677-001c19062a90" containerName="util"
Jan 22 06:01:44 crc kubenswrapper[4982]: I0122 06:01:44.731272 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7d6e3ce-1d35-44bd-9677-001c19062a90" containerName="util"
Jan 22 06:01:44 crc kubenswrapper[4982]: E0122 06:01:44.731284 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7d6e3ce-1d35-44bd-9677-001c19062a90" containerName="pull"
Jan 22 06:01:44 crc kubenswrapper[4982]: I0122 06:01:44.731291 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7d6e3ce-1d35-44bd-9677-001c19062a90" containerName="pull"
Jan 22 06:01:44 crc kubenswrapper[4982]: I0122 06:01:44.731430 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7d6e3ce-1d35-44bd-9677-001c19062a90" containerName="extract"
Jan 22 06:01:44 crc kubenswrapper[4982]: I0122 06:01:44.731966 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-r8g55"
Jan 22 06:01:44 crc kubenswrapper[4982]: I0122 06:01:44.735040 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt"
Jan 22 06:01:44 crc kubenswrapper[4982]: I0122 06:01:44.735144 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt"
Jan 22 06:01:44 crc kubenswrapper[4982]: I0122 06:01:44.735248 4982 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-4qn9p"
Jan 22 06:01:44 crc kubenswrapper[4982]: I0122 06:01:44.748282 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-r8g55"]
Jan 22 06:01:44 crc kubenswrapper[4982]: I0122 06:01:44.861452 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jlj4\" (UniqueName: \"kubernetes.io/projected/747c8ee6-9dd9-4cef-9ca6-b653c937e70b-kube-api-access-9jlj4\") pod \"cert-manager-operator-controller-manager-64cf6dff88-r8g55\" (UID: \"747c8ee6-9dd9-4cef-9ca6-b653c937e70b\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-r8g55"
Jan 22 06:01:44 crc kubenswrapper[4982]: I0122 06:01:44.861599 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/747c8ee6-9dd9-4cef-9ca6-b653c937e70b-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-r8g55\" (UID: \"747c8ee6-9dd9-4cef-9ca6-b653c937e70b\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-r8g55"
Jan 22 06:01:44 crc kubenswrapper[4982]: I0122 06:01:44.963296 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/747c8ee6-9dd9-4cef-9ca6-b653c937e70b-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-r8g55\" (UID: \"747c8ee6-9dd9-4cef-9ca6-b653c937e70b\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-r8g55"
Jan 22 06:01:44 crc kubenswrapper[4982]: I0122 06:01:44.963401 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jlj4\" (UniqueName: \"kubernetes.io/projected/747c8ee6-9dd9-4cef-9ca6-b653c937e70b-kube-api-access-9jlj4\") pod \"cert-manager-operator-controller-manager-64cf6dff88-r8g55\" (UID: \"747c8ee6-9dd9-4cef-9ca6-b653c937e70b\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-r8g55"
Jan 22 06:01:44 crc kubenswrapper[4982]: I0122 06:01:44.964201 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/747c8ee6-9dd9-4cef-9ca6-b653c937e70b-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-r8g55\" (UID: \"747c8ee6-9dd9-4cef-9ca6-b653c937e70b\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-r8g55"
Jan 22 06:01:44 crc kubenswrapper[4982]: I0122 06:01:44.988905 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jlj4\" (UniqueName: \"kubernetes.io/projected/747c8ee6-9dd9-4cef-9ca6-b653c937e70b-kube-api-access-9jlj4\") pod \"cert-manager-operator-controller-manager-64cf6dff88-r8g55\" (UID: \"747c8ee6-9dd9-4cef-9ca6-b653c937e70b\") "
pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-r8g55" Jan 22 06:01:45 crc kubenswrapper[4982]: I0122 06:01:45.056550 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-r8g55" Jan 22 06:01:45 crc kubenswrapper[4982]: I0122 06:01:45.658226 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-r8g55"] Jan 22 06:01:46 crc kubenswrapper[4982]: I0122 06:01:46.512893 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-r8g55" event={"ID":"747c8ee6-9dd9-4cef-9ca6-b653c937e70b","Type":"ContainerStarted","Data":"4881f8ca607a1d018063827c731a6b36fa07a81d797d411bb4c709747e87d6c5"} Jan 22 06:01:54 crc kubenswrapper[4982]: I0122 06:01:54.578497 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-r8g55" event={"ID":"747c8ee6-9dd9-4cef-9ca6-b653c937e70b","Type":"ContainerStarted","Data":"fa6ba289a0d3e21510987b30af52fbe8bad50acb2924f2e88342a83d21de0280"} Jan 22 06:01:54 crc kubenswrapper[4982]: I0122 06:01:54.620186 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-r8g55" podStartSLOduration=2.320276586 podStartE2EDuration="10.620153935s" podCreationTimestamp="2026-01-22 06:01:44 +0000 UTC" firstStartedPulling="2026-01-22 06:01:45.67306558 +0000 UTC m=+966.511703583" lastFinishedPulling="2026-01-22 06:01:53.972942919 +0000 UTC m=+974.811580932" observedRunningTime="2026-01-22 06:01:54.606011086 +0000 UTC m=+975.444649129" watchObservedRunningTime="2026-01-22 06:01:54.620153935 +0000 UTC m=+975.458791958" Jan 22 06:01:58 crc kubenswrapper[4982]: I0122 06:01:58.084491 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-jbph5"] Jan 22 06:01:58 crc kubenswrapper[4982]: I0122 06:01:58.085539 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-jbph5" Jan 22 06:01:58 crc kubenswrapper[4982]: I0122 06:01:58.087596 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Jan 22 06:01:58 crc kubenswrapper[4982]: I0122 06:01:58.087763 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Jan 22 06:01:58 crc kubenswrapper[4982]: I0122 06:01:58.091306 4982 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-4k6wd" Jan 22 06:01:58 crc kubenswrapper[4982]: I0122 06:01:58.100814 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-jbph5"] Jan 22 06:01:58 crc kubenswrapper[4982]: I0122 06:01:58.270322 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/aeb8c181-493c-4f85-8500-f2109edc40f7-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-jbph5\" (UID: \"aeb8c181-493c-4f85-8500-f2109edc40f7\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-jbph5" Jan 22 06:01:58 crc kubenswrapper[4982]: I0122 06:01:58.270509 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdlfh\" (UniqueName: \"kubernetes.io/projected/aeb8c181-493c-4f85-8500-f2109edc40f7-kube-api-access-hdlfh\") pod \"cert-manager-webhook-f4fb5df64-jbph5\" (UID: \"aeb8c181-493c-4f85-8500-f2109edc40f7\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-jbph5" Jan 22 06:01:58 crc kubenswrapper[4982]: I0122 06:01:58.372052 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdlfh\" (UniqueName: \"kubernetes.io/projected/aeb8c181-493c-4f85-8500-f2109edc40f7-kube-api-access-hdlfh\") pod \"cert-manager-webhook-f4fb5df64-jbph5\" (UID: \"aeb8c181-493c-4f85-8500-f2109edc40f7\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-jbph5" Jan 22 06:01:58 crc kubenswrapper[4982]: I0122 06:01:58.372142 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/aeb8c181-493c-4f85-8500-f2109edc40f7-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-jbph5\" (UID: \"aeb8c181-493c-4f85-8500-f2109edc40f7\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-jbph5" Jan 22 06:01:58 crc kubenswrapper[4982]: I0122 06:01:58.399521 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdlfh\" (UniqueName: \"kubernetes.io/projected/aeb8c181-493c-4f85-8500-f2109edc40f7-kube-api-access-hdlfh\") pod \"cert-manager-webhook-f4fb5df64-jbph5\" (UID: \"aeb8c181-493c-4f85-8500-f2109edc40f7\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-jbph5" Jan 22 06:01:58 crc kubenswrapper[4982]: I0122 06:01:58.399525 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/aeb8c181-493c-4f85-8500-f2109edc40f7-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-jbph5\" (UID: \"aeb8c181-493c-4f85-8500-f2109edc40f7\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-jbph5" Jan 22 06:01:58 crc kubenswrapper[4982]: I0122 06:01:58.438487 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-jbph5" Jan 22 06:01:58 crc kubenswrapper[4982]: I0122 06:01:58.873382 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-jbph5"] Jan 22 06:01:58 crc kubenswrapper[4982]: W0122 06:01:58.886381 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaeb8c181_493c_4f85_8500_f2109edc40f7.slice/crio-328dd2da14b5e4b46419a474459ac2256b59d14d55a5a43d1e5dd194fbcdc65e WatchSource:0}: Error finding container 328dd2da14b5e4b46419a474459ac2256b59d14d55a5a43d1e5dd194fbcdc65e: Status 404 returned error can't find the container with id 328dd2da14b5e4b46419a474459ac2256b59d14d55a5a43d1e5dd194fbcdc65e Jan 22 06:01:59 crc kubenswrapper[4982]: I0122 06:01:59.038563 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-sgz2n"] Jan 22 06:01:59 crc kubenswrapper[4982]: I0122 06:01:59.039757 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-sgz2n" Jan 22 06:01:59 crc kubenswrapper[4982]: I0122 06:01:59.041422 4982 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-p89dn" Jan 22 06:01:59 crc kubenswrapper[4982]: I0122 06:01:59.050093 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-sgz2n"] Jan 22 06:01:59 crc kubenswrapper[4982]: I0122 06:01:59.083147 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8gbr\" (UniqueName: \"kubernetes.io/projected/cc5f2133-2f6a-4e37-9224-c109867618ec-kube-api-access-k8gbr\") pod \"cert-manager-cainjector-855d9ccff4-sgz2n\" (UID: \"cc5f2133-2f6a-4e37-9224-c109867618ec\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-sgz2n" Jan 22 06:01:59 crc kubenswrapper[4982]: I0122 06:01:59.083598 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cc5f2133-2f6a-4e37-9224-c109867618ec-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-sgz2n\" (UID: \"cc5f2133-2f6a-4e37-9224-c109867618ec\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-sgz2n" Jan 22 06:01:59 crc kubenswrapper[4982]: I0122 06:01:59.184879 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cc5f2133-2f6a-4e37-9224-c109867618ec-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-sgz2n\" (UID: \"cc5f2133-2f6a-4e37-9224-c109867618ec\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-sgz2n" Jan 22 06:01:59 crc kubenswrapper[4982]: I0122 06:01:59.184942 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8gbr\" (UniqueName: \"kubernetes.io/projected/cc5f2133-2f6a-4e37-9224-c109867618ec-kube-api-access-k8gbr\") pod \"cert-manager-cainjector-855d9ccff4-sgz2n\" (UID: \"cc5f2133-2f6a-4e37-9224-c109867618ec\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-sgz2n" Jan 22 06:01:59 crc kubenswrapper[4982]: I0122 06:01:59.209149 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8gbr\" (UniqueName: \"kubernetes.io/projected/cc5f2133-2f6a-4e37-9224-c109867618ec-kube-api-access-k8gbr\") pod 
\"cert-manager-cainjector-855d9ccff4-sgz2n\" (UID: \"cc5f2133-2f6a-4e37-9224-c109867618ec\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-sgz2n" Jan 22 06:01:59 crc kubenswrapper[4982]: I0122 06:01:59.215800 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cc5f2133-2f6a-4e37-9224-c109867618ec-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-sgz2n\" (UID: \"cc5f2133-2f6a-4e37-9224-c109867618ec\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-sgz2n" Jan 22 06:01:59 crc kubenswrapper[4982]: I0122 06:01:59.372332 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-sgz2n" Jan 22 06:01:59 crc kubenswrapper[4982]: I0122 06:01:59.635060 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-jbph5" event={"ID":"aeb8c181-493c-4f85-8500-f2109edc40f7","Type":"ContainerStarted","Data":"328dd2da14b5e4b46419a474459ac2256b59d14d55a5a43d1e5dd194fbcdc65e"} Jan 22 06:01:59 crc kubenswrapper[4982]: I0122 06:01:59.732523 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-sgz2n"] Jan 22 06:02:00 crc kubenswrapper[4982]: I0122 06:02:00.644114 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-sgz2n" event={"ID":"cc5f2133-2f6a-4e37-9224-c109867618ec","Type":"ContainerStarted","Data":"ed411b193d817fcbadbcffe678fa5f9b7218cec87e1d0ee76eac529810afad42"} Jan 22 06:02:07 crc kubenswrapper[4982]: I0122 06:02:07.694186 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-jbph5" event={"ID":"aeb8c181-493c-4f85-8500-f2109edc40f7","Type":"ContainerStarted","Data":"588b7f08e58a6071c61670f6856aea62499619477c1afb525b46b335800abfbb"} Jan 22 06:02:07 crc kubenswrapper[4982]: I0122 06:02:07.695017 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-f4fb5df64-jbph5" Jan 22 06:02:07 crc kubenswrapper[4982]: I0122 06:02:07.696691 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-sgz2n" event={"ID":"cc5f2133-2f6a-4e37-9224-c109867618ec","Type":"ContainerStarted","Data":"fb43b605806c1bee890e3dd24e0834fb0f45e4094adaa89e9850973e73924695"} Jan 22 06:02:07 crc kubenswrapper[4982]: I0122 06:02:07.719548 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-f4fb5df64-jbph5" podStartSLOduration=1.852077377 podStartE2EDuration="9.719524454s" podCreationTimestamp="2026-01-22 06:01:58 +0000 UTC" firstStartedPulling="2026-01-22 06:01:58.888867388 +0000 UTC m=+979.727505391" lastFinishedPulling="2026-01-22 06:02:06.756314455 +0000 UTC m=+987.594952468" observedRunningTime="2026-01-22 06:02:07.71756373 +0000 UTC m=+988.556201783" watchObservedRunningTime="2026-01-22 06:02:07.719524454 +0000 UTC m=+988.558162497" Jan 22 06:02:07 crc kubenswrapper[4982]: I0122 06:02:07.740829 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-855d9ccff4-sgz2n" podStartSLOduration=1.750747182 podStartE2EDuration="8.740800259s" podCreationTimestamp="2026-01-22 06:01:59 +0000 UTC" firstStartedPulling="2026-01-22 06:01:59.739956945 +0000 UTC m=+980.578594948" lastFinishedPulling="2026-01-22 06:02:06.730010012 +0000 UTC m=+987.568648025" 
observedRunningTime="2026-01-22 06:02:07.737451226 +0000 UTC m=+988.576089309" watchObservedRunningTime="2026-01-22 06:02:07.740800259 +0000 UTC m=+988.579438332" Jan 22 06:02:10 crc kubenswrapper[4982]: I0122 06:02:10.533004 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-86cb77c54b-644t8"] Jan 22 06:02:10 crc kubenswrapper[4982]: I0122 06:02:10.535586 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-644t8" Jan 22 06:02:10 crc kubenswrapper[4982]: I0122 06:02:10.538342 4982 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-vtfj6" Jan 22 06:02:10 crc kubenswrapper[4982]: I0122 06:02:10.548501 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-644t8"] Jan 22 06:02:10 crc kubenswrapper[4982]: I0122 06:02:10.578146 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwjk6\" (UniqueName: \"kubernetes.io/projected/97482988-28a9-41f6-90f9-8ab820afdb27-kube-api-access-dwjk6\") pod \"cert-manager-86cb77c54b-644t8\" (UID: \"97482988-28a9-41f6-90f9-8ab820afdb27\") " pod="cert-manager/cert-manager-86cb77c54b-644t8" Jan 22 06:02:10 crc kubenswrapper[4982]: I0122 06:02:10.578290 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/97482988-28a9-41f6-90f9-8ab820afdb27-bound-sa-token\") pod \"cert-manager-86cb77c54b-644t8\" (UID: \"97482988-28a9-41f6-90f9-8ab820afdb27\") " pod="cert-manager/cert-manager-86cb77c54b-644t8" Jan 22 06:02:10 crc kubenswrapper[4982]: I0122 06:02:10.680663 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/97482988-28a9-41f6-90f9-8ab820afdb27-bound-sa-token\") pod \"cert-manager-86cb77c54b-644t8\" (UID: \"97482988-28a9-41f6-90f9-8ab820afdb27\") " pod="cert-manager/cert-manager-86cb77c54b-644t8" Jan 22 06:02:10 crc kubenswrapper[4982]: I0122 06:02:10.680848 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwjk6\" (UniqueName: \"kubernetes.io/projected/97482988-28a9-41f6-90f9-8ab820afdb27-kube-api-access-dwjk6\") pod \"cert-manager-86cb77c54b-644t8\" (UID: \"97482988-28a9-41f6-90f9-8ab820afdb27\") " pod="cert-manager/cert-manager-86cb77c54b-644t8" Jan 22 06:02:10 crc kubenswrapper[4982]: I0122 06:02:10.708377 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwjk6\" (UniqueName: \"kubernetes.io/projected/97482988-28a9-41f6-90f9-8ab820afdb27-kube-api-access-dwjk6\") pod \"cert-manager-86cb77c54b-644t8\" (UID: \"97482988-28a9-41f6-90f9-8ab820afdb27\") " pod="cert-manager/cert-manager-86cb77c54b-644t8" Jan 22 06:02:10 crc kubenswrapper[4982]: I0122 06:02:10.721125 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/97482988-28a9-41f6-90f9-8ab820afdb27-bound-sa-token\") pod \"cert-manager-86cb77c54b-644t8\" (UID: \"97482988-28a9-41f6-90f9-8ab820afdb27\") " pod="cert-manager/cert-manager-86cb77c54b-644t8" Jan 22 06:02:10 crc kubenswrapper[4982]: I0122 06:02:10.855385 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-644t8" Jan 22 06:02:11 crc kubenswrapper[4982]: I0122 06:02:11.176148 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-644t8"] Jan 22 06:02:11 crc kubenswrapper[4982]: I0122 06:02:11.726541 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-644t8" event={"ID":"97482988-28a9-41f6-90f9-8ab820afdb27","Type":"ContainerStarted","Data":"7a31b5985a4a123f69b96eea020c7f771a6033d90269db9422df55f4c0e80238"} Jan 22 06:02:12 crc kubenswrapper[4982]: I0122 06:02:12.732505 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-644t8" event={"ID":"97482988-28a9-41f6-90f9-8ab820afdb27","Type":"ContainerStarted","Data":"6275470d4e103ad835d9e5a49bc7fd2232f7ba4b3edff8706baf09ecbafc9fdd"} Jan 22 06:02:12 crc kubenswrapper[4982]: I0122 06:02:12.754477 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-86cb77c54b-644t8" podStartSLOduration=2.754448102 podStartE2EDuration="2.754448102s" podCreationTimestamp="2026-01-22 06:02:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:02:12.75219071 +0000 UTC m=+993.590828783" watchObservedRunningTime="2026-01-22 06:02:12.754448102 +0000 UTC m=+993.593086105" Jan 22 06:02:13 crc kubenswrapper[4982]: I0122 06:02:13.444182 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-f4fb5df64-jbph5" Jan 22 06:02:17 crc kubenswrapper[4982]: I0122 06:02:17.177499 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-jz42f"] Jan 22 06:02:17 crc kubenswrapper[4982]: I0122 06:02:17.179376 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-jz42f" Jan 22 06:02:17 crc kubenswrapper[4982]: I0122 06:02:17.182385 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Jan 22 06:02:17 crc kubenswrapper[4982]: I0122 06:02:17.182422 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-klg5x" Jan 22 06:02:17 crc kubenswrapper[4982]: I0122 06:02:17.182396 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Jan 22 06:02:17 crc kubenswrapper[4982]: I0122 06:02:17.199010 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-jz42f"] Jan 22 06:02:17 crc kubenswrapper[4982]: I0122 06:02:17.288390 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwqmg\" (UniqueName: \"kubernetes.io/projected/cc3f9160-21a2-44e8-9b73-7f21b0ff75b1-kube-api-access-fwqmg\") pod \"openstack-operator-index-jz42f\" (UID: \"cc3f9160-21a2-44e8-9b73-7f21b0ff75b1\") " pod="openstack-operators/openstack-operator-index-jz42f" Jan 22 06:02:17 crc kubenswrapper[4982]: I0122 06:02:17.390930 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwqmg\" (UniqueName: \"kubernetes.io/projected/cc3f9160-21a2-44e8-9b73-7f21b0ff75b1-kube-api-access-fwqmg\") pod \"openstack-operator-index-jz42f\" (UID: \"cc3f9160-21a2-44e8-9b73-7f21b0ff75b1\") " pod="openstack-operators/openstack-operator-index-jz42f" Jan 22 06:02:17 crc kubenswrapper[4982]: I0122 06:02:17.413164 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwqmg\" (UniqueName: \"kubernetes.io/projected/cc3f9160-21a2-44e8-9b73-7f21b0ff75b1-kube-api-access-fwqmg\") pod \"openstack-operator-index-jz42f\" (UID: \"cc3f9160-21a2-44e8-9b73-7f21b0ff75b1\") " pod="openstack-operators/openstack-operator-index-jz42f" Jan 22 06:02:17 crc kubenswrapper[4982]: I0122 06:02:17.507682 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-jz42f" Jan 22 06:02:17 crc kubenswrapper[4982]: I0122 06:02:17.772789 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-jz42f"] Jan 22 06:02:18 crc kubenswrapper[4982]: I0122 06:02:18.793356 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jz42f" event={"ID":"cc3f9160-21a2-44e8-9b73-7f21b0ff75b1","Type":"ContainerStarted","Data":"e2b308f5c1a85b220e183bfeee2028225cae128ef151a79a04e1755c2d150ecf"} Jan 22 06:02:18 crc kubenswrapper[4982]: I0122 06:02:18.973699 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 06:02:18 crc kubenswrapper[4982]: I0122 06:02:18.973777 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 06:02:19 crc kubenswrapper[4982]: I0122 06:02:19.551041 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-jz42f"] Jan 22 06:02:19 crc kubenswrapper[4982]: I0122 06:02:19.804822 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jz42f" event={"ID":"cc3f9160-21a2-44e8-9b73-7f21b0ff75b1","Type":"ContainerStarted","Data":"de7516a4e2f3969f97471e636552ddc40ff8a0aefcae7af00c1eea67778dd959"} Jan 22 06:02:19 crc kubenswrapper[4982]: I0122 06:02:19.837396 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-jz42f" podStartSLOduration=2.001927521 podStartE2EDuration="2.837349149s" podCreationTimestamp="2026-01-22 06:02:17 +0000 UTC" firstStartedPulling="2026-01-22 06:02:17.808463365 +0000 UTC m=+998.647101368" lastFinishedPulling="2026-01-22 06:02:18.643884953 +0000 UTC m=+999.482522996" observedRunningTime="2026-01-22 06:02:19.827334444 +0000 UTC m=+1000.665972477" watchObservedRunningTime="2026-01-22 06:02:19.837349149 +0000 UTC m=+1000.675987182" Jan 22 06:02:19 crc kubenswrapper[4982]: I0122 06:02:19.963135 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-cvnbh"] Jan 22 06:02:19 crc kubenswrapper[4982]: I0122 06:02:19.964796 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-cvnbh" Jan 22 06:02:19 crc kubenswrapper[4982]: I0122 06:02:19.969941 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-cvnbh"] Jan 22 06:02:20 crc kubenswrapper[4982]: I0122 06:02:20.135502 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2466h\" (UniqueName: \"kubernetes.io/projected/2e0bb805-5881-4548-aebd-0924b927cbdd-kube-api-access-2466h\") pod \"openstack-operator-index-cvnbh\" (UID: \"2e0bb805-5881-4548-aebd-0924b927cbdd\") " pod="openstack-operators/openstack-operator-index-cvnbh" Jan 22 06:02:20 crc kubenswrapper[4982]: I0122 06:02:20.236932 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2466h\" (UniqueName: \"kubernetes.io/projected/2e0bb805-5881-4548-aebd-0924b927cbdd-kube-api-access-2466h\") pod \"openstack-operator-index-cvnbh\" (UID: \"2e0bb805-5881-4548-aebd-0924b927cbdd\") " pod="openstack-operators/openstack-operator-index-cvnbh" Jan 22 06:02:20 crc kubenswrapper[4982]: I0122 06:02:20.273604 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2466h\" (UniqueName: \"kubernetes.io/projected/2e0bb805-5881-4548-aebd-0924b927cbdd-kube-api-access-2466h\") pod \"openstack-operator-index-cvnbh\" (UID: \"2e0bb805-5881-4548-aebd-0924b927cbdd\") " pod="openstack-operators/openstack-operator-index-cvnbh" Jan 22 06:02:20 crc kubenswrapper[4982]: I0122 06:02:20.302342 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-cvnbh" Jan 22 06:02:20 crc kubenswrapper[4982]: I0122 06:02:20.780597 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-cvnbh"] Jan 22 06:02:20 crc kubenswrapper[4982]: W0122 06:02:20.786923 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2e0bb805_5881_4548_aebd_0924b927cbdd.slice/crio-7fced52f804c576e825aac910ab95a7f022be555c8d186a126cd168a25908687 WatchSource:0}: Error finding container 7fced52f804c576e825aac910ab95a7f022be555c8d186a126cd168a25908687: Status 404 returned error can't find the container with id 7fced52f804c576e825aac910ab95a7f022be555c8d186a126cd168a25908687 Jan 22 06:02:20 crc kubenswrapper[4982]: I0122 06:02:20.815501 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-cvnbh" event={"ID":"2e0bb805-5881-4548-aebd-0924b927cbdd","Type":"ContainerStarted","Data":"7fced52f804c576e825aac910ab95a7f022be555c8d186a126cd168a25908687"} Jan 22 06:02:20 crc kubenswrapper[4982]: I0122 06:02:20.815733 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-jz42f" podUID="cc3f9160-21a2-44e8-9b73-7f21b0ff75b1" containerName="registry-server" containerID="cri-o://de7516a4e2f3969f97471e636552ddc40ff8a0aefcae7af00c1eea67778dd959" gracePeriod=2 Jan 22 06:02:21 crc kubenswrapper[4982]: I0122 06:02:21.250904 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-jz42f" Jan 22 06:02:21 crc kubenswrapper[4982]: I0122 06:02:21.381413 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwqmg\" (UniqueName: \"kubernetes.io/projected/cc3f9160-21a2-44e8-9b73-7f21b0ff75b1-kube-api-access-fwqmg\") pod \"cc3f9160-21a2-44e8-9b73-7f21b0ff75b1\" (UID: \"cc3f9160-21a2-44e8-9b73-7f21b0ff75b1\") " Jan 22 06:02:21 crc kubenswrapper[4982]: I0122 06:02:21.393216 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc3f9160-21a2-44e8-9b73-7f21b0ff75b1-kube-api-access-fwqmg" (OuterVolumeSpecName: "kube-api-access-fwqmg") pod "cc3f9160-21a2-44e8-9b73-7f21b0ff75b1" (UID: "cc3f9160-21a2-44e8-9b73-7f21b0ff75b1"). InnerVolumeSpecName "kube-api-access-fwqmg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:02:21 crc kubenswrapper[4982]: I0122 06:02:21.483137 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwqmg\" (UniqueName: \"kubernetes.io/projected/cc3f9160-21a2-44e8-9b73-7f21b0ff75b1-kube-api-access-fwqmg\") on node \"crc\" DevicePath \"\"" Jan 22 06:02:21 crc kubenswrapper[4982]: I0122 06:02:21.826840 4982 generic.go:334] "Generic (PLEG): container finished" podID="cc3f9160-21a2-44e8-9b73-7f21b0ff75b1" containerID="de7516a4e2f3969f97471e636552ddc40ff8a0aefcae7af00c1eea67778dd959" exitCode=0 Jan 22 06:02:21 crc kubenswrapper[4982]: I0122 06:02:21.826942 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jz42f" event={"ID":"cc3f9160-21a2-44e8-9b73-7f21b0ff75b1","Type":"ContainerDied","Data":"de7516a4e2f3969f97471e636552ddc40ff8a0aefcae7af00c1eea67778dd959"} Jan 22 06:02:21 crc kubenswrapper[4982]: I0122 06:02:21.826984 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-jz42f" Jan 22 06:02:21 crc kubenswrapper[4982]: I0122 06:02:21.827022 4982 scope.go:117] "RemoveContainer" containerID="de7516a4e2f3969f97471e636552ddc40ff8a0aefcae7af00c1eea67778dd959" Jan 22 06:02:21 crc kubenswrapper[4982]: I0122 06:02:21.827003 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jz42f" event={"ID":"cc3f9160-21a2-44e8-9b73-7f21b0ff75b1","Type":"ContainerDied","Data":"e2b308f5c1a85b220e183bfeee2028225cae128ef151a79a04e1755c2d150ecf"} Jan 22 06:02:21 crc kubenswrapper[4982]: I0122 06:02:21.830982 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-cvnbh" event={"ID":"2e0bb805-5881-4548-aebd-0924b927cbdd","Type":"ContainerStarted","Data":"8cd299fa834c7353027481529fe779dd51da7faa9595f42830f5efe3d8b1786e"} Jan 22 06:02:21 crc kubenswrapper[4982]: I0122 06:02:21.859300 4982 scope.go:117] "RemoveContainer" containerID="de7516a4e2f3969f97471e636552ddc40ff8a0aefcae7af00c1eea67778dd959" Jan 22 06:02:21 crc kubenswrapper[4982]: E0122 06:02:21.860158 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de7516a4e2f3969f97471e636552ddc40ff8a0aefcae7af00c1eea67778dd959\": container with ID starting with de7516a4e2f3969f97471e636552ddc40ff8a0aefcae7af00c1eea67778dd959 not found: ID does not exist" containerID="de7516a4e2f3969f97471e636552ddc40ff8a0aefcae7af00c1eea67778dd959" Jan 22 06:02:21 crc kubenswrapper[4982]: I0122 06:02:21.860236 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de7516a4e2f3969f97471e636552ddc40ff8a0aefcae7af00c1eea67778dd959"} err="failed to get container status \"de7516a4e2f3969f97471e636552ddc40ff8a0aefcae7af00c1eea67778dd959\": rpc error: code = NotFound desc = could not find container \"de7516a4e2f3969f97471e636552ddc40ff8a0aefcae7af00c1eea67778dd959\": container with ID starting with de7516a4e2f3969f97471e636552ddc40ff8a0aefcae7af00c1eea67778dd959 not found: ID does not exist" Jan 22 06:02:21 crc kubenswrapper[4982]: I0122 06:02:21.861777 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-jz42f"] Jan 22 06:02:21 crc kubenswrapper[4982]: I0122 06:02:21.878360 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-jz42f"] Jan 22 06:02:21 crc kubenswrapper[4982]: I0122 06:02:21.888142 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-cvnbh" podStartSLOduration=2.388268389 podStartE2EDuration="2.888111854s" podCreationTimestamp="2026-01-22 06:02:19 +0000 UTC" firstStartedPulling="2026-01-22 06:02:20.793306669 +0000 UTC m=+1001.631944672" lastFinishedPulling="2026-01-22 06:02:21.293150134 +0000 UTC m=+1002.131788137" observedRunningTime="2026-01-22 06:02:21.883130287 +0000 UTC m=+1002.721768330" watchObservedRunningTime="2026-01-22 06:02:21.888111854 +0000 UTC m=+1002.726749877" Jan 22 06:02:23 crc kubenswrapper[4982]: I0122 06:02:23.731571 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc3f9160-21a2-44e8-9b73-7f21b0ff75b1" path="/var/lib/kubelet/pods/cc3f9160-21a2-44e8-9b73-7f21b0ff75b1/volumes" Jan 22 06:02:30 crc kubenswrapper[4982]: I0122 06:02:30.303566 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/openstack-operator-index-cvnbh" Jan 22 06:02:30 crc kubenswrapper[4982]: I0122 06:02:30.304475 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-cvnbh" Jan 22 06:02:30 crc kubenswrapper[4982]: I0122 06:02:30.353763 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-cvnbh" Jan 22 06:02:30 crc kubenswrapper[4982]: I0122 06:02:30.941316 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-cvnbh" Jan 22 06:02:39 crc kubenswrapper[4982]: I0122 06:02:39.939457 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8m8bj"] Jan 22 06:02:39 crc kubenswrapper[4982]: E0122 06:02:39.940109 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc3f9160-21a2-44e8-9b73-7f21b0ff75b1" containerName="registry-server" Jan 22 06:02:39 crc kubenswrapper[4982]: I0122 06:02:39.940126 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc3f9160-21a2-44e8-9b73-7f21b0ff75b1" containerName="registry-server" Jan 22 06:02:39 crc kubenswrapper[4982]: I0122 06:02:39.940285 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc3f9160-21a2-44e8-9b73-7f21b0ff75b1" containerName="registry-server" Jan 22 06:02:39 crc kubenswrapper[4982]: I0122 06:02:39.941258 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8m8bj" Jan 22 06:02:39 crc kubenswrapper[4982]: I0122 06:02:39.960389 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8m8bj"] Jan 22 06:02:40 crc kubenswrapper[4982]: I0122 06:02:40.091943 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/63b8c9df-48d8-4a0a-95b6-8007d3fd4859-utilities\") pod \"redhat-marketplace-8m8bj\" (UID: \"63b8c9df-48d8-4a0a-95b6-8007d3fd4859\") " pod="openshift-marketplace/redhat-marketplace-8m8bj" Jan 22 06:02:40 crc kubenswrapper[4982]: I0122 06:02:40.092006 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzhjc\" (UniqueName: \"kubernetes.io/projected/63b8c9df-48d8-4a0a-95b6-8007d3fd4859-kube-api-access-wzhjc\") pod \"redhat-marketplace-8m8bj\" (UID: \"63b8c9df-48d8-4a0a-95b6-8007d3fd4859\") " pod="openshift-marketplace/redhat-marketplace-8m8bj" Jan 22 06:02:40 crc kubenswrapper[4982]: I0122 06:02:40.092099 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/63b8c9df-48d8-4a0a-95b6-8007d3fd4859-catalog-content\") pod \"redhat-marketplace-8m8bj\" (UID: \"63b8c9df-48d8-4a0a-95b6-8007d3fd4859\") " pod="openshift-marketplace/redhat-marketplace-8m8bj" Jan 22 06:02:40 crc kubenswrapper[4982]: I0122 06:02:40.193530 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/63b8c9df-48d8-4a0a-95b6-8007d3fd4859-utilities\") pod \"redhat-marketplace-8m8bj\" (UID: \"63b8c9df-48d8-4a0a-95b6-8007d3fd4859\") " pod="openshift-marketplace/redhat-marketplace-8m8bj" Jan 22 06:02:40 crc kubenswrapper[4982]: I0122 06:02:40.193596 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-wzhjc\" (UniqueName: \"kubernetes.io/projected/63b8c9df-48d8-4a0a-95b6-8007d3fd4859-kube-api-access-wzhjc\") pod \"redhat-marketplace-8m8bj\" (UID: \"63b8c9df-48d8-4a0a-95b6-8007d3fd4859\") " pod="openshift-marketplace/redhat-marketplace-8m8bj" Jan 22 06:02:40 crc kubenswrapper[4982]: I0122 06:02:40.193658 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/63b8c9df-48d8-4a0a-95b6-8007d3fd4859-catalog-content\") pod \"redhat-marketplace-8m8bj\" (UID: \"63b8c9df-48d8-4a0a-95b6-8007d3fd4859\") " pod="openshift-marketplace/redhat-marketplace-8m8bj" Jan 22 06:02:40 crc kubenswrapper[4982]: I0122 06:02:40.194206 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/63b8c9df-48d8-4a0a-95b6-8007d3fd4859-utilities\") pod \"redhat-marketplace-8m8bj\" (UID: \"63b8c9df-48d8-4a0a-95b6-8007d3fd4859\") " pod="openshift-marketplace/redhat-marketplace-8m8bj" Jan 22 06:02:40 crc kubenswrapper[4982]: I0122 06:02:40.194274 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/63b8c9df-48d8-4a0a-95b6-8007d3fd4859-catalog-content\") pod \"redhat-marketplace-8m8bj\" (UID: \"63b8c9df-48d8-4a0a-95b6-8007d3fd4859\") " pod="openshift-marketplace/redhat-marketplace-8m8bj" Jan 22 06:02:40 crc kubenswrapper[4982]: I0122 06:02:40.213234 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzhjc\" (UniqueName: \"kubernetes.io/projected/63b8c9df-48d8-4a0a-95b6-8007d3fd4859-kube-api-access-wzhjc\") pod \"redhat-marketplace-8m8bj\" (UID: \"63b8c9df-48d8-4a0a-95b6-8007d3fd4859\") " pod="openshift-marketplace/redhat-marketplace-8m8bj" Jan 22 06:02:40 crc kubenswrapper[4982]: I0122 06:02:40.259114 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8m8bj" Jan 22 06:02:40 crc kubenswrapper[4982]: I0122 06:02:40.749664 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8m8bj"] Jan 22 06:02:40 crc kubenswrapper[4982]: I0122 06:02:40.969955 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8m8bj" event={"ID":"63b8c9df-48d8-4a0a-95b6-8007d3fd4859","Type":"ContainerStarted","Data":"0845f2ad9c71da055bba6bbf989f84eb0efa8ac7e6f890709f5806d1eab002da"} Jan 22 06:02:42 crc kubenswrapper[4982]: I0122 06:02:42.994093 4982 generic.go:334] "Generic (PLEG): container finished" podID="63b8c9df-48d8-4a0a-95b6-8007d3fd4859" containerID="d07915a3ccec37bc37f136dc97b5ea6fa1ecec592c5b9c62370e812644071a8a" exitCode=0 Jan 22 06:02:42 crc kubenswrapper[4982]: I0122 06:02:42.994182 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8m8bj" event={"ID":"63b8c9df-48d8-4a0a-95b6-8007d3fd4859","Type":"ContainerDied","Data":"d07915a3ccec37bc37f136dc97b5ea6fa1ecec592c5b9c62370e812644071a8a"} Jan 22 06:02:43 crc kubenswrapper[4982]: I0122 06:02:43.552989 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc"] Jan 22 06:02:43 crc kubenswrapper[4982]: I0122 06:02:43.555634 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc" Jan 22 06:02:43 crc kubenswrapper[4982]: I0122 06:02:43.558596 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-nv46n" Jan 22 06:02:43 crc kubenswrapper[4982]: I0122 06:02:43.572032 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc"] Jan 22 06:02:43 crc kubenswrapper[4982]: I0122 06:02:43.645492 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pg2x\" (UniqueName: \"kubernetes.io/projected/29d8df82-73c3-42da-adea-9c703a1ed81a-kube-api-access-4pg2x\") pod \"3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc\" (UID: \"29d8df82-73c3-42da-adea-9c703a1ed81a\") " pod="openstack-operators/3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc" Jan 22 06:02:43 crc kubenswrapper[4982]: I0122 06:02:43.646277 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/29d8df82-73c3-42da-adea-9c703a1ed81a-util\") pod \"3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc\" (UID: \"29d8df82-73c3-42da-adea-9c703a1ed81a\") " pod="openstack-operators/3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc" Jan 22 06:02:43 crc kubenswrapper[4982]: I0122 06:02:43.646335 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/29d8df82-73c3-42da-adea-9c703a1ed81a-bundle\") pod \"3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc\" (UID: \"29d8df82-73c3-42da-adea-9c703a1ed81a\") " pod="openstack-operators/3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc" Jan 22 06:02:43 crc kubenswrapper[4982]: I0122 06:02:43.747743 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4pg2x\" (UniqueName: \"kubernetes.io/projected/29d8df82-73c3-42da-adea-9c703a1ed81a-kube-api-access-4pg2x\") pod \"3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc\" (UID: \"29d8df82-73c3-42da-adea-9c703a1ed81a\") " pod="openstack-operators/3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc" Jan 22 06:02:43 crc kubenswrapper[4982]: I0122 06:02:43.747899 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/29d8df82-73c3-42da-adea-9c703a1ed81a-util\") pod \"3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc\" (UID: \"29d8df82-73c3-42da-adea-9c703a1ed81a\") " pod="openstack-operators/3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc" Jan 22 06:02:43 crc kubenswrapper[4982]: I0122 06:02:43.747935 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/29d8df82-73c3-42da-adea-9c703a1ed81a-bundle\") pod \"3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc\" (UID: \"29d8df82-73c3-42da-adea-9c703a1ed81a\") " pod="openstack-operators/3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc" Jan 22 06:02:43 crc kubenswrapper[4982]: I0122 06:02:43.748571 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/29d8df82-73c3-42da-adea-9c703a1ed81a-bundle\") pod \"3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc\" (UID: \"29d8df82-73c3-42da-adea-9c703a1ed81a\") " pod="openstack-operators/3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc" Jan 22 06:02:43 crc kubenswrapper[4982]: I0122 06:02:43.748914 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/29d8df82-73c3-42da-adea-9c703a1ed81a-util\") pod \"3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc\" (UID: \"29d8df82-73c3-42da-adea-9c703a1ed81a\") " pod="openstack-operators/3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc" Jan 22 06:02:43 crc kubenswrapper[4982]: I0122 06:02:43.775948 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4pg2x\" (UniqueName: \"kubernetes.io/projected/29d8df82-73c3-42da-adea-9c703a1ed81a-kube-api-access-4pg2x\") pod \"3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc\" (UID: \"29d8df82-73c3-42da-adea-9c703a1ed81a\") " pod="openstack-operators/3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc" Jan 22 06:02:43 crc kubenswrapper[4982]: I0122 06:02:43.880016 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc" Jan 22 06:02:44 crc kubenswrapper[4982]: I0122 06:02:44.035908 4982 generic.go:334] "Generic (PLEG): container finished" podID="63b8c9df-48d8-4a0a-95b6-8007d3fd4859" containerID="075cd6ba591283d7205ef4a7dbe1b7ca67fd95dc7a176f05384ddfe4c60e3465" exitCode=0 Jan 22 06:02:44 crc kubenswrapper[4982]: I0122 06:02:44.035944 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8m8bj" event={"ID":"63b8c9df-48d8-4a0a-95b6-8007d3fd4859","Type":"ContainerDied","Data":"075cd6ba591283d7205ef4a7dbe1b7ca67fd95dc7a176f05384ddfe4c60e3465"} Jan 22 06:02:44 crc kubenswrapper[4982]: I0122 06:02:44.158758 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc"] Jan 22 06:02:44 crc kubenswrapper[4982]: W0122 06:02:44.165305 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod29d8df82_73c3_42da_adea_9c703a1ed81a.slice/crio-38cfa60db80b1b7c92ebe861c8e48de7000e92b421ff41c737147f99ce0afd68 WatchSource:0}: Error finding container 38cfa60db80b1b7c92ebe861c8e48de7000e92b421ff41c737147f99ce0afd68: Status 404 returned error can't find the container with id 38cfa60db80b1b7c92ebe861c8e48de7000e92b421ff41c737147f99ce0afd68 Jan 22 06:02:45 crc kubenswrapper[4982]: I0122 06:02:45.047686 4982 generic.go:334] "Generic (PLEG): container finished" podID="29d8df82-73c3-42da-adea-9c703a1ed81a" containerID="e4676df1816b888067f8ebb0a2c9f9d4f92dc62a09ac8def5426c9ae52eeed21" exitCode=0 Jan 22 06:02:45 crc kubenswrapper[4982]: I0122 06:02:45.048195 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc" event={"ID":"29d8df82-73c3-42da-adea-9c703a1ed81a","Type":"ContainerDied","Data":"e4676df1816b888067f8ebb0a2c9f9d4f92dc62a09ac8def5426c9ae52eeed21"} Jan 22 06:02:45 crc kubenswrapper[4982]: I0122 06:02:45.048239 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc" event={"ID":"29d8df82-73c3-42da-adea-9c703a1ed81a","Type":"ContainerStarted","Data":"38cfa60db80b1b7c92ebe861c8e48de7000e92b421ff41c737147f99ce0afd68"} Jan 22 06:02:45 crc kubenswrapper[4982]: I0122 06:02:45.061198 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8m8bj" event={"ID":"63b8c9df-48d8-4a0a-95b6-8007d3fd4859","Type":"ContainerStarted","Data":"0a5f77140c5ac25a131ce4d46af9e1eae0d6bc9a19369a4d65fb27fd56eaed7f"} Jan 22 06:02:45 crc kubenswrapper[4982]: I0122 06:02:45.099578 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8m8bj" podStartSLOduration=4.622356109 podStartE2EDuration="6.099557152s" podCreationTimestamp="2026-01-22 06:02:39 +0000 UTC" firstStartedPulling="2026-01-22 06:02:42.996256934 +0000 UTC m=+1023.834894977" lastFinishedPulling="2026-01-22 06:02:44.473457987 +0000 UTC m=+1025.312096020" observedRunningTime="2026-01-22 06:02:45.097563928 +0000 UTC m=+1025.936201931" watchObservedRunningTime="2026-01-22 06:02:45.099557152 +0000 UTC m=+1025.938195165" Jan 22 06:02:46 crc kubenswrapper[4982]: I0122 06:02:46.067738 4982 generic.go:334] "Generic (PLEG): container finished" podID="29d8df82-73c3-42da-adea-9c703a1ed81a" containerID="61523daf140984d8d41f356f82d2adac71c5cbe7743570e0759379a7163c895f" exitCode=0 Jan 22 06:02:46 crc kubenswrapper[4982]: I0122 06:02:46.067997 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc" event={"ID":"29d8df82-73c3-42da-adea-9c703a1ed81a","Type":"ContainerDied","Data":"61523daf140984d8d41f356f82d2adac71c5cbe7743570e0759379a7163c895f"} Jan 22 06:02:47 crc kubenswrapper[4982]: I0122 06:02:47.089708 4982 generic.go:334] "Generic (PLEG): container finished" podID="29d8df82-73c3-42da-adea-9c703a1ed81a" containerID="52ab77d8aea008935a5b76b93777c59722c27308d45fa34bfe7e693aee070126" exitCode=0 Jan 22 06:02:47 crc kubenswrapper[4982]: I0122 06:02:47.089771 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc" event={"ID":"29d8df82-73c3-42da-adea-9c703a1ed81a","Type":"ContainerDied","Data":"52ab77d8aea008935a5b76b93777c59722c27308d45fa34bfe7e693aee070126"} Jan 22 06:02:48 crc kubenswrapper[4982]: I0122 06:02:48.342218 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc" Jan 22 06:02:48 crc kubenswrapper[4982]: I0122 06:02:48.519223 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/29d8df82-73c3-42da-adea-9c703a1ed81a-util\") pod \"29d8df82-73c3-42da-adea-9c703a1ed81a\" (UID: \"29d8df82-73c3-42da-adea-9c703a1ed81a\") " Jan 22 06:02:48 crc kubenswrapper[4982]: I0122 06:02:48.519302 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/29d8df82-73c3-42da-adea-9c703a1ed81a-bundle\") pod \"29d8df82-73c3-42da-adea-9c703a1ed81a\" (UID: \"29d8df82-73c3-42da-adea-9c703a1ed81a\") " Jan 22 06:02:48 crc kubenswrapper[4982]: I0122 06:02:48.519336 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4pg2x\" (UniqueName: \"kubernetes.io/projected/29d8df82-73c3-42da-adea-9c703a1ed81a-kube-api-access-4pg2x\") pod \"29d8df82-73c3-42da-adea-9c703a1ed81a\" (UID: \"29d8df82-73c3-42da-adea-9c703a1ed81a\") " Jan 22 06:02:48 crc kubenswrapper[4982]: I0122 06:02:48.520567 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29d8df82-73c3-42da-adea-9c703a1ed81a-bundle" (OuterVolumeSpecName: "bundle") pod "29d8df82-73c3-42da-adea-9c703a1ed81a" (UID: "29d8df82-73c3-42da-adea-9c703a1ed81a"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:02:48 crc kubenswrapper[4982]: I0122 06:02:48.526625 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29d8df82-73c3-42da-adea-9c703a1ed81a-kube-api-access-4pg2x" (OuterVolumeSpecName: "kube-api-access-4pg2x") pod "29d8df82-73c3-42da-adea-9c703a1ed81a" (UID: "29d8df82-73c3-42da-adea-9c703a1ed81a"). InnerVolumeSpecName "kube-api-access-4pg2x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:02:48 crc kubenswrapper[4982]: I0122 06:02:48.554432 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29d8df82-73c3-42da-adea-9c703a1ed81a-util" (OuterVolumeSpecName: "util") pod "29d8df82-73c3-42da-adea-9c703a1ed81a" (UID: "29d8df82-73c3-42da-adea-9c703a1ed81a"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:02:48 crc kubenswrapper[4982]: I0122 06:02:48.621425 4982 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/29d8df82-73c3-42da-adea-9c703a1ed81a-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:02:48 crc kubenswrapper[4982]: I0122 06:02:48.621481 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4pg2x\" (UniqueName: \"kubernetes.io/projected/29d8df82-73c3-42da-adea-9c703a1ed81a-kube-api-access-4pg2x\") on node \"crc\" DevicePath \"\"" Jan 22 06:02:48 crc kubenswrapper[4982]: I0122 06:02:48.621500 4982 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/29d8df82-73c3-42da-adea-9c703a1ed81a-util\") on node \"crc\" DevicePath \"\"" Jan 22 06:02:48 crc kubenswrapper[4982]: I0122 06:02:48.973743 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 06:02:48 crc kubenswrapper[4982]: I0122 06:02:48.973829 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 06:02:49 crc kubenswrapper[4982]: I0122 06:02:49.108836 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc" event={"ID":"29d8df82-73c3-42da-adea-9c703a1ed81a","Type":"ContainerDied","Data":"38cfa60db80b1b7c92ebe861c8e48de7000e92b421ff41c737147f99ce0afd68"} Jan 22 06:02:49 crc kubenswrapper[4982]: I0122 06:02:49.108922 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="38cfa60db80b1b7c92ebe861c8e48de7000e92b421ff41c737147f99ce0afd68" Jan 22 06:02:49 crc kubenswrapper[4982]: I0122 06:02:49.108885 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc" Jan 22 06:02:50 crc kubenswrapper[4982]: I0122 06:02:50.259599 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8m8bj" Jan 22 06:02:50 crc kubenswrapper[4982]: I0122 06:02:50.259683 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8m8bj" Jan 22 06:02:50 crc kubenswrapper[4982]: I0122 06:02:50.307994 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8m8bj" Jan 22 06:02:51 crc kubenswrapper[4982]: I0122 06:02:51.186306 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8m8bj" Jan 22 06:02:52 crc kubenswrapper[4982]: I0122 06:02:52.294418 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8m8bj"] Jan 22 06:02:53 crc kubenswrapper[4982]: I0122 06:02:53.139894 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-8m8bj" podUID="63b8c9df-48d8-4a0a-95b6-8007d3fd4859" containerName="registry-server" containerID="cri-o://0a5f77140c5ac25a131ce4d46af9e1eae0d6bc9a19369a4d65fb27fd56eaed7f" gracePeriod=2 Jan 22 06:02:54 crc kubenswrapper[4982]: I0122 06:02:54.149786 4982 generic.go:334] "Generic (PLEG): container finished" podID="63b8c9df-48d8-4a0a-95b6-8007d3fd4859" containerID="0a5f77140c5ac25a131ce4d46af9e1eae0d6bc9a19369a4d65fb27fd56eaed7f" exitCode=0 Jan 22 06:02:54 crc kubenswrapper[4982]: I0122 06:02:54.149843 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8m8bj" event={"ID":"63b8c9df-48d8-4a0a-95b6-8007d3fd4859","Type":"ContainerDied","Data":"0a5f77140c5ac25a131ce4d46af9e1eae0d6bc9a19369a4d65fb27fd56eaed7f"} Jan 22 06:02:54 crc kubenswrapper[4982]: I0122 06:02:54.150166 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8m8bj" event={"ID":"63b8c9df-48d8-4a0a-95b6-8007d3fd4859","Type":"ContainerDied","Data":"0845f2ad9c71da055bba6bbf989f84eb0efa8ac7e6f890709f5806d1eab002da"} Jan 22 06:02:54 crc kubenswrapper[4982]: I0122 06:02:54.150196 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0845f2ad9c71da055bba6bbf989f84eb0efa8ac7e6f890709f5806d1eab002da" Jan 22 06:02:54 crc kubenswrapper[4982]: I0122 06:02:54.168656 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8m8bj" Jan 22 06:02:54 crc kubenswrapper[4982]: I0122 06:02:54.313144 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wzhjc\" (UniqueName: \"kubernetes.io/projected/63b8c9df-48d8-4a0a-95b6-8007d3fd4859-kube-api-access-wzhjc\") pod \"63b8c9df-48d8-4a0a-95b6-8007d3fd4859\" (UID: \"63b8c9df-48d8-4a0a-95b6-8007d3fd4859\") " Jan 22 06:02:54 crc kubenswrapper[4982]: I0122 06:02:54.313218 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/63b8c9df-48d8-4a0a-95b6-8007d3fd4859-utilities\") pod \"63b8c9df-48d8-4a0a-95b6-8007d3fd4859\" (UID: \"63b8c9df-48d8-4a0a-95b6-8007d3fd4859\") " Jan 22 06:02:54 crc kubenswrapper[4982]: I0122 06:02:54.313384 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/63b8c9df-48d8-4a0a-95b6-8007d3fd4859-catalog-content\") pod \"63b8c9df-48d8-4a0a-95b6-8007d3fd4859\" (UID: \"63b8c9df-48d8-4a0a-95b6-8007d3fd4859\") " Jan 22 06:02:54 crc kubenswrapper[4982]: I0122 06:02:54.314416 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/63b8c9df-48d8-4a0a-95b6-8007d3fd4859-utilities" (OuterVolumeSpecName: "utilities") pod "63b8c9df-48d8-4a0a-95b6-8007d3fd4859" (UID: "63b8c9df-48d8-4a0a-95b6-8007d3fd4859"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:02:54 crc kubenswrapper[4982]: I0122 06:02:54.333077 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63b8c9df-48d8-4a0a-95b6-8007d3fd4859-kube-api-access-wzhjc" (OuterVolumeSpecName: "kube-api-access-wzhjc") pod "63b8c9df-48d8-4a0a-95b6-8007d3fd4859" (UID: "63b8c9df-48d8-4a0a-95b6-8007d3fd4859"). InnerVolumeSpecName "kube-api-access-wzhjc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:02:54 crc kubenswrapper[4982]: I0122 06:02:54.382296 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/63b8c9df-48d8-4a0a-95b6-8007d3fd4859-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "63b8c9df-48d8-4a0a-95b6-8007d3fd4859" (UID: "63b8c9df-48d8-4a0a-95b6-8007d3fd4859"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:02:54 crc kubenswrapper[4982]: I0122 06:02:54.414816 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/63b8c9df-48d8-4a0a-95b6-8007d3fd4859-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 06:02:54 crc kubenswrapper[4982]: I0122 06:02:54.414874 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wzhjc\" (UniqueName: \"kubernetes.io/projected/63b8c9df-48d8-4a0a-95b6-8007d3fd4859-kube-api-access-wzhjc\") on node \"crc\" DevicePath \"\"" Jan 22 06:02:54 crc kubenswrapper[4982]: I0122 06:02:54.414890 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/63b8c9df-48d8-4a0a-95b6-8007d3fd4859-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 06:02:55 crc kubenswrapper[4982]: I0122 06:02:55.156316 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8m8bj" Jan 22 06:02:55 crc kubenswrapper[4982]: I0122 06:02:55.202997 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8m8bj"] Jan 22 06:02:55 crc kubenswrapper[4982]: I0122 06:02:55.208743 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-8m8bj"] Jan 22 06:02:55 crc kubenswrapper[4982]: I0122 06:02:55.707531 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-init-5cd76577f9-ghpnv"] Jan 22 06:02:55 crc kubenswrapper[4982]: E0122 06:02:55.707814 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29d8df82-73c3-42da-adea-9c703a1ed81a" containerName="extract" Jan 22 06:02:55 crc kubenswrapper[4982]: I0122 06:02:55.707828 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="29d8df82-73c3-42da-adea-9c703a1ed81a" containerName="extract" Jan 22 06:02:55 crc kubenswrapper[4982]: E0122 06:02:55.707862 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63b8c9df-48d8-4a0a-95b6-8007d3fd4859" containerName="extract-content" Jan 22 06:02:55 crc kubenswrapper[4982]: I0122 06:02:55.707870 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="63b8c9df-48d8-4a0a-95b6-8007d3fd4859" containerName="extract-content" Jan 22 06:02:55 crc kubenswrapper[4982]: E0122 06:02:55.707886 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29d8df82-73c3-42da-adea-9c703a1ed81a" containerName="util" Jan 22 06:02:55 crc kubenswrapper[4982]: I0122 06:02:55.707895 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="29d8df82-73c3-42da-adea-9c703a1ed81a" containerName="util" Jan 22 06:02:55 crc kubenswrapper[4982]: E0122 06:02:55.707909 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63b8c9df-48d8-4a0a-95b6-8007d3fd4859" containerName="extract-utilities" Jan 22 06:02:55 crc kubenswrapper[4982]: I0122 06:02:55.707916 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="63b8c9df-48d8-4a0a-95b6-8007d3fd4859" containerName="extract-utilities" Jan 22 06:02:55 crc kubenswrapper[4982]: E0122 06:02:55.707928 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63b8c9df-48d8-4a0a-95b6-8007d3fd4859" containerName="registry-server" Jan 22 06:02:55 crc kubenswrapper[4982]: I0122 06:02:55.707935 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="63b8c9df-48d8-4a0a-95b6-8007d3fd4859" containerName="registry-server" Jan 22 06:02:55 crc kubenswrapper[4982]: E0122 06:02:55.707946 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29d8df82-73c3-42da-adea-9c703a1ed81a" containerName="pull" Jan 22 06:02:55 crc kubenswrapper[4982]: I0122 06:02:55.707953 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="29d8df82-73c3-42da-adea-9c703a1ed81a" containerName="pull" Jan 22 06:02:55 crc kubenswrapper[4982]: I0122 06:02:55.708074 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="63b8c9df-48d8-4a0a-95b6-8007d3fd4859" containerName="registry-server" Jan 22 06:02:55 crc kubenswrapper[4982]: I0122 06:02:55.708094 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="29d8df82-73c3-42da-adea-9c703a1ed81a" containerName="extract" Jan 22 06:02:55 crc kubenswrapper[4982]: I0122 06:02:55.708558 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-5cd76577f9-ghpnv" Jan 22 06:02:55 crc kubenswrapper[4982]: I0122 06:02:55.713411 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-init-dockercfg-2km9m" Jan 22 06:02:55 crc kubenswrapper[4982]: I0122 06:02:55.727136 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63b8c9df-48d8-4a0a-95b6-8007d3fd4859" path="/var/lib/kubelet/pods/63b8c9df-48d8-4a0a-95b6-8007d3fd4859/volumes" Jan 22 06:02:55 crc kubenswrapper[4982]: I0122 06:02:55.749916 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-5cd76577f9-ghpnv"] Jan 22 06:02:55 crc kubenswrapper[4982]: I0122 06:02:55.836577 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5snm8\" (UniqueName: \"kubernetes.io/projected/4423d43b-5bd0-4e58-adc1-b428a3254273-kube-api-access-5snm8\") pod \"openstack-operator-controller-init-5cd76577f9-ghpnv\" (UID: \"4423d43b-5bd0-4e58-adc1-b428a3254273\") " pod="openstack-operators/openstack-operator-controller-init-5cd76577f9-ghpnv" Jan 22 06:02:55 crc kubenswrapper[4982]: I0122 06:02:55.938603 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5snm8\" (UniqueName: \"kubernetes.io/projected/4423d43b-5bd0-4e58-adc1-b428a3254273-kube-api-access-5snm8\") pod \"openstack-operator-controller-init-5cd76577f9-ghpnv\" (UID: \"4423d43b-5bd0-4e58-adc1-b428a3254273\") " pod="openstack-operators/openstack-operator-controller-init-5cd76577f9-ghpnv" Jan 22 06:02:55 crc kubenswrapper[4982]: I0122 06:02:55.962152 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5snm8\" (UniqueName: \"kubernetes.io/projected/4423d43b-5bd0-4e58-adc1-b428a3254273-kube-api-access-5snm8\") pod \"openstack-operator-controller-init-5cd76577f9-ghpnv\" (UID: \"4423d43b-5bd0-4e58-adc1-b428a3254273\") " pod="openstack-operators/openstack-operator-controller-init-5cd76577f9-ghpnv" Jan 22 06:02:56 crc kubenswrapper[4982]: I0122 06:02:56.023082 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-5cd76577f9-ghpnv" Jan 22 06:02:56 crc kubenswrapper[4982]: I0122 06:02:56.563038 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-5cd76577f9-ghpnv"] Jan 22 06:02:57 crc kubenswrapper[4982]: I0122 06:02:57.178115 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-5cd76577f9-ghpnv" event={"ID":"4423d43b-5bd0-4e58-adc1-b428a3254273","Type":"ContainerStarted","Data":"993b7811272945e784233070158c93fde023c424fa0f03f2d5594d26bae89e17"} Jan 22 06:03:01 crc kubenswrapper[4982]: I0122 06:03:01.204512 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-5cd76577f9-ghpnv" event={"ID":"4423d43b-5bd0-4e58-adc1-b428a3254273","Type":"ContainerStarted","Data":"1d2abc3840e22cbc8dab2289c705197f905c29a92d92a5c41e3b910afbcf4073"} Jan 22 06:03:01 crc kubenswrapper[4982]: I0122 06:03:01.205210 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-init-5cd76577f9-ghpnv" Jan 22 06:03:01 crc kubenswrapper[4982]: I0122 06:03:01.255355 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-init-5cd76577f9-ghpnv" podStartSLOduration=2.030664813 podStartE2EDuration="6.255335834s" podCreationTimestamp="2026-01-22 06:02:55 +0000 UTC" firstStartedPulling="2026-01-22 06:02:56.558579054 +0000 UTC m=+1037.397217057" lastFinishedPulling="2026-01-22 06:03:00.783250065 +0000 UTC m=+1041.621888078" observedRunningTime="2026-01-22 06:03:01.247991236 +0000 UTC m=+1042.086629339" watchObservedRunningTime="2026-01-22 06:03:01.255335834 +0000 UTC m=+1042.093973847" Jan 22 06:03:06 crc kubenswrapper[4982]: I0122 06:03:06.026709 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-init-5cd76577f9-ghpnv" Jan 22 06:03:18 crc kubenswrapper[4982]: I0122 06:03:18.973351 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 06:03:18 crc kubenswrapper[4982]: I0122 06:03:18.973900 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 06:03:18 crc kubenswrapper[4982]: I0122 06:03:18.973944 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 06:03:18 crc kubenswrapper[4982]: I0122 06:03:18.974523 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"249cd03cbc96f310822cba8e3d1a005f3e86ad00b55fa38e84dd3ee093cec041"} pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 06:03:18 crc kubenswrapper[4982]: I0122 06:03:18.974586 4982 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" containerID="cri-o://249cd03cbc96f310822cba8e3d1a005f3e86ad00b55fa38e84dd3ee093cec041" gracePeriod=600 Jan 22 06:03:20 crc kubenswrapper[4982]: I0122 06:03:20.406362 4982 generic.go:334] "Generic (PLEG): container finished" podID="2829369e-72ba-4637-853b-88f5cf242a0e" containerID="249cd03cbc96f310822cba8e3d1a005f3e86ad00b55fa38e84dd3ee093cec041" exitCode=0 Jan 22 06:03:20 crc kubenswrapper[4982]: I0122 06:03:20.406448 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerDied","Data":"249cd03cbc96f310822cba8e3d1a005f3e86ad00b55fa38e84dd3ee093cec041"} Jan 22 06:03:20 crc kubenswrapper[4982]: I0122 06:03:20.407142 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"e9300e314dc7d0c41bd73af6871e2c53b183fc3419e142f570e6c588d56f45bc"} Jan 22 06:03:20 crc kubenswrapper[4982]: I0122 06:03:20.407177 4982 scope.go:117] "RemoveContainer" containerID="17cf6eb07357fb9bc149ba254fa2fe7cf10635a5cee0a8371d5956f5efe8c33f" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.010092 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-69cf5d4557-22wjh"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.011764 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-7rhfh"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.012662 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-7rhfh" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.014841 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-69cf5d4557-22wjh" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.017542 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-b45d7bf98-hstbj"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.018639 4982 util.go:30] "No sandbox for pod can be found. 
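
This is the complete liveness loop for machine-config-daemon-gdpxx: the probe keeps failing with connection refused, kubelet declares the container unhealthy and kills it with a 600s grace period, PLEG reports ContainerDied followed by ContainerStarted for the replacement, and RemoveContainer discards the previous container ID. A sketch of the consecutive-failure counting that gates the restart, assuming a failureThreshold of 3 (illustrative; this pod's probe spec is not shown in the log, and this is not kubelet's prober code):

package sketch

// observeProbe folds one probe result into the failure counter; a true return
// corresponds to the "Killing container with a grace period" step above.
func observeProbe(healthy bool, consecutiveFailures *int, failureThreshold int) (restart bool) {
	if healthy {
		*consecutiveFailures = 0
		return false
	}
	*consecutiveFailures++
	return *consecutiveFailures >= failureThreshold
}
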
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-hstbj" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.020058 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-bgc7t" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.020308 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-mnmj8" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.022836 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-fkk52" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.037504 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-69cf5d4557-22wjh"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.045831 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-7rhfh"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.047281 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-b45d7bf98-hstbj"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.063592 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-78fdd796fd-hjbqs"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.064956 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-hjbqs" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.067608 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-wsgqp" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.076295 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-594c8c9d5d-f4l4h"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.077245 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-f4l4h" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.080919 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-78fdd796fd-hjbqs"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.083586 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fq66k\" (UniqueName: \"kubernetes.io/projected/e884b772-f17f-410c-9a2a-1b87fcda735b-kube-api-access-fq66k\") pod \"glance-operator-controller-manager-78fdd796fd-hjbqs\" (UID: \"e884b772-f17f-410c-9a2a-1b87fcda735b\") " pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-hjbqs" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.083734 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzs5l\" (UniqueName: \"kubernetes.io/projected/a7fe3154-ef99-4ce9-9151-605f734269f1-kube-api-access-jzs5l\") pod \"cinder-operator-controller-manager-69cf5d4557-22wjh\" (UID: \"a7fe3154-ef99-4ce9-9151-605f734269f1\") " pod="openstack-operators/cinder-operator-controller-manager-69cf5d4557-22wjh" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.083774 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2mc2\" (UniqueName: \"kubernetes.io/projected/18f9bc75-69ce-4299-ab4a-c280781b056c-kube-api-access-f2mc2\") pod \"barbican-operator-controller-manager-59dd8b7cbf-7rhfh\" (UID: \"18f9bc75-69ce-4299-ab4a-c280781b056c\") " pod="openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-7rhfh" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.083801 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmpr7\" (UniqueName: \"kubernetes.io/projected/ca35f48e-eddd-46f2-8369-f1e642432834-kube-api-access-xmpr7\") pod \"designate-operator-controller-manager-b45d7bf98-hstbj\" (UID: \"ca35f48e-eddd-46f2-8369-f1e642432834\") " pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-hstbj" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.084525 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-72bzz" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.116763 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-594c8c9d5d-f4l4h"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.124987 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-77d5c5b54f-kxwmj"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.125966 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-kxwmj" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.131156 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-gvv45" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.141944 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-54ccf4f85d-6zh6c"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.143213 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-6zh6c" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.148195 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-wvrw5" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.148372 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.170965 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-7zk22"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.172016 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-7zk22" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.174066 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-xzh9h" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.174517 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-54ccf4f85d-6zh6c"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.187076 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fq66k\" (UniqueName: \"kubernetes.io/projected/e884b772-f17f-410c-9a2a-1b87fcda735b-kube-api-access-fq66k\") pod \"glance-operator-controller-manager-78fdd796fd-hjbqs\" (UID: \"e884b772-f17f-410c-9a2a-1b87fcda735b\") " pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-hjbqs" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.187156 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5wh6v\" (UniqueName: \"kubernetes.io/projected/bca8ffde-e486-4000-8d0b-39a275b64803-kube-api-access-5wh6v\") pod \"heat-operator-controller-manager-594c8c9d5d-f4l4h\" (UID: \"bca8ffde-e486-4000-8d0b-39a275b64803\") " pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-f4l4h" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.187179 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnx49\" (UniqueName: \"kubernetes.io/projected/a1825b14-129b-459a-b08b-7e62c7f2414a-kube-api-access-vnx49\") pod \"ironic-operator-controller-manager-69d6c9f5b8-7zk22\" (UID: \"a1825b14-129b-459a-b08b-7e62c7f2414a\") " pod="openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-7zk22" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.187223 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzs5l\" (UniqueName: \"kubernetes.io/projected/a7fe3154-ef99-4ce9-9151-605f734269f1-kube-api-access-jzs5l\") pod \"cinder-operator-controller-manager-69cf5d4557-22wjh\" (UID: \"a7fe3154-ef99-4ce9-9151-605f734269f1\") " pod="openstack-operators/cinder-operator-controller-manager-69cf5d4557-22wjh" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.187240 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n97b8\" (UniqueName: \"kubernetes.io/projected/ef693655-09f3-4809-a4b2-8930551fb3f1-kube-api-access-n97b8\") pod \"horizon-operator-controller-manager-77d5c5b54f-kxwmj\" (UID: 
\"ef693655-09f3-4809-a4b2-8930551fb3f1\") " pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-kxwmj" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.187258 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/84a649d7-4c8c-4da7-93b3-9e537b0207ee-cert\") pod \"infra-operator-controller-manager-54ccf4f85d-6zh6c\" (UID: \"84a649d7-4c8c-4da7-93b3-9e537b0207ee\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-6zh6c" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.187275 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqrd9\" (UniqueName: \"kubernetes.io/projected/84a649d7-4c8c-4da7-93b3-9e537b0207ee-kube-api-access-dqrd9\") pod \"infra-operator-controller-manager-54ccf4f85d-6zh6c\" (UID: \"84a649d7-4c8c-4da7-93b3-9e537b0207ee\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-6zh6c" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.187292 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2mc2\" (UniqueName: \"kubernetes.io/projected/18f9bc75-69ce-4299-ab4a-c280781b056c-kube-api-access-f2mc2\") pod \"barbican-operator-controller-manager-59dd8b7cbf-7rhfh\" (UID: \"18f9bc75-69ce-4299-ab4a-c280781b056c\") " pod="openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-7rhfh" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.187308 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmpr7\" (UniqueName: \"kubernetes.io/projected/ca35f48e-eddd-46f2-8369-f1e642432834-kube-api-access-xmpr7\") pod \"designate-operator-controller-manager-b45d7bf98-hstbj\" (UID: \"ca35f48e-eddd-46f2-8369-f1e642432834\") " pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-hstbj" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.224261 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-77d5c5b54f-kxwmj"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.252964 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fq66k\" (UniqueName: \"kubernetes.io/projected/e884b772-f17f-410c-9a2a-1b87fcda735b-kube-api-access-fq66k\") pod \"glance-operator-controller-manager-78fdd796fd-hjbqs\" (UID: \"e884b772-f17f-410c-9a2a-1b87fcda735b\") " pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-hjbqs" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.256146 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2mc2\" (UniqueName: \"kubernetes.io/projected/18f9bc75-69ce-4299-ab4a-c280781b056c-kube-api-access-f2mc2\") pod \"barbican-operator-controller-manager-59dd8b7cbf-7rhfh\" (UID: \"18f9bc75-69ce-4299-ab4a-c280781b056c\") " pod="openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-7rhfh" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.256183 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzs5l\" (UniqueName: \"kubernetes.io/projected/a7fe3154-ef99-4ce9-9151-605f734269f1-kube-api-access-jzs5l\") pod \"cinder-operator-controller-manager-69cf5d4557-22wjh\" (UID: \"a7fe3154-ef99-4ce9-9151-605f734269f1\") " pod="openstack-operators/cinder-operator-controller-manager-69cf5d4557-22wjh" Jan 22 06:03:40 crc 
kubenswrapper[4982]: I0122 06:03:40.256833 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmpr7\" (UniqueName: \"kubernetes.io/projected/ca35f48e-eddd-46f2-8369-f1e642432834-kube-api-access-xmpr7\") pod \"designate-operator-controller-manager-b45d7bf98-hstbj\" (UID: \"ca35f48e-eddd-46f2-8369-f1e642432834\") " pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-hstbj" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.279200 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-7zk22"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.289791 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n97b8\" (UniqueName: \"kubernetes.io/projected/ef693655-09f3-4809-a4b2-8930551fb3f1-kube-api-access-n97b8\") pod \"horizon-operator-controller-manager-77d5c5b54f-kxwmj\" (UID: \"ef693655-09f3-4809-a4b2-8930551fb3f1\") " pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-kxwmj" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.289837 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/84a649d7-4c8c-4da7-93b3-9e537b0207ee-cert\") pod \"infra-operator-controller-manager-54ccf4f85d-6zh6c\" (UID: \"84a649d7-4c8c-4da7-93b3-9e537b0207ee\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-6zh6c" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.289879 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqrd9\" (UniqueName: \"kubernetes.io/projected/84a649d7-4c8c-4da7-93b3-9e537b0207ee-kube-api-access-dqrd9\") pod \"infra-operator-controller-manager-54ccf4f85d-6zh6c\" (UID: \"84a649d7-4c8c-4da7-93b3-9e537b0207ee\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-6zh6c" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.289938 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5wh6v\" (UniqueName: \"kubernetes.io/projected/bca8ffde-e486-4000-8d0b-39a275b64803-kube-api-access-5wh6v\") pod \"heat-operator-controller-manager-594c8c9d5d-f4l4h\" (UID: \"bca8ffde-e486-4000-8d0b-39a275b64803\") " pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-f4l4h" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.289962 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnx49\" (UniqueName: \"kubernetes.io/projected/a1825b14-129b-459a-b08b-7e62c7f2414a-kube-api-access-vnx49\") pod \"ironic-operator-controller-manager-69d6c9f5b8-7zk22\" (UID: \"a1825b14-129b-459a-b08b-7e62c7f2414a\") " pod="openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-7zk22" Jan 22 06:03:40 crc kubenswrapper[4982]: E0122 06:03:40.290633 4982 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 22 06:03:40 crc kubenswrapper[4982]: E0122 06:03:40.290686 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/84a649d7-4c8c-4da7-93b3-9e537b0207ee-cert podName:84a649d7-4c8c-4da7-93b3-9e537b0207ee nodeName:}" failed. No retries permitted until 2026-01-22 06:03:40.790665554 +0000 UTC m=+1081.629303557 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/84a649d7-4c8c-4da7-93b3-9e537b0207ee-cert") pod "infra-operator-controller-manager-54ccf4f85d-6zh6c" (UID: "84a649d7-4c8c-4da7-93b3-9e537b0207ee") : secret "infra-operator-webhook-server-cert" not found Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.312099 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-b8b6d4659-vkn5h"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.313219 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-vkn5h" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.328141 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqrd9\" (UniqueName: \"kubernetes.io/projected/84a649d7-4c8c-4da7-93b3-9e537b0207ee-kube-api-access-dqrd9\") pod \"infra-operator-controller-manager-54ccf4f85d-6zh6c\" (UID: \"84a649d7-4c8c-4da7-93b3-9e537b0207ee\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-6zh6c" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.333922 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-78c6999f6f-t276t"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.334711 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnx49\" (UniqueName: \"kubernetes.io/projected/a1825b14-129b-459a-b08b-7e62c7f2414a-kube-api-access-vnx49\") pod \"ironic-operator-controller-manager-69d6c9f5b8-7zk22\" (UID: \"a1825b14-129b-459a-b08b-7e62c7f2414a\") " pod="openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-7zk22" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.334765 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-t276t" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.338964 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-c87fff755-tklld"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.339816 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-tklld" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.341437 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n97b8\" (UniqueName: \"kubernetes.io/projected/ef693655-09f3-4809-a4b2-8930551fb3f1-kube-api-access-n97b8\") pod \"horizon-operator-controller-manager-77d5c5b54f-kxwmj\" (UID: \"ef693655-09f3-4809-a4b2-8930551fb3f1\") " pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-kxwmj" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.341711 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-n4fjb" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.341923 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-p6g55" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.350267 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-tqwcd" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.351848 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-78c6999f6f-t276t"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.359202 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-b8b6d4659-vkn5h"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.359479 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-69cf5d4557-22wjh" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.360516 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5wh6v\" (UniqueName: \"kubernetes.io/projected/bca8ffde-e486-4000-8d0b-39a275b64803-kube-api-access-5wh6v\") pod \"heat-operator-controller-manager-594c8c9d5d-f4l4h\" (UID: \"bca8ffde-e486-4000-8d0b-39a275b64803\") " pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-f4l4h" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.370880 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-c87fff755-tklld"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.375910 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5d8f59fb49-6pr97"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.376690 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-6pr97" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.379880 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-7rhfh" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.386619 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5d8f59fb49-6pr97"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.390307 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xfgjb\" (UniqueName: \"kubernetes.io/projected/d6afaf94-bf6d-4559-ab50-c2320aade035-kube-api-access-xfgjb\") pod \"keystone-operator-controller-manager-b8b6d4659-vkn5h\" (UID: \"d6afaf94-bf6d-4559-ab50-c2320aade035\") " pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-vkn5h" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.390364 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9qkzh\" (UniqueName: \"kubernetes.io/projected/99845353-4c2d-4caa-bf12-c396d6b91a82-kube-api-access-9qkzh\") pod \"neutron-operator-controller-manager-5d8f59fb49-6pr97\" (UID: \"99845353-4c2d-4caa-bf12-c396d6b91a82\") " pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-6pr97" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.390412 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xh889\" (UniqueName: \"kubernetes.io/projected/6d6c68f5-4111-471b-875c-0d498c4b046d-kube-api-access-xh889\") pod \"mariadb-operator-controller-manager-c87fff755-tklld\" (UID: \"6d6c68f5-4111-471b-875c-0d498c4b046d\") " pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-tklld" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.390448 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwdpt\" (UniqueName: \"kubernetes.io/projected/e63b183e-a84a-4964-a082-5f7768d8c472-kube-api-access-pwdpt\") pod \"manila-operator-controller-manager-78c6999f6f-t276t\" (UID: \"e63b183e-a84a-4964-a082-5f7768d8c472\") " pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-t276t" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.394023 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-4vpdq" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.400772 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-hstbj" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.402909 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-6b8bc8d87d-rk69w"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.403820 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-6b8bc8d87d-rk69w" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.407558 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-j7hzc" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.413948 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7bd9774b6-f44cg"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.417223 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-f44cg" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.430925 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-6b8bc8d87d-rk69w"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.430993 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7bd9774b6-f44cg"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.434004 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-55db956ddc-284c4"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.434167 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-hjbqs" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.435002 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-284c4" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.437199 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-vg854" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.438614 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-ttnhs" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.441448 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.442097 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.444920 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.446688 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-55db956ddc-284c4"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.451924 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-mp622" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.452036 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.467489 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-f4l4h" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.467941 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-5d646b7d76-9nfxp"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.468898 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-547cbdb99f-qgxkb"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.469439 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-qgxkb" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.480913 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5d646b7d76-9nfxp"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.481051 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-9nfxp" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.483243 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-h5kcr" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.487689 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-547cbdb99f-qgxkb"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.491059 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vp9rq\" (UniqueName: \"kubernetes.io/projected/7cbf8f35-c5cf-4e1c-8718-023380d9ac26-kube-api-access-vp9rq\") pod \"nova-operator-controller-manager-6b8bc8d87d-rk69w\" (UID: \"7cbf8f35-c5cf-4e1c-8718-023380d9ac26\") " pod="openstack-operators/nova-operator-controller-manager-6b8bc8d87d-rk69w" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.491093 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xh889\" (UniqueName: \"kubernetes.io/projected/6d6c68f5-4111-471b-875c-0d498c4b046d-kube-api-access-xh889\") pod \"mariadb-operator-controller-manager-c87fff755-tklld\" (UID: \"6d6c68f5-4111-471b-875c-0d498c4b046d\") " pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-tklld" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.491112 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ndbg\" (UniqueName: \"kubernetes.io/projected/1fa2993a-2231-445d-aa77-7190fb3a8fcb-kube-api-access-8ndbg\") pod \"placement-operator-controller-manager-5d646b7d76-9nfxp\" (UID: \"1fa2993a-2231-445d-aa77-7190fb3a8fcb\") " pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-9nfxp" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.491137 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vsq8k\" (UniqueName: \"kubernetes.io/projected/3bde3f49-9b8e-4cb1-8eac-6eb047eda094-kube-api-access-vsq8k\") pod \"swift-operator-controller-manager-547cbdb99f-qgxkb\" (UID: \"3bde3f49-9b8e-4cb1-8eac-6eb047eda094\") " pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-qgxkb" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.491166 4982 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-pwdpt\" (UniqueName: \"kubernetes.io/projected/e63b183e-a84a-4964-a082-5f7768d8c472-kube-api-access-pwdpt\") pod \"manila-operator-controller-manager-78c6999f6f-t276t\" (UID: \"e63b183e-a84a-4964-a082-5f7768d8c472\") " pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-t276t" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.491190 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xfgjb\" (UniqueName: \"kubernetes.io/projected/d6afaf94-bf6d-4559-ab50-c2320aade035-kube-api-access-xfgjb\") pod \"keystone-operator-controller-manager-b8b6d4659-vkn5h\" (UID: \"d6afaf94-bf6d-4559-ab50-c2320aade035\") " pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-vkn5h" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.491206 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/47ed3df3-a23e-4021-b786-b99d1b710639-cert\") pod \"openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g\" (UID: \"47ed3df3-a23e-4021-b786-b99d1b710639\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.491224 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4m8j\" (UniqueName: \"kubernetes.io/projected/47ed3df3-a23e-4021-b786-b99d1b710639-kube-api-access-q4m8j\") pod \"openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g\" (UID: \"47ed3df3-a23e-4021-b786-b99d1b710639\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.491266 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbrjp\" (UniqueName: \"kubernetes.io/projected/099ae039-177c-4335-a611-990dcdf9c655-kube-api-access-bbrjp\") pod \"ovn-operator-controller-manager-55db956ddc-284c4\" (UID: \"099ae039-177c-4335-a611-990dcdf9c655\") " pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-284c4" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.491292 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvq89\" (UniqueName: \"kubernetes.io/projected/4c78936d-aa34-45c2-8e85-67f6de306d0f-kube-api-access-wvq89\") pod \"octavia-operator-controller-manager-7bd9774b6-f44cg\" (UID: \"4c78936d-aa34-45c2-8e85-67f6de306d0f\") " pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-f44cg" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.491323 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9qkzh\" (UniqueName: \"kubernetes.io/projected/99845353-4c2d-4caa-bf12-c396d6b91a82-kube-api-access-9qkzh\") pod \"neutron-operator-controller-manager-5d8f59fb49-6pr97\" (UID: \"99845353-4c2d-4caa-bf12-c396d6b91a82\") " pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-6pr97" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.492036 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-j2s6m" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.498184 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-kxwmj" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.506869 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-85cd9769bb-jmsqf"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.508586 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-jmsqf" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.517256 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-tskt8" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.518499 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-85cd9769bb-jmsqf"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.534750 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-69797bbcbd-k7ffz"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.535538 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-k7ffz" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.542515 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xh889\" (UniqueName: \"kubernetes.io/projected/6d6c68f5-4111-471b-875c-0d498c4b046d-kube-api-access-xh889\") pod \"mariadb-operator-controller-manager-c87fff755-tklld\" (UID: \"6d6c68f5-4111-471b-875c-0d498c4b046d\") " pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-tklld" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.543168 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xfgjb\" (UniqueName: \"kubernetes.io/projected/d6afaf94-bf6d-4559-ab50-c2320aade035-kube-api-access-xfgjb\") pod \"keystone-operator-controller-manager-b8b6d4659-vkn5h\" (UID: \"d6afaf94-bf6d-4559-ab50-c2320aade035\") " pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-vkn5h" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.543390 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-kx7lc" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.545415 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9qkzh\" (UniqueName: \"kubernetes.io/projected/99845353-4c2d-4caa-bf12-c396d6b91a82-kube-api-access-9qkzh\") pod \"neutron-operator-controller-manager-5d8f59fb49-6pr97\" (UID: \"99845353-4c2d-4caa-bf12-c396d6b91a82\") " pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-6pr97" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.557532 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-7zk22" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.558697 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-tklld" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.563364 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwdpt\" (UniqueName: \"kubernetes.io/projected/e63b183e-a84a-4964-a082-5f7768d8c472-kube-api-access-pwdpt\") pod \"manila-operator-controller-manager-78c6999f6f-t276t\" (UID: \"e63b183e-a84a-4964-a082-5f7768d8c472\") " pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-t276t" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.568240 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-69797bbcbd-k7ffz"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.590890 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-6pr97" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.592035 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvxbf\" (UniqueName: \"kubernetes.io/projected/afdd5f3a-706e-4f4f-930e-c952e4b0c6dc-kube-api-access-fvxbf\") pod \"test-operator-controller-manager-69797bbcbd-k7ffz\" (UID: \"afdd5f3a-706e-4f4f-930e-c952e4b0c6dc\") " pod="openstack-operators/test-operator-controller-manager-69797bbcbd-k7ffz" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.592843 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vp9rq\" (UniqueName: \"kubernetes.io/projected/7cbf8f35-c5cf-4e1c-8718-023380d9ac26-kube-api-access-vp9rq\") pod \"nova-operator-controller-manager-6b8bc8d87d-rk69w\" (UID: \"7cbf8f35-c5cf-4e1c-8718-023380d9ac26\") " pod="openstack-operators/nova-operator-controller-manager-6b8bc8d87d-rk69w" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.593122 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ndbg\" (UniqueName: \"kubernetes.io/projected/1fa2993a-2231-445d-aa77-7190fb3a8fcb-kube-api-access-8ndbg\") pod \"placement-operator-controller-manager-5d646b7d76-9nfxp\" (UID: \"1fa2993a-2231-445d-aa77-7190fb3a8fcb\") " pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-9nfxp" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.593639 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vsq8k\" (UniqueName: \"kubernetes.io/projected/3bde3f49-9b8e-4cb1-8eac-6eb047eda094-kube-api-access-vsq8k\") pod \"swift-operator-controller-manager-547cbdb99f-qgxkb\" (UID: \"3bde3f49-9b8e-4cb1-8eac-6eb047eda094\") " pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-qgxkb" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.593668 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46w2v\" (UniqueName: \"kubernetes.io/projected/030e0626-7169-45ea-9981-78c910b04226-kube-api-access-46w2v\") pod \"telemetry-operator-controller-manager-85cd9769bb-jmsqf\" (UID: \"030e0626-7169-45ea-9981-78c910b04226\") " pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-jmsqf" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.593702 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/47ed3df3-a23e-4021-b786-b99d1b710639-cert\") pod 
\"openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g\" (UID: \"47ed3df3-a23e-4021-b786-b99d1b710639\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.593721 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4m8j\" (UniqueName: \"kubernetes.io/projected/47ed3df3-a23e-4021-b786-b99d1b710639-kube-api-access-q4m8j\") pod \"openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g\" (UID: \"47ed3df3-a23e-4021-b786-b99d1b710639\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.593740 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbrjp\" (UniqueName: \"kubernetes.io/projected/099ae039-177c-4335-a611-990dcdf9c655-kube-api-access-bbrjp\") pod \"ovn-operator-controller-manager-55db956ddc-284c4\" (UID: \"099ae039-177c-4335-a611-990dcdf9c655\") " pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-284c4" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.593766 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvq89\" (UniqueName: \"kubernetes.io/projected/4c78936d-aa34-45c2-8e85-67f6de306d0f-kube-api-access-wvq89\") pod \"octavia-operator-controller-manager-7bd9774b6-f44cg\" (UID: \"4c78936d-aa34-45c2-8e85-67f6de306d0f\") " pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-f44cg" Jan 22 06:03:40 crc kubenswrapper[4982]: E0122 06:03:40.596229 4982 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 22 06:03:40 crc kubenswrapper[4982]: E0122 06:03:40.596299 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/47ed3df3-a23e-4021-b786-b99d1b710639-cert podName:47ed3df3-a23e-4021-b786-b99d1b710639 nodeName:}" failed. No retries permitted until 2026-01-22 06:03:41.096261123 +0000 UTC m=+1081.934899126 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/47ed3df3-a23e-4021-b786-b99d1b710639-cert") pod "openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g" (UID: "47ed3df3-a23e-4021-b786-b99d1b710639") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.635218 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ndbg\" (UniqueName: \"kubernetes.io/projected/1fa2993a-2231-445d-aa77-7190fb3a8fcb-kube-api-access-8ndbg\") pod \"placement-operator-controller-manager-5d646b7d76-9nfxp\" (UID: \"1fa2993a-2231-445d-aa77-7190fb3a8fcb\") " pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-9nfxp" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.638225 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvq89\" (UniqueName: \"kubernetes.io/projected/4c78936d-aa34-45c2-8e85-67f6de306d0f-kube-api-access-wvq89\") pod \"octavia-operator-controller-manager-7bd9774b6-f44cg\" (UID: \"4c78936d-aa34-45c2-8e85-67f6de306d0f\") " pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-f44cg" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.642436 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vp9rq\" (UniqueName: \"kubernetes.io/projected/7cbf8f35-c5cf-4e1c-8718-023380d9ac26-kube-api-access-vp9rq\") pod \"nova-operator-controller-manager-6b8bc8d87d-rk69w\" (UID: \"7cbf8f35-c5cf-4e1c-8718-023380d9ac26\") " pod="openstack-operators/nova-operator-controller-manager-6b8bc8d87d-rk69w" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.642850 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-f44cg" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.663395 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vsq8k\" (UniqueName: \"kubernetes.io/projected/3bde3f49-9b8e-4cb1-8eac-6eb047eda094-kube-api-access-vsq8k\") pod \"swift-operator-controller-manager-547cbdb99f-qgxkb\" (UID: \"3bde3f49-9b8e-4cb1-8eac-6eb047eda094\") " pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-qgxkb" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.681252 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4m8j\" (UniqueName: \"kubernetes.io/projected/47ed3df3-a23e-4021-b786-b99d1b710639-kube-api-access-q4m8j\") pod \"openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g\" (UID: \"47ed3df3-a23e-4021-b786-b99d1b710639\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.685370 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbrjp\" (UniqueName: \"kubernetes.io/projected/099ae039-177c-4335-a611-990dcdf9c655-kube-api-access-bbrjp\") pod \"ovn-operator-controller-manager-55db956ddc-284c4\" (UID: \"099ae039-177c-4335-a611-990dcdf9c655\") " pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-284c4" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.699390 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46w2v\" (UniqueName: \"kubernetes.io/projected/030e0626-7169-45ea-9981-78c910b04226-kube-api-access-46w2v\") pod \"telemetry-operator-controller-manager-85cd9769bb-jmsqf\" (UID: \"030e0626-7169-45ea-9981-78c910b04226\") " pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-jmsqf" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.699530 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvxbf\" (UniqueName: \"kubernetes.io/projected/afdd5f3a-706e-4f4f-930e-c952e4b0c6dc-kube-api-access-fvxbf\") pod \"test-operator-controller-manager-69797bbcbd-k7ffz\" (UID: \"afdd5f3a-706e-4f4f-930e-c952e4b0c6dc\") " pod="openstack-operators/test-operator-controller-manager-69797bbcbd-k7ffz" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.728319 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46w2v\" (UniqueName: \"kubernetes.io/projected/030e0626-7169-45ea-9981-78c910b04226-kube-api-access-46w2v\") pod \"telemetry-operator-controller-manager-85cd9769bb-jmsqf\" (UID: \"030e0626-7169-45ea-9981-78c910b04226\") " pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-jmsqf" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.738536 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-vkn5h" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.739498 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-5ffb9c6597-prb7h"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.748249 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvxbf\" (UniqueName: \"kubernetes.io/projected/afdd5f3a-706e-4f4f-930e-c952e4b0c6dc-kube-api-access-fvxbf\") pod \"test-operator-controller-manager-69797bbcbd-k7ffz\" (UID: \"afdd5f3a-706e-4f4f-930e-c952e4b0c6dc\") " pod="openstack-operators/test-operator-controller-manager-69797bbcbd-k7ffz" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.754951 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-prb7h" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.758240 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-bhx98" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.772264 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-5ffb9c6597-prb7h"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.786092 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-284c4" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.787096 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-t276t" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.803104 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/84a649d7-4c8c-4da7-93b3-9e537b0207ee-cert\") pod \"infra-operator-controller-manager-54ccf4f85d-6zh6c\" (UID: \"84a649d7-4c8c-4da7-93b3-9e537b0207ee\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-6zh6c" Jan 22 06:03:40 crc kubenswrapper[4982]: E0122 06:03:40.803503 4982 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 22 06:03:40 crc kubenswrapper[4982]: E0122 06:03:40.803556 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/84a649d7-4c8c-4da7-93b3-9e537b0207ee-cert podName:84a649d7-4c8c-4da7-93b3-9e537b0207ee nodeName:}" failed. No retries permitted until 2026-01-22 06:03:41.803537996 +0000 UTC m=+1082.642175999 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/84a649d7-4c8c-4da7-93b3-9e537b0207ee-cert") pod "infra-operator-controller-manager-54ccf4f85d-6zh6c" (UID: "84a649d7-4c8c-4da7-93b3-9e537b0207ee") : secret "infra-operator-webhook-server-cert" not found Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.854067 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-qgxkb" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.895086 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-9nfxp" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.900641 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-647bb87bbd-fvz54"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.901650 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-647bb87bbd-fvz54" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.903943 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.904360 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-mnt65" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.904548 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.905854 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7zs5v\" (UniqueName: \"kubernetes.io/projected/8b31e08d-b4f8-482f-b413-25897c734299-kube-api-access-7zs5v\") pod \"watcher-operator-controller-manager-5ffb9c6597-prb7h\" (UID: \"8b31e08d-b4f8-482f-b413-25897c734299\") " pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-prb7h" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.916301 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-6b8bc8d87d-rk69w" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.933142 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-647bb87bbd-fvz54"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.944556 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4dbcc"] Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.945519 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4dbcc" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.949791 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-kvnk5" Jan 22 06:03:40 crc kubenswrapper[4982]: I0122 06:03:40.955926 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4dbcc"] Jan 22 06:03:41 crc kubenswrapper[4982]: I0122 06:03:40.997828 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-jmsqf" Jan 22 06:03:41 crc kubenswrapper[4982]: I0122 06:03:41.007023 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8rd7\" (UniqueName: \"kubernetes.io/projected/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-kube-api-access-z8rd7\") pod \"openstack-operator-controller-manager-647bb87bbd-fvz54\" (UID: \"d6294b08-62b7-465e-a5af-08f9bf1e5ff8\") " pod="openstack-operators/openstack-operator-controller-manager-647bb87bbd-fvz54" Jan 22 06:03:41 crc kubenswrapper[4982]: I0122 06:03:41.007906 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-webhook-certs\") pod \"openstack-operator-controller-manager-647bb87bbd-fvz54\" (UID: \"d6294b08-62b7-465e-a5af-08f9bf1e5ff8\") " pod="openstack-operators/openstack-operator-controller-manager-647bb87bbd-fvz54" Jan 22 06:03:41 crc kubenswrapper[4982]: I0122 06:03:41.008001 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-metrics-certs\") pod \"openstack-operator-controller-manager-647bb87bbd-fvz54\" (UID: \"d6294b08-62b7-465e-a5af-08f9bf1e5ff8\") " pod="openstack-operators/openstack-operator-controller-manager-647bb87bbd-fvz54" Jan 22 06:03:41 crc kubenswrapper[4982]: I0122 06:03:41.008025 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7zs5v\" (UniqueName: \"kubernetes.io/projected/8b31e08d-b4f8-482f-b413-25897c734299-kube-api-access-7zs5v\") pod \"watcher-operator-controller-manager-5ffb9c6597-prb7h\" (UID: \"8b31e08d-b4f8-482f-b413-25897c734299\") " pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-prb7h" Jan 22 06:03:41 crc kubenswrapper[4982]: I0122 06:03:41.023415 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-k7ffz" Jan 22 06:03:41 crc kubenswrapper[4982]: I0122 06:03:41.033202 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7zs5v\" (UniqueName: \"kubernetes.io/projected/8b31e08d-b4f8-482f-b413-25897c734299-kube-api-access-7zs5v\") pod \"watcher-operator-controller-manager-5ffb9c6597-prb7h\" (UID: \"8b31e08d-b4f8-482f-b413-25897c734299\") " pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-prb7h" Jan 22 06:03:41 crc kubenswrapper[4982]: I0122 06:03:41.044799 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-prb7h" Jan 22 06:03:41 crc kubenswrapper[4982]: I0122 06:03:41.110502 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-metrics-certs\") pod \"openstack-operator-controller-manager-647bb87bbd-fvz54\" (UID: \"d6294b08-62b7-465e-a5af-08f9bf1e5ff8\") " pod="openstack-operators/openstack-operator-controller-manager-647bb87bbd-fvz54" Jan 22 06:03:41 crc kubenswrapper[4982]: I0122 06:03:41.110572 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8rd7\" (UniqueName: \"kubernetes.io/projected/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-kube-api-access-z8rd7\") pod \"openstack-operator-controller-manager-647bb87bbd-fvz54\" (UID: \"d6294b08-62b7-465e-a5af-08f9bf1e5ff8\") " pod="openstack-operators/openstack-operator-controller-manager-647bb87bbd-fvz54" Jan 22 06:03:41 crc kubenswrapper[4982]: I0122 06:03:41.110600 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxwtv\" (UniqueName: \"kubernetes.io/projected/bd17bdc3-15ba-47d5-88f9-56336faa71bf-kube-api-access-zxwtv\") pod \"rabbitmq-cluster-operator-manager-668c99d594-4dbcc\" (UID: \"bd17bdc3-15ba-47d5-88f9-56336faa71bf\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4dbcc" Jan 22 06:03:41 crc kubenswrapper[4982]: I0122 06:03:41.110671 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-webhook-certs\") pod \"openstack-operator-controller-manager-647bb87bbd-fvz54\" (UID: \"d6294b08-62b7-465e-a5af-08f9bf1e5ff8\") " pod="openstack-operators/openstack-operator-controller-manager-647bb87bbd-fvz54" Jan 22 06:03:41 crc kubenswrapper[4982]: I0122 06:03:41.110702 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/47ed3df3-a23e-4021-b786-b99d1b710639-cert\") pod \"openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g\" (UID: \"47ed3df3-a23e-4021-b786-b99d1b710639\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g" Jan 22 06:03:41 crc kubenswrapper[4982]: E0122 06:03:41.110814 4982 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 22 06:03:41 crc kubenswrapper[4982]: E0122 06:03:41.110893 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/47ed3df3-a23e-4021-b786-b99d1b710639-cert podName:47ed3df3-a23e-4021-b786-b99d1b710639 nodeName:}" failed. No retries permitted until 2026-01-22 06:03:42.110851611 +0000 UTC m=+1082.949489614 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/47ed3df3-a23e-4021-b786-b99d1b710639-cert") pod "openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g" (UID: "47ed3df3-a23e-4021-b786-b99d1b710639") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 22 06:03:41 crc kubenswrapper[4982]: E0122 06:03:41.111222 4982 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 22 06:03:41 crc kubenswrapper[4982]: E0122 06:03:41.111249 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-metrics-certs podName:d6294b08-62b7-465e-a5af-08f9bf1e5ff8 nodeName:}" failed. No retries permitted until 2026-01-22 06:03:41.611240413 +0000 UTC m=+1082.449878416 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-metrics-certs") pod "openstack-operator-controller-manager-647bb87bbd-fvz54" (UID: "d6294b08-62b7-465e-a5af-08f9bf1e5ff8") : secret "metrics-server-cert" not found Jan 22 06:03:41 crc kubenswrapper[4982]: E0122 06:03:41.111665 4982 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 22 06:03:41 crc kubenswrapper[4982]: E0122 06:03:41.111691 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-webhook-certs podName:d6294b08-62b7-465e-a5af-08f9bf1e5ff8 nodeName:}" failed. No retries permitted until 2026-01-22 06:03:41.611681825 +0000 UTC m=+1082.450319828 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-webhook-certs") pod "openstack-operator-controller-manager-647bb87bbd-fvz54" (UID: "d6294b08-62b7-465e-a5af-08f9bf1e5ff8") : secret "webhook-server-cert" not found Jan 22 06:03:41 crc kubenswrapper[4982]: I0122 06:03:41.135119 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8rd7\" (UniqueName: \"kubernetes.io/projected/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-kube-api-access-z8rd7\") pod \"openstack-operator-controller-manager-647bb87bbd-fvz54\" (UID: \"d6294b08-62b7-465e-a5af-08f9bf1e5ff8\") " pod="openstack-operators/openstack-operator-controller-manager-647bb87bbd-fvz54" Jan 22 06:03:41 crc kubenswrapper[4982]: I0122 06:03:41.211926 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxwtv\" (UniqueName: \"kubernetes.io/projected/bd17bdc3-15ba-47d5-88f9-56336faa71bf-kube-api-access-zxwtv\") pod \"rabbitmq-cluster-operator-manager-668c99d594-4dbcc\" (UID: \"bd17bdc3-15ba-47d5-88f9-56336faa71bf\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4dbcc" Jan 22 06:03:41 crc kubenswrapper[4982]: I0122 06:03:41.249270 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxwtv\" (UniqueName: \"kubernetes.io/projected/bd17bdc3-15ba-47d5-88f9-56336faa71bf-kube-api-access-zxwtv\") pod \"rabbitmq-cluster-operator-manager-668c99d594-4dbcc\" (UID: \"bd17bdc3-15ba-47d5-88f9-56336faa71bf\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4dbcc" Jan 22 06:03:41 crc kubenswrapper[4982]: I0122 06:03:41.356400 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-operators/cinder-operator-controller-manager-69cf5d4557-22wjh"] Jan 22 06:03:41 crc kubenswrapper[4982]: I0122 06:03:41.361038 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-7rhfh"] Jan 22 06:03:41 crc kubenswrapper[4982]: I0122 06:03:41.389719 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4dbcc" Jan 22 06:03:41 crc kubenswrapper[4982]: I0122 06:03:41.599269 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-7rhfh" event={"ID":"18f9bc75-69ce-4299-ab4a-c280781b056c","Type":"ContainerStarted","Data":"ada0e3684a144aba813f64fdd46efa8a6036348d181e37212bf3ff247afc18fb"} Jan 22 06:03:41 crc kubenswrapper[4982]: I0122 06:03:41.600901 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-69cf5d4557-22wjh" event={"ID":"a7fe3154-ef99-4ce9-9151-605f734269f1","Type":"ContainerStarted","Data":"f1e03612236b205257a672fa9d3fae61b0fb3bfded1ee98c8f7576c6c339da3d"} Jan 22 06:03:41 crc kubenswrapper[4982]: I0122 06:03:41.621153 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-webhook-certs\") pod \"openstack-operator-controller-manager-647bb87bbd-fvz54\" (UID: \"d6294b08-62b7-465e-a5af-08f9bf1e5ff8\") " pod="openstack-operators/openstack-operator-controller-manager-647bb87bbd-fvz54" Jan 22 06:03:41 crc kubenswrapper[4982]: I0122 06:03:41.621227 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-metrics-certs\") pod \"openstack-operator-controller-manager-647bb87bbd-fvz54\" (UID: \"d6294b08-62b7-465e-a5af-08f9bf1e5ff8\") " pod="openstack-operators/openstack-operator-controller-manager-647bb87bbd-fvz54" Jan 22 06:03:41 crc kubenswrapper[4982]: E0122 06:03:41.621382 4982 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 22 06:03:41 crc kubenswrapper[4982]: E0122 06:03:41.621438 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-metrics-certs podName:d6294b08-62b7-465e-a5af-08f9bf1e5ff8 nodeName:}" failed. No retries permitted until 2026-01-22 06:03:42.621424232 +0000 UTC m=+1083.460062225 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-metrics-certs") pod "openstack-operator-controller-manager-647bb87bbd-fvz54" (UID: "d6294b08-62b7-465e-a5af-08f9bf1e5ff8") : secret "metrics-server-cert" not found Jan 22 06:03:41 crc kubenswrapper[4982]: E0122 06:03:41.621458 4982 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 22 06:03:41 crc kubenswrapper[4982]: E0122 06:03:41.621579 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-webhook-certs podName:d6294b08-62b7-465e-a5af-08f9bf1e5ff8 nodeName:}" failed. No retries permitted until 2026-01-22 06:03:42.621554786 +0000 UTC m=+1083.460192869 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-webhook-certs") pod "openstack-operator-controller-manager-647bb87bbd-fvz54" (UID: "d6294b08-62b7-465e-a5af-08f9bf1e5ff8") : secret "webhook-server-cert" not found Jan 22 06:03:41 crc kubenswrapper[4982]: I0122 06:03:41.823834 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/84a649d7-4c8c-4da7-93b3-9e537b0207ee-cert\") pod \"infra-operator-controller-manager-54ccf4f85d-6zh6c\" (UID: \"84a649d7-4c8c-4da7-93b3-9e537b0207ee\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-6zh6c" Jan 22 06:03:41 crc kubenswrapper[4982]: E0122 06:03:41.824056 4982 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 22 06:03:41 crc kubenswrapper[4982]: E0122 06:03:41.824130 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/84a649d7-4c8c-4da7-93b3-9e537b0207ee-cert podName:84a649d7-4c8c-4da7-93b3-9e537b0207ee nodeName:}" failed. No retries permitted until 2026-01-22 06:03:43.82411124 +0000 UTC m=+1084.662749243 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/84a649d7-4c8c-4da7-93b3-9e537b0207ee-cert") pod "infra-operator-controller-manager-54ccf4f85d-6zh6c" (UID: "84a649d7-4c8c-4da7-93b3-9e537b0207ee") : secret "infra-operator-webhook-server-cert" not found Jan 22 06:03:41 crc kubenswrapper[4982]: I0122 06:03:41.963918 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-b45d7bf98-hstbj"] Jan 22 06:03:41 crc kubenswrapper[4982]: W0122 06:03:41.974225 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podca35f48e_eddd_46f2_8369_f1e642432834.slice/crio-b98cce10d35f4344ad240f1dc2259c7342b38fe72a780df6d86ed7118e76b346 WatchSource:0}: Error finding container b98cce10d35f4344ad240f1dc2259c7342b38fe72a780df6d86ed7118e76b346: Status 404 returned error can't find the container with id b98cce10d35f4344ad240f1dc2259c7342b38fe72a780df6d86ed7118e76b346 Jan 22 06:03:41 crc kubenswrapper[4982]: I0122 06:03:41.980333 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7bd9774b6-f44cg"] Jan 22 06:03:41 crc kubenswrapper[4982]: I0122 06:03:41.991456 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-c87fff755-tklld"] Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.050203 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-78fdd796fd-hjbqs"] Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.072147 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-594c8c9d5d-f4l4h"] Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.079022 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5d8f59fb49-6pr97"] Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.081430 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-7zk22"] Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 
06:03:42.093094 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-55db956ddc-284c4"] Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.131381 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/47ed3df3-a23e-4021-b786-b99d1b710639-cert\") pod \"openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g\" (UID: \"47ed3df3-a23e-4021-b786-b99d1b710639\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g" Jan 22 06:03:42 crc kubenswrapper[4982]: E0122 06:03:42.132825 4982 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 22 06:03:42 crc kubenswrapper[4982]: E0122 06:03:42.132904 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/47ed3df3-a23e-4021-b786-b99d1b710639-cert podName:47ed3df3-a23e-4021-b786-b99d1b710639 nodeName:}" failed. No retries permitted until 2026-01-22 06:03:44.132885736 +0000 UTC m=+1084.971523739 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/47ed3df3-a23e-4021-b786-b99d1b710639-cert") pod "openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g" (UID: "47ed3df3-a23e-4021-b786-b99d1b710639") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.141586 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-77d5c5b54f-kxwmj"] Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.165959 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-6b8bc8d87d-rk69w"] Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.175847 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5d646b7d76-9nfxp"] Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.314574 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-78c6999f6f-t276t"] Jan 22 06:03:42 crc kubenswrapper[4982]: E0122 06:03:42.325489 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:8bee4480babd6fd8f686e0ba52a304acb6ffb90f09c7c57e7f5df5f7658836d8,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-pwdpt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-78c6999f6f-t276t_openstack-operators(e63b183e-a84a-4964-a082-5f7768d8c472): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 22 06:03:42 crc kubenswrapper[4982]: E0122 06:03:42.326703 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-t276t" podUID="e63b183e-a84a-4964-a082-5f7768d8c472" Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.352221 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-85cd9769bb-jmsqf"] Jan 22 06:03:42 crc kubenswrapper[4982]: E0122 06:03:42.360891 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:e02722d7581bfe1c5fc13e2fa6811d8665102ba86635c77547abf6b933cde127,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-46w2v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-85cd9769bb-jmsqf_openstack-operators(030e0626-7169-45ea-9981-78c910b04226): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 22 06:03:42 crc kubenswrapper[4982]: E0122 06:03:42.362099 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-jmsqf" podUID="030e0626-7169-45ea-9981-78c910b04226" Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.372548 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-b8b6d4659-vkn5h"] Jan 22 06:03:42 crc kubenswrapper[4982]: W0122 06:03:42.384034 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podafdd5f3a_706e_4f4f_930e_c952e4b0c6dc.slice/crio-582f65f95f8f5734d81f9bbb1622ea87759bd449466fd4fd4466fd901c5c4bbd WatchSource:0}: Error finding container 582f65f95f8f5734d81f9bbb1622ea87759bd449466fd4fd4466fd901c5c4bbd: Status 404 returned error can't find the container with id 582f65f95f8f5734d81f9bbb1622ea87759bd449466fd4fd4466fd901c5c4bbd Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.385917 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-69797bbcbd-k7ffz"] Jan 22 06:03:42 crc kubenswrapper[4982]: W0122 06:03:42.387828 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd6afaf94_bf6d_4559_ab50_c2320aade035.slice/crio-089e765f83af2ca38f168f4fb0cef1329b738a9ff0e99407132bbe11068c41c0 WatchSource:0}: Error finding container 089e765f83af2ca38f168f4fb0cef1329b738a9ff0e99407132bbe11068c41c0: Status 404 returned error can't find the container with id 089e765f83af2ca38f168f4fb0cef1329b738a9ff0e99407132bbe11068c41c0 Jan 22 06:03:42 crc kubenswrapper[4982]: E0122 06:03:42.390431 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:8e340ff11922b38e811261de96982e1aff5f4eb8f225d1d9f5973025a4fe8349,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xfgjb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-b8b6d4659-vkn5h_openstack-operators(d6afaf94-bf6d-4559-ab50-c2320aade035): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 22 06:03:42 crc kubenswrapper[4982]: E0122 06:03:42.391989 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-vkn5h" podUID="d6afaf94-bf6d-4559-ab50-c2320aade035" Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.398031 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-5ffb9c6597-prb7h"] Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.403976 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-547cbdb99f-qgxkb"] Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.406323 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4dbcc"] Jan 22 06:03:42 crc kubenswrapper[4982]: E0122 06:03:42.424222 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:2d6d13b3c28e45c6bec980b8808dda8da4723ae87e66d04f53d52c3b3c51612b,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-7zs5v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-5ffb9c6597-prb7h_openstack-operators(8b31e08d-b4f8-482f-b413-25897c734299): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 22 06:03:42 crc kubenswrapper[4982]: E0122 06:03:42.424757 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zxwtv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-4dbcc_openstack-operators(bd17bdc3-15ba-47d5-88f9-56336faa71bf): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 22 06:03:42 crc kubenswrapper[4982]: E0122 06:03:42.425302 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-prb7h" podUID="8b31e08d-b4f8-482f-b413-25897c734299" Jan 22 06:03:42 crc kubenswrapper[4982]: E0122 06:03:42.426431 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4dbcc" podUID="bd17bdc3-15ba-47d5-88f9-56336faa71bf" Jan 22 06:03:42 crc kubenswrapper[4982]: E0122 06:03:42.428754 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:445e951df2f21df6d33a466f75917e0f6103052ae751ae11887136e8ab165922,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vsq8k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-547cbdb99f-qgxkb_openstack-operators(3bde3f49-9b8e-4cb1-8eac-6eb047eda094): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 22 06:03:42 crc kubenswrapper[4982]: E0122 06:03:42.430070 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-qgxkb" podUID="3bde3f49-9b8e-4cb1-8eac-6eb047eda094" Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.632048 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-f4l4h" event={"ID":"bca8ffde-e486-4000-8d0b-39a275b64803","Type":"ContainerStarted","Data":"b027eaa2cfd2728591e8316173afd0d4d540f3d6c2d0704cfccf58534fbf9d44"} Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.646609 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-webhook-certs\") pod \"openstack-operator-controller-manager-647bb87bbd-fvz54\" (UID: \"d6294b08-62b7-465e-a5af-08f9bf1e5ff8\") " pod="openstack-operators/openstack-operator-controller-manager-647bb87bbd-fvz54" Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.646711 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-metrics-certs\") pod \"openstack-operator-controller-manager-647bb87bbd-fvz54\" (UID: \"d6294b08-62b7-465e-a5af-08f9bf1e5ff8\") " pod="openstack-operators/openstack-operator-controller-manager-647bb87bbd-fvz54" Jan 22 06:03:42 crc kubenswrapper[4982]: E0122 06:03:42.646856 4982 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 22 06:03:42 crc kubenswrapper[4982]: E0122 06:03:42.646938 4982 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 22 06:03:42 crc kubenswrapper[4982]: E0122 06:03:42.646942 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-metrics-certs podName:d6294b08-62b7-465e-a5af-08f9bf1e5ff8 nodeName:}" failed. No retries permitted until 2026-01-22 06:03:44.6469269 +0000 UTC m=+1085.485564903 (durationBeforeRetry 2s). 
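The string of ErrImagePull "pull QPS exceeded" failures in the container dumps above is the kubelet's client-side throttle on image pulls (the registryPullQPS/registryBurst settings, which default to 5 QPS with a burst of 10; those are documented kubelet defaults, not values printed in this log): with more than a dozen operator pods requesting images in the same instant, the token bucket drains and the remaining pulls are rejected outright, after which those pods drop into the ImagePullBackOff errors recorded below. A minimal token-bucket sketch of that behavior, using golang.org/x/time/rate as an illustrative stand-in for the kubelet's internal limiter:

```go
package main

import (
	"fmt"

	"golang.org/x/time/rate"
)

// Token bucket: 5 pulls/sec sustained, burst of 10. In a tight loop
// the first 10 requests drain the burst and the rest are rejected,
// mirroring the kubelet's "pull QPS exceeded" errors above.
func main() {
	limiter := rate.NewLimiter(rate.Limit(5), 10)
	for i := 1; i <= 15; i++ {
		if limiter.Allow() {
			fmt.Printf("pull %2d: allowed\n", i)
		} else {
			fmt.Printf("pull %2d: pull QPS exceeded\n", i)
		}
	}
}
```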
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-metrics-certs") pod "openstack-operator-controller-manager-647bb87bbd-fvz54" (UID: "d6294b08-62b7-465e-a5af-08f9bf1e5ff8") : secret "metrics-server-cert" not found Jan 22 06:03:42 crc kubenswrapper[4982]: E0122 06:03:42.646984 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-webhook-certs podName:d6294b08-62b7-465e-a5af-08f9bf1e5ff8 nodeName:}" failed. No retries permitted until 2026-01-22 06:03:44.646973561 +0000 UTC m=+1085.485611554 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-webhook-certs") pod "openstack-operator-controller-manager-647bb87bbd-fvz54" (UID: "d6294b08-62b7-465e-a5af-08f9bf1e5ff8") : secret "webhook-server-cert" not found Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.665083 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-k7ffz" event={"ID":"afdd5f3a-706e-4f4f-930e-c952e4b0c6dc","Type":"ContainerStarted","Data":"582f65f95f8f5734d81f9bbb1622ea87759bd449466fd4fd4466fd901c5c4bbd"} Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.693246 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-9nfxp" event={"ID":"1fa2993a-2231-445d-aa77-7190fb3a8fcb","Type":"ContainerStarted","Data":"ddf79a2243f8e98c5298d1ab1bf6294ce165193a5b240193384313b60f108a6c"} Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.700159 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-6b8bc8d87d-rk69w" event={"ID":"7cbf8f35-c5cf-4e1c-8718-023380d9ac26","Type":"ContainerStarted","Data":"4a4587a2b783110457acbc5d9b82bb0fab1d6b0017b4e1950916928196837ef7"} Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.741843 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-tklld" event={"ID":"6d6c68f5-4111-471b-875c-0d498c4b046d","Type":"ContainerStarted","Data":"644478b279f2d00fc38d7e25f9708f94ca3e22f2841bcdb72dc13bc03d202e48"} Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.744029 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-kxwmj" event={"ID":"ef693655-09f3-4809-a4b2-8930551fb3f1","Type":"ContainerStarted","Data":"936e71b8567b0b7b7e6541bc9a418423536792a1e799a7d8a9925f9fcc78fefd"} Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.745561 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-vkn5h" event={"ID":"d6afaf94-bf6d-4559-ab50-c2320aade035","Type":"ContainerStarted","Data":"089e765f83af2ca38f168f4fb0cef1329b738a9ff0e99407132bbe11068c41c0"} Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.746913 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-jmsqf" event={"ID":"030e0626-7169-45ea-9981-78c910b04226","Type":"ContainerStarted","Data":"dff71a4362c4e829639bea216af0282f93801c9519dd725f3da6316277b15aff"} Jan 22 06:03:42 crc kubenswrapper[4982]: E0122 06:03:42.748456 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:e02722d7581bfe1c5fc13e2fa6811d8665102ba86635c77547abf6b933cde127\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-jmsqf" podUID="030e0626-7169-45ea-9981-78c910b04226" Jan 22 06:03:42 crc kubenswrapper[4982]: E0122 06:03:42.748475 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:8e340ff11922b38e811261de96982e1aff5f4eb8f225d1d9f5973025a4fe8349\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-vkn5h" podUID="d6afaf94-bf6d-4559-ab50-c2320aade035" Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.752774 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-7zk22" event={"ID":"a1825b14-129b-459a-b08b-7e62c7f2414a","Type":"ContainerStarted","Data":"747d1dba68dd49df59eb7286fe6110bf9f99bcba889d8ab96fadb18f50c0a06e"} Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.756122 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-f44cg" event={"ID":"4c78936d-aa34-45c2-8e85-67f6de306d0f","Type":"ContainerStarted","Data":"c9aa29814560456eb929e4f2a945144c94f89198cccb46bc6c932fd994f30c69"} Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.758895 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-t276t" event={"ID":"e63b183e-a84a-4964-a082-5f7768d8c472","Type":"ContainerStarted","Data":"721e45957cf802db575940bf92e6f26285851e2095a536db5d24f90c55ce4834"} Jan 22 06:03:42 crc kubenswrapper[4982]: E0122 06:03:42.760967 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:8bee4480babd6fd8f686e0ba52a304acb6ffb90f09c7c57e7f5df5f7658836d8\\\"\"" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-t276t" podUID="e63b183e-a84a-4964-a082-5f7768d8c472" Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.762609 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-qgxkb" event={"ID":"3bde3f49-9b8e-4cb1-8eac-6eb047eda094","Type":"ContainerStarted","Data":"a0b4a5de1713cadbc658a1e5a668c888c0f4df3196ce3d45f5efd47f9475a6f3"} Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.763640 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-prb7h" event={"ID":"8b31e08d-b4f8-482f-b413-25897c734299","Type":"ContainerStarted","Data":"16705be046ea360e1f5aab16502fe8cb4ab0986f28b64856c9e1ff7b915753c3"} Jan 22 06:03:42 crc kubenswrapper[4982]: E0122 06:03:42.764285 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:445e951df2f21df6d33a466f75917e0f6103052ae751ae11887136e8ab165922\\\"\"" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-qgxkb" podUID="3bde3f49-9b8e-4cb1-8eac-6eb047eda094" Jan 22 06:03:42 crc kubenswrapper[4982]: E0122 
06:03:42.766999 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:2d6d13b3c28e45c6bec980b8808dda8da4723ae87e66d04f53d52c3b3c51612b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-prb7h" podUID="8b31e08d-b4f8-482f-b413-25897c734299" Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.769725 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-6pr97" event={"ID":"99845353-4c2d-4caa-bf12-c396d6b91a82","Type":"ContainerStarted","Data":"29b8b0837630aa73cbbce03d1bd2e9cd79711b52b01791c4827417b212678d6d"} Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.786141 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-hstbj" event={"ID":"ca35f48e-eddd-46f2-8369-f1e642432834","Type":"ContainerStarted","Data":"b98cce10d35f4344ad240f1dc2259c7342b38fe72a780df6d86ed7118e76b346"} Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.793815 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-hjbqs" event={"ID":"e884b772-f17f-410c-9a2a-1b87fcda735b","Type":"ContainerStarted","Data":"de4fc76af056d4f46dbb7096a13ade9236feaab949b3afb32733e3d83657c6eb"} Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.799639 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4dbcc" event={"ID":"bd17bdc3-15ba-47d5-88f9-56336faa71bf","Type":"ContainerStarted","Data":"2258785ad581c21d757fc457748e0cfc490b945d6d88eac538cee73bb6144742"} Jan 22 06:03:42 crc kubenswrapper[4982]: E0122 06:03:42.801072 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4dbcc" podUID="bd17bdc3-15ba-47d5-88f9-56336faa71bf" Jan 22 06:03:42 crc kubenswrapper[4982]: I0122 06:03:42.801444 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-284c4" event={"ID":"099ae039-177c-4335-a611-990dcdf9c655","Type":"ContainerStarted","Data":"2d5e1335c2b870b6a9a3bf2476f4aad28d09bbbf722652cc1d9ab0d58dffca14"} Jan 22 06:03:43 crc kubenswrapper[4982]: E0122 06:03:43.820834 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:8e340ff11922b38e811261de96982e1aff5f4eb8f225d1d9f5973025a4fe8349\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-vkn5h" podUID="d6afaf94-bf6d-4559-ab50-c2320aade035" Jan 22 06:03:43 crc kubenswrapper[4982]: E0122 06:03:43.820890 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:445e951df2f21df6d33a466f75917e0f6103052ae751ae11887136e8ab165922\\\"\"" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-qgxkb" 
podUID="3bde3f49-9b8e-4cb1-8eac-6eb047eda094" Jan 22 06:03:43 crc kubenswrapper[4982]: E0122 06:03:43.820973 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4dbcc" podUID="bd17bdc3-15ba-47d5-88f9-56336faa71bf" Jan 22 06:03:43 crc kubenswrapper[4982]: E0122 06:03:43.821056 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:8bee4480babd6fd8f686e0ba52a304acb6ffb90f09c7c57e7f5df5f7658836d8\\\"\"" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-t276t" podUID="e63b183e-a84a-4964-a082-5f7768d8c472" Jan 22 06:03:43 crc kubenswrapper[4982]: E0122 06:03:43.821123 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:e02722d7581bfe1c5fc13e2fa6811d8665102ba86635c77547abf6b933cde127\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-jmsqf" podUID="030e0626-7169-45ea-9981-78c910b04226" Jan 22 06:03:43 crc kubenswrapper[4982]: E0122 06:03:43.821191 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:2d6d13b3c28e45c6bec980b8808dda8da4723ae87e66d04f53d52c3b3c51612b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-prb7h" podUID="8b31e08d-b4f8-482f-b413-25897c734299" Jan 22 06:03:43 crc kubenswrapper[4982]: I0122 06:03:43.885266 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/84a649d7-4c8c-4da7-93b3-9e537b0207ee-cert\") pod \"infra-operator-controller-manager-54ccf4f85d-6zh6c\" (UID: \"84a649d7-4c8c-4da7-93b3-9e537b0207ee\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-6zh6c" Jan 22 06:03:43 crc kubenswrapper[4982]: E0122 06:03:43.885445 4982 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 22 06:03:43 crc kubenswrapper[4982]: E0122 06:03:43.885489 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/84a649d7-4c8c-4da7-93b3-9e537b0207ee-cert podName:84a649d7-4c8c-4da7-93b3-9e537b0207ee nodeName:}" failed. No retries permitted until 2026-01-22 06:03:47.885473556 +0000 UTC m=+1088.724111559 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/84a649d7-4c8c-4da7-93b3-9e537b0207ee-cert") pod "infra-operator-controller-manager-54ccf4f85d-6zh6c" (UID: "84a649d7-4c8c-4da7-93b3-9e537b0207ee") : secret "infra-operator-webhook-server-cert" not found Jan 22 06:03:44 crc kubenswrapper[4982]: I0122 06:03:44.189619 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/47ed3df3-a23e-4021-b786-b99d1b710639-cert\") pod \"openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g\" (UID: \"47ed3df3-a23e-4021-b786-b99d1b710639\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g" Jan 22 06:03:44 crc kubenswrapper[4982]: E0122 06:03:44.190206 4982 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 22 06:03:44 crc kubenswrapper[4982]: E0122 06:03:44.190258 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/47ed3df3-a23e-4021-b786-b99d1b710639-cert podName:47ed3df3-a23e-4021-b786-b99d1b710639 nodeName:}" failed. No retries permitted until 2026-01-22 06:03:48.190244724 +0000 UTC m=+1089.028882727 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/47ed3df3-a23e-4021-b786-b99d1b710639-cert") pod "openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g" (UID: "47ed3df3-a23e-4021-b786-b99d1b710639") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 22 06:03:44 crc kubenswrapper[4982]: I0122 06:03:44.699909 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-webhook-certs\") pod \"openstack-operator-controller-manager-647bb87bbd-fvz54\" (UID: \"d6294b08-62b7-465e-a5af-08f9bf1e5ff8\") " pod="openstack-operators/openstack-operator-controller-manager-647bb87bbd-fvz54" Jan 22 06:03:44 crc kubenswrapper[4982]: I0122 06:03:44.699986 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-metrics-certs\") pod \"openstack-operator-controller-manager-647bb87bbd-fvz54\" (UID: \"d6294b08-62b7-465e-a5af-08f9bf1e5ff8\") " pod="openstack-operators/openstack-operator-controller-manager-647bb87bbd-fvz54" Jan 22 06:03:44 crc kubenswrapper[4982]: E0122 06:03:44.700079 4982 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 22 06:03:44 crc kubenswrapper[4982]: E0122 06:03:44.700149 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-webhook-certs podName:d6294b08-62b7-465e-a5af-08f9bf1e5ff8 nodeName:}" failed. No retries permitted until 2026-01-22 06:03:48.700131275 +0000 UTC m=+1089.538769278 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-webhook-certs") pod "openstack-operator-controller-manager-647bb87bbd-fvz54" (UID: "d6294b08-62b7-465e-a5af-08f9bf1e5ff8") : secret "webhook-server-cert" not found Jan 22 06:03:44 crc kubenswrapper[4982]: E0122 06:03:44.700149 4982 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 22 06:03:44 crc kubenswrapper[4982]: E0122 06:03:44.700201 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-metrics-certs podName:d6294b08-62b7-465e-a5af-08f9bf1e5ff8 nodeName:}" failed. No retries permitted until 2026-01-22 06:03:48.700186277 +0000 UTC m=+1089.538824280 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-metrics-certs") pod "openstack-operator-controller-manager-647bb87bbd-fvz54" (UID: "d6294b08-62b7-465e-a5af-08f9bf1e5ff8") : secret "metrics-server-cert" not found Jan 22 06:03:47 crc kubenswrapper[4982]: I0122 06:03:47.947806 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/84a649d7-4c8c-4da7-93b3-9e537b0207ee-cert\") pod \"infra-operator-controller-manager-54ccf4f85d-6zh6c\" (UID: \"84a649d7-4c8c-4da7-93b3-9e537b0207ee\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-6zh6c" Jan 22 06:03:47 crc kubenswrapper[4982]: E0122 06:03:47.947992 4982 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 22 06:03:47 crc kubenswrapper[4982]: E0122 06:03:47.948702 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/84a649d7-4c8c-4da7-93b3-9e537b0207ee-cert podName:84a649d7-4c8c-4da7-93b3-9e537b0207ee nodeName:}" failed. No retries permitted until 2026-01-22 06:03:55.948682539 +0000 UTC m=+1096.787320542 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/84a649d7-4c8c-4da7-93b3-9e537b0207ee-cert") pod "infra-operator-controller-manager-54ccf4f85d-6zh6c" (UID: "84a649d7-4c8c-4da7-93b3-9e537b0207ee") : secret "infra-operator-webhook-server-cert" not found Jan 22 06:03:48 crc kubenswrapper[4982]: I0122 06:03:48.256492 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/47ed3df3-a23e-4021-b786-b99d1b710639-cert\") pod \"openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g\" (UID: \"47ed3df3-a23e-4021-b786-b99d1b710639\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g" Jan 22 06:03:48 crc kubenswrapper[4982]: E0122 06:03:48.256685 4982 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 22 06:03:48 crc kubenswrapper[4982]: E0122 06:03:48.256761 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/47ed3df3-a23e-4021-b786-b99d1b710639-cert podName:47ed3df3-a23e-4021-b786-b99d1b710639 nodeName:}" failed. No retries permitted until 2026-01-22 06:03:56.256742585 +0000 UTC m=+1097.095380588 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/47ed3df3-a23e-4021-b786-b99d1b710639-cert") pod "openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g" (UID: "47ed3df3-a23e-4021-b786-b99d1b710639") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 22 06:03:48 crc kubenswrapper[4982]: I0122 06:03:48.764718 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-webhook-certs\") pod \"openstack-operator-controller-manager-647bb87bbd-fvz54\" (UID: \"d6294b08-62b7-465e-a5af-08f9bf1e5ff8\") " pod="openstack-operators/openstack-operator-controller-manager-647bb87bbd-fvz54" Jan 22 06:03:48 crc kubenswrapper[4982]: I0122 06:03:48.764826 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-metrics-certs\") pod \"openstack-operator-controller-manager-647bb87bbd-fvz54\" (UID: \"d6294b08-62b7-465e-a5af-08f9bf1e5ff8\") " pod="openstack-operators/openstack-operator-controller-manager-647bb87bbd-fvz54" Jan 22 06:03:48 crc kubenswrapper[4982]: E0122 06:03:48.764963 4982 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 22 06:03:48 crc kubenswrapper[4982]: E0122 06:03:48.765044 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-webhook-certs podName:d6294b08-62b7-465e-a5af-08f9bf1e5ff8 nodeName:}" failed. No retries permitted until 2026-01-22 06:03:56.765025684 +0000 UTC m=+1097.603663687 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-webhook-certs") pod "openstack-operator-controller-manager-647bb87bbd-fvz54" (UID: "d6294b08-62b7-465e-a5af-08f9bf1e5ff8") : secret "webhook-server-cert" not found Jan 22 06:03:48 crc kubenswrapper[4982]: E0122 06:03:48.765054 4982 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 22 06:03:48 crc kubenswrapper[4982]: E0122 06:03:48.765136 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-metrics-certs podName:d6294b08-62b7-465e-a5af-08f9bf1e5ff8 nodeName:}" failed. No retries permitted until 2026-01-22 06:03:56.765111256 +0000 UTC m=+1097.603749259 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-metrics-certs") pod "openstack-operator-controller-manager-647bb87bbd-fvz54" (UID: "d6294b08-62b7-465e-a5af-08f9bf1e5ff8") : secret "metrics-server-cert" not found Jan 22 06:03:54 crc kubenswrapper[4982]: E0122 06:03:54.781229 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ovn-operator@sha256:8b3bfb9e86618b7ac69443939b0968fae28a22cd62ea1e429b599ff9f8a5f8cf" Jan 22 06:03:54 crc kubenswrapper[4982]: E0122 06:03:54.784674 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:8b3bfb9e86618b7ac69443939b0968fae28a22cd62ea1e429b599ff9f8a5f8cf,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bbrjp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-55db956ddc-284c4_openstack-operators(099ae039-177c-4335-a611-990dcdf9c655): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 06:03:54 crc kubenswrapper[4982]: E0122 06:03:54.787756 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-284c4" 
podUID="099ae039-177c-4335-a611-990dcdf9c655" Jan 22 06:03:54 crc kubenswrapper[4982]: E0122 06:03:54.895139 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:8b3bfb9e86618b7ac69443939b0968fae28a22cd62ea1e429b599ff9f8a5f8cf\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-284c4" podUID="099ae039-177c-4335-a611-990dcdf9c655" Jan 22 06:03:55 crc kubenswrapper[4982]: E0122 06:03:55.990096 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:4e995cfa360a9d595a01b9c0541ab934692f2374203cb5738127dd784f793831" Jan 22 06:03:55 crc kubenswrapper[4982]: E0122 06:03:55.990314 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:4e995cfa360a9d595a01b9c0541ab934692f2374203cb5738127dd784f793831,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vp9rq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-6b8bc8d87d-rk69w_openstack-operators(7cbf8f35-c5cf-4e1c-8718-023380d9ac26): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 06:03:55 crc kubenswrapper[4982]: E0122 06:03:55.991581 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-6b8bc8d87d-rk69w" podUID="7cbf8f35-c5cf-4e1c-8718-023380d9ac26" Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.001537 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/84a649d7-4c8c-4da7-93b3-9e537b0207ee-cert\") pod \"infra-operator-controller-manager-54ccf4f85d-6zh6c\" (UID: \"84a649d7-4c8c-4da7-93b3-9e537b0207ee\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-6zh6c" Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.008522 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/84a649d7-4c8c-4da7-93b3-9e537b0207ee-cert\") pod \"infra-operator-controller-manager-54ccf4f85d-6zh6c\" (UID: \"84a649d7-4c8c-4da7-93b3-9e537b0207ee\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-6zh6c" Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.112818 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-6zh6c" Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.307436 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/47ed3df3-a23e-4021-b786-b99d1b710639-cert\") pod \"openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g\" (UID: \"47ed3df3-a23e-4021-b786-b99d1b710639\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g" Jan 22 06:03:56 crc kubenswrapper[4982]: E0122 06:03:56.307572 4982 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 22 06:03:56 crc kubenswrapper[4982]: E0122 06:03:56.307616 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/47ed3df3-a23e-4021-b786-b99d1b710639-cert podName:47ed3df3-a23e-4021-b786-b99d1b710639 nodeName:}" failed. No retries permitted until 2026-01-22 06:04:12.307603799 +0000 UTC m=+1113.146241802 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/47ed3df3-a23e-4021-b786-b99d1b710639-cert") pod "openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g" (UID: "47ed3df3-a23e-4021-b786-b99d1b710639") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.525579 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-54ccf4f85d-6zh6c"] Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.814718 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-metrics-certs\") pod \"openstack-operator-controller-manager-647bb87bbd-fvz54\" (UID: \"d6294b08-62b7-465e-a5af-08f9bf1e5ff8\") " pod="openstack-operators/openstack-operator-controller-manager-647bb87bbd-fvz54" Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.814842 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-webhook-certs\") pod \"openstack-operator-controller-manager-647bb87bbd-fvz54\" (UID: \"d6294b08-62b7-465e-a5af-08f9bf1e5ff8\") " pod="openstack-operators/openstack-operator-controller-manager-647bb87bbd-fvz54" Jan 22 06:03:56 crc kubenswrapper[4982]: E0122 06:03:56.815298 4982 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 22 06:03:56 crc kubenswrapper[4982]: E0122 06:03:56.815402 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-metrics-certs podName:d6294b08-62b7-465e-a5af-08f9bf1e5ff8 nodeName:}" failed. No retries permitted until 2026-01-22 06:04:12.815372213 +0000 UTC m=+1113.654010216 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-metrics-certs") pod "openstack-operator-controller-manager-647bb87bbd-fvz54" (UID: "d6294b08-62b7-465e-a5af-08f9bf1e5ff8") : secret "metrics-server-cert" not found Jan 22 06:03:56 crc kubenswrapper[4982]: E0122 06:03:56.815492 4982 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 22 06:03:56 crc kubenswrapper[4982]: E0122 06:03:56.815532 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-webhook-certs podName:d6294b08-62b7-465e-a5af-08f9bf1e5ff8 nodeName:}" failed. No retries permitted until 2026-01-22 06:04:12.815522988 +0000 UTC m=+1113.654161071 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-webhook-certs") pod "openstack-operator-controller-manager-647bb87bbd-fvz54" (UID: "d6294b08-62b7-465e-a5af-08f9bf1e5ff8") : secret "webhook-server-cert" not found Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.912416 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-hjbqs" event={"ID":"e884b772-f17f-410c-9a2a-1b87fcda735b","Type":"ContainerStarted","Data":"55da501c9c2e32ec1256313adac80dff121ac6e8900c9b3098f29f6f14db605f"} Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.912506 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-hjbqs" Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.916762 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-6pr97" event={"ID":"99845353-4c2d-4caa-bf12-c396d6b91a82","Type":"ContainerStarted","Data":"d8d66dc55cfcb65cb8568ef8c5dcec7eb01b079757bdf821d8fa56294f3f6e14"} Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.916899 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-6pr97" Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.920591 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-9nfxp" event={"ID":"1fa2993a-2231-445d-aa77-7190fb3a8fcb","Type":"ContainerStarted","Data":"efb9c189ea0cb4d5593064248e9871babd17c7aa1bca30dbb38f0cebffe15a39"} Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.921237 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-9nfxp" Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.922529 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-7zk22" event={"ID":"a1825b14-129b-459a-b08b-7e62c7f2414a","Type":"ContainerStarted","Data":"f8cf01242ccc69a40a13f1867d4b37b7117f597c3665bb59e4f782dd6702724c"} Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.922900 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-7zk22" Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.929219 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-69cf5d4557-22wjh" event={"ID":"a7fe3154-ef99-4ce9-9151-605f734269f1","Type":"ContainerStarted","Data":"cd6aa89240e5fa567f4a53d7c92e3e9372a33abaac9efe57072cdccb78557cf2"} Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.930029 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-69cf5d4557-22wjh" Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.935623 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-f44cg" event={"ID":"4c78936d-aa34-45c2-8e85-67f6de306d0f","Type":"ContainerStarted","Data":"0960b3cb54b84c0783e4f39e5749ec5556713f6812d824158dff96ec5f3510ea"} Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.935703 4982 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-f44cg" Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.938568 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-f4l4h" event={"ID":"bca8ffde-e486-4000-8d0b-39a275b64803","Type":"ContainerStarted","Data":"6f76d39570f5e4675cd36228182465dbdb63c205efc733f8104cdfbf6a414e47"} Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.938677 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-f4l4h" Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.942211 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-hjbqs" podStartSLOduration=3.052769489 podStartE2EDuration="16.942194141s" podCreationTimestamp="2026-01-22 06:03:40 +0000 UTC" firstStartedPulling="2026-01-22 06:03:42.081034745 +0000 UTC m=+1082.919672748" lastFinishedPulling="2026-01-22 06:03:55.970459386 +0000 UTC m=+1096.809097400" observedRunningTime="2026-01-22 06:03:56.937789392 +0000 UTC m=+1097.776427395" watchObservedRunningTime="2026-01-22 06:03:56.942194141 +0000 UTC m=+1097.780832144" Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.943046 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-hstbj" event={"ID":"ca35f48e-eddd-46f2-8369-f1e642432834","Type":"ContainerStarted","Data":"83301511fa7c0eb092d6b1c6fb144f29f1943146e068bef3712d79ca1cb203de"} Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.943151 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-hstbj" Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.944508 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-6zh6c" event={"ID":"84a649d7-4c8c-4da7-93b3-9e537b0207ee","Type":"ContainerStarted","Data":"458eeeaf1a965b1b3a7351b594b691df07906fa0689e1357c54ac9fca91b751b"} Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.950975 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-tklld" event={"ID":"6d6c68f5-4111-471b-875c-0d498c4b046d","Type":"ContainerStarted","Data":"e7807acd330ccb2f5cdca75ff6a27a4000016270db9131a36472185abed3e32f"} Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.951671 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-tklld" Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.957557 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-7rhfh" event={"ID":"18f9bc75-69ce-4299-ab4a-c280781b056c","Type":"ContainerStarted","Data":"9e0aa8cf6071d4d70876d0fae248c5c79afc6ad5617a4e179263845d30a0efc2"} Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.958243 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-7rhfh" Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.959524 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-kxwmj" 
event={"ID":"ef693655-09f3-4809-a4b2-8930551fb3f1","Type":"ContainerStarted","Data":"8cea5ff6bbc9e6894bafa0afdc6b42984dd5587cb61e0529cf07729374be7a7a"} Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.959924 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-kxwmj" Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.961583 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-k7ffz" event={"ID":"afdd5f3a-706e-4f4f-930e-c952e4b0c6dc","Type":"ContainerStarted","Data":"dc846711ccdfd17da1ec3d4891cde5100f172e09eb65d87886a421272e763cab"} Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.961756 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-k7ffz" Jan 22 06:03:56 crc kubenswrapper[4982]: E0122 06:03:56.967125 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:4e995cfa360a9d595a01b9c0541ab934692f2374203cb5738127dd784f793831\\\"\"" pod="openstack-operators/nova-operator-controller-manager-6b8bc8d87d-rk69w" podUID="7cbf8f35-c5cf-4e1c-8718-023380d9ac26" Jan 22 06:03:56 crc kubenswrapper[4982]: I0122 06:03:56.979553 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-f44cg" podStartSLOduration=2.995371027 podStartE2EDuration="16.97952691s" podCreationTimestamp="2026-01-22 06:03:40 +0000 UTC" firstStartedPulling="2026-01-22 06:03:41.987017133 +0000 UTC m=+1082.825655136" lastFinishedPulling="2026-01-22 06:03:55.971173006 +0000 UTC m=+1096.809811019" observedRunningTime="2026-01-22 06:03:56.975347528 +0000 UTC m=+1097.813985531" watchObservedRunningTime="2026-01-22 06:03:56.97952691 +0000 UTC m=+1097.818164913" Jan 22 06:03:57 crc kubenswrapper[4982]: I0122 06:03:57.006274 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-69cf5d4557-22wjh" podStartSLOduration=3.475861417 podStartE2EDuration="18.006256033s" podCreationTimestamp="2026-01-22 06:03:39 +0000 UTC" firstStartedPulling="2026-01-22 06:03:41.443077512 +0000 UTC m=+1082.281715515" lastFinishedPulling="2026-01-22 06:03:55.973472128 +0000 UTC m=+1096.812110131" observedRunningTime="2026-01-22 06:03:57.001664048 +0000 UTC m=+1097.840302051" watchObservedRunningTime="2026-01-22 06:03:57.006256033 +0000 UTC m=+1097.844894026" Jan 22 06:03:57 crc kubenswrapper[4982]: I0122 06:03:57.076079 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-9nfxp" podStartSLOduration=3.28403753 podStartE2EDuration="17.07606319s" podCreationTimestamp="2026-01-22 06:03:40 +0000 UTC" firstStartedPulling="2026-01-22 06:03:42.177988805 +0000 UTC m=+1083.016626808" lastFinishedPulling="2026-01-22 06:03:55.970014425 +0000 UTC m=+1096.808652468" observedRunningTime="2026-01-22 06:03:57.062892413 +0000 UTC m=+1097.901530426" watchObservedRunningTime="2026-01-22 06:03:57.07606319 +0000 UTC m=+1097.914701193" Jan 22 06:03:57 crc kubenswrapper[4982]: I0122 06:03:57.093997 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-6pr97" podStartSLOduration=3.219071765 podStartE2EDuration="17.093982134s" podCreationTimestamp="2026-01-22 06:03:40 +0000 UTC" firstStartedPulling="2026-01-22 06:03:42.099028681 +0000 UTC m=+1082.937666684" lastFinishedPulling="2026-01-22 06:03:55.97393901 +0000 UTC m=+1096.812577053" observedRunningTime="2026-01-22 06:03:57.090612993 +0000 UTC m=+1097.929250996" watchObservedRunningTime="2026-01-22 06:03:57.093982134 +0000 UTC m=+1097.932620127" Jan 22 06:03:57 crc kubenswrapper[4982]: I0122 06:03:57.107099 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-7zk22" podStartSLOduration=3.160686187 podStartE2EDuration="17.107081558s" podCreationTimestamp="2026-01-22 06:03:40 +0000 UTC" firstStartedPulling="2026-01-22 06:03:42.089591896 +0000 UTC m=+1082.928229889" lastFinishedPulling="2026-01-22 06:03:56.035987257 +0000 UTC m=+1096.874625260" observedRunningTime="2026-01-22 06:03:57.104994431 +0000 UTC m=+1097.943632434" watchObservedRunningTime="2026-01-22 06:03:57.107081558 +0000 UTC m=+1097.945719561" Jan 22 06:03:57 crc kubenswrapper[4982]: I0122 06:03:57.144991 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-hstbj" podStartSLOduration=4.150037388 podStartE2EDuration="18.144973392s" podCreationTimestamp="2026-01-22 06:03:39 +0000 UTC" firstStartedPulling="2026-01-22 06:03:41.976076967 +0000 UTC m=+1082.814714970" lastFinishedPulling="2026-01-22 06:03:55.971012971 +0000 UTC m=+1096.809650974" observedRunningTime="2026-01-22 06:03:57.122105224 +0000 UTC m=+1097.960743227" watchObservedRunningTime="2026-01-22 06:03:57.144973392 +0000 UTC m=+1097.983611395" Jan 22 06:03:57 crc kubenswrapper[4982]: I0122 06:03:57.146535 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-k7ffz" podStartSLOduration=3.531875259 podStartE2EDuration="17.146528764s" podCreationTimestamp="2026-01-22 06:03:40 +0000 UTC" firstStartedPulling="2026-01-22 06:03:42.386403899 +0000 UTC m=+1083.225041902" lastFinishedPulling="2026-01-22 06:03:56.001057364 +0000 UTC m=+1096.839695407" observedRunningTime="2026-01-22 06:03:57.140671766 +0000 UTC m=+1097.979309779" watchObservedRunningTime="2026-01-22 06:03:57.146528764 +0000 UTC m=+1097.985166767" Jan 22 06:03:57 crc kubenswrapper[4982]: I0122 06:03:57.161002 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-kxwmj" podStartSLOduration=3.282257843 podStartE2EDuration="17.160985615s" podCreationTimestamp="2026-01-22 06:03:40 +0000 UTC" firstStartedPulling="2026-01-22 06:03:42.089285388 +0000 UTC m=+1082.927923391" lastFinishedPulling="2026-01-22 06:03:55.96801315 +0000 UTC m=+1096.806651163" observedRunningTime="2026-01-22 06:03:57.158371884 +0000 UTC m=+1097.997009887" watchObservedRunningTime="2026-01-22 06:03:57.160985615 +0000 UTC m=+1097.999623608" Jan 22 06:03:57 crc kubenswrapper[4982]: I0122 06:03:57.221932 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-tklld" podStartSLOduration=3.242060716 podStartE2EDuration="17.221913142s" podCreationTimestamp="2026-01-22 06:03:40 +0000 UTC" firstStartedPulling="2026-01-22 06:03:41.995074951 
+0000 UTC m=+1082.833712944" lastFinishedPulling="2026-01-22 06:03:55.974927327 +0000 UTC m=+1096.813565370" observedRunningTime="2026-01-22 06:03:57.209322861 +0000 UTC m=+1098.047960864" watchObservedRunningTime="2026-01-22 06:03:57.221913142 +0000 UTC m=+1098.060551145" Jan 22 06:03:57 crc kubenswrapper[4982]: I0122 06:03:57.288334 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-f4l4h" podStartSLOduration=3.369459859 podStartE2EDuration="17.288310916s" podCreationTimestamp="2026-01-22 06:03:40 +0000 UTC" firstStartedPulling="2026-01-22 06:03:42.058789403 +0000 UTC m=+1082.897427406" lastFinishedPulling="2026-01-22 06:03:55.97764042 +0000 UTC m=+1096.816278463" observedRunningTime="2026-01-22 06:03:57.25367428 +0000 UTC m=+1098.092312283" watchObservedRunningTime="2026-01-22 06:03:57.288310916 +0000 UTC m=+1098.126948919" Jan 22 06:03:57 crc kubenswrapper[4982]: I0122 06:03:57.298725 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-7rhfh" podStartSLOduration=3.77120806 podStartE2EDuration="18.298707297s" podCreationTimestamp="2026-01-22 06:03:39 +0000 UTC" firstStartedPulling="2026-01-22 06:03:41.4419065 +0000 UTC m=+1082.280544503" lastFinishedPulling="2026-01-22 06:03:55.969405727 +0000 UTC m=+1096.808043740" observedRunningTime="2026-01-22 06:03:57.283314181 +0000 UTC m=+1098.121952194" watchObservedRunningTime="2026-01-22 06:03:57.298707297 +0000 UTC m=+1098.137345300" Jan 22 06:03:59 crc kubenswrapper[4982]: I0122 06:03:59.724991 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 06:04:01 crc kubenswrapper[4982]: I0122 06:04:01.025928 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-k7ffz" Jan 22 06:04:09 crc kubenswrapper[4982]: E0122 06:04:09.930839 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/watcher-operator@sha256:2d6d13b3c28e45c6bec980b8808dda8da4723ae87e66d04f53d52c3b3c51612b" Jan 22 06:04:09 crc kubenswrapper[4982]: E0122 06:04:09.931834 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:2d6d13b3c28e45c6bec980b8808dda8da4723ae87e66d04f53d52c3b3c51612b,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-7zs5v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-5ffb9c6597-prb7h_openstack-operators(8b31e08d-b4f8-482f-b413-25897c734299): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 06:04:09 crc kubenswrapper[4982]: E0122 06:04:09.932969 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-prb7h" podUID="8b31e08d-b4f8-482f-b413-25897c734299" Jan 22 06:04:10 crc kubenswrapper[4982]: E0122 06:04:10.307896 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2" Jan 22 06:04:10 crc kubenswrapper[4982]: E0122 06:04:10.308379 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zxwtv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-4dbcc_openstack-operators(bd17bdc3-15ba-47d5-88f9-56336faa71bf): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 22 06:04:10 crc kubenswrapper[4982]: E0122 06:04:10.309610 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4dbcc" podUID="bd17bdc3-15ba-47d5-88f9-56336faa71bf"
Jan 22 06:04:10 crc kubenswrapper[4982]: I0122 06:04:10.362872 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-69cf5d4557-22wjh"
Jan 22 06:04:10 crc kubenswrapper[4982]: I0122 06:04:10.441042 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-7rhfh"
Jan 22 06:04:10 crc kubenswrapper[4982]: I0122 06:04:10.444144 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-hstbj"
Jan 22 06:04:10 crc kubenswrapper[4982]: I0122 06:04:10.444545 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-hjbqs"
Jan 22 06:04:10 crc kubenswrapper[4982]: I0122 06:04:10.485552 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-f4l4h"
Jan 22 06:04:10 crc kubenswrapper[4982]: I0122 06:04:10.506244 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-kxwmj"
Jan 22 06:04:10 crc kubenswrapper[4982]: I0122 06:04:10.561735 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-7zk22"
Jan 22 06:04:10 crc kubenswrapper[4982]: I0122 06:04:10.562294 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-tklld"
Jan 22 06:04:10 crc kubenswrapper[4982]: I0122 06:04:10.595290 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-6pr97"
Jan 22 06:04:10 crc kubenswrapper[4982]: I0122 06:04:10.651159 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-f44cg"
Jan 22 06:04:10 crc kubenswrapper[4982]: I0122 06:04:10.898146 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-9nfxp"
Jan 22 06:04:11 crc kubenswrapper[4982]: E0122 06:04:11.076820 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/swift-operator@sha256:445e951df2f21df6d33a466f75917e0f6103052ae751ae11887136e8ab165922"
Jan 22 06:04:11 crc kubenswrapper[4982]: E0122 06:04:11.078241 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:445e951df2f21df6d33a466f75917e0f6103052ae751ae11887136e8ab165922,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vsq8k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-547cbdb99f-qgxkb_openstack-operators(3bde3f49-9b8e-4cb1-8eac-6eb047eda094): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 22 06:04:11 crc kubenswrapper[4982]: E0122 06:04:11.080287 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-qgxkb" podUID="3bde3f49-9b8e-4cb1-8eac-6eb047eda094"
Jan 22 06:04:12 crc kubenswrapper[4982]: I0122 06:04:12.335828 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/47ed3df3-a23e-4021-b786-b99d1b710639-cert\") pod \"openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g\" (UID: \"47ed3df3-a23e-4021-b786-b99d1b710639\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g"
Jan 22 06:04:12 crc kubenswrapper[4982]: I0122 06:04:12.345453 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/47ed3df3-a23e-4021-b786-b99d1b710639-cert\") pod \"openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g\" (UID: \"47ed3df3-a23e-4021-b786-b99d1b710639\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g"
Jan 22 06:04:12 crc kubenswrapper[4982]: I0122 06:04:12.621658 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g"
Jan 22 06:04:12 crc kubenswrapper[4982]: E0122 06:04:12.807365 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:8e340ff11922b38e811261de96982e1aff5f4eb8f225d1d9f5973025a4fe8349"
Jan 22 06:04:12 crc kubenswrapper[4982]: E0122 06:04:12.807582 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:8e340ff11922b38e811261de96982e1aff5f4eb8f225d1d9f5973025a4fe8349,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xfgjb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-b8b6d4659-vkn5h_openstack-operators(d6afaf94-bf6d-4559-ab50-c2320aade035): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 22 06:04:12 crc kubenswrapper[4982]: E0122 06:04:12.809165 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-vkn5h" podUID="d6afaf94-bf6d-4559-ab50-c2320aade035"
Jan 22 06:04:12 crc kubenswrapper[4982]: I0122 06:04:12.842750 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-metrics-certs\") pod \"openstack-operator-controller-manager-647bb87bbd-fvz54\" (UID: \"d6294b08-62b7-465e-a5af-08f9bf1e5ff8\") " pod="openstack-operators/openstack-operator-controller-manager-647bb87bbd-fvz54"
Jan 22 06:04:12 crc kubenswrapper[4982]: I0122 06:04:12.842868 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-webhook-certs\") pod \"openstack-operator-controller-manager-647bb87bbd-fvz54\" (UID: \"d6294b08-62b7-465e-a5af-08f9bf1e5ff8\") " pod="openstack-operators/openstack-operator-controller-manager-647bb87bbd-fvz54"
Jan 22 06:04:12 crc kubenswrapper[4982]: I0122 06:04:12.852533 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-webhook-certs\") pod \"openstack-operator-controller-manager-647bb87bbd-fvz54\" (UID: \"d6294b08-62b7-465e-a5af-08f9bf1e5ff8\") " pod="openstack-operators/openstack-operator-controller-manager-647bb87bbd-fvz54"
Jan 22 06:04:12 crc kubenswrapper[4982]: I0122 06:04:12.852706 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d6294b08-62b7-465e-a5af-08f9bf1e5ff8-metrics-certs\") pod \"openstack-operator-controller-manager-647bb87bbd-fvz54\" (UID: \"d6294b08-62b7-465e-a5af-08f9bf1e5ff8\") " pod="openstack-operators/openstack-operator-controller-manager-647bb87bbd-fvz54"
Jan 22 06:04:12 crc kubenswrapper[4982]: I0122 06:04:12.864360 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-647bb87bbd-fvz54"
Jan 22 06:04:13 crc kubenswrapper[4982]: I0122 06:04:13.863135 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g"]
Jan 22 06:04:13 crc kubenswrapper[4982]: W0122 06:04:13.873180 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod47ed3df3_a23e_4021_b786_b99d1b710639.slice/crio-21e51b02ab5f29393e8575b239b1511167719630f1ac6708c5d4dd8ac8bca1b1 WatchSource:0}: Error finding container 21e51b02ab5f29393e8575b239b1511167719630f1ac6708c5d4dd8ac8bca1b1: Status 404 returned error can't find the container with id 21e51b02ab5f29393e8575b239b1511167719630f1ac6708c5d4dd8ac8bca1b1
Jan 22 06:04:13 crc kubenswrapper[4982]: I0122 06:04:13.924544 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-647bb87bbd-fvz54"]
Jan 22 06:04:13 crc kubenswrapper[4982]: W0122 06:04:13.934428 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd6294b08_62b7_465e_a5af_08f9bf1e5ff8.slice/crio-1985261c252396b89c57a311ea1034e330d93c337c5a7cb1c828f4ab51ce6c3b WatchSource:0}: Error finding container 1985261c252396b89c57a311ea1034e330d93c337c5a7cb1c828f4ab51ce6c3b: Status 404 returned error can't find the container with id 1985261c252396b89c57a311ea1034e330d93c337c5a7cb1c828f4ab51ce6c3b
Jan 22 06:04:14 crc kubenswrapper[4982]: I0122 06:04:14.135625 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-6b8bc8d87d-rk69w" event={"ID":"7cbf8f35-c5cf-4e1c-8718-023380d9ac26","Type":"ContainerStarted","Data":"a273dd521aaf2039e62c2c7dcf0effe1577ea6859afa6c5f3abab7fb42f7c84b"}
Jan 22 06:04:14 crc kubenswrapper[4982]: I0122 06:04:14.135981 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-6b8bc8d87d-rk69w"
Jan 22 06:04:14 crc kubenswrapper[4982]: I0122 06:04:14.137008 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-284c4" event={"ID":"099ae039-177c-4335-a611-990dcdf9c655","Type":"ContainerStarted","Data":"794558d7a647b33a13a6b2afb173e055de666fad5a477759152dc545a626b295"}
Jan 22 06:04:14 crc kubenswrapper[4982]: I0122 06:04:14.137216 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-284c4"
Jan 22 06:04:14 crc kubenswrapper[4982]: I0122 06:04:14.137698 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g" event={"ID":"47ed3df3-a23e-4021-b786-b99d1b710639","Type":"ContainerStarted","Data":"21e51b02ab5f29393e8575b239b1511167719630f1ac6708c5d4dd8ac8bca1b1"}
Jan 22 06:04:14 crc kubenswrapper[4982]: I0122 06:04:14.139638 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-t276t" event={"ID":"e63b183e-a84a-4964-a082-5f7768d8c472","Type":"ContainerStarted","Data":"9acffdbd718701d65e37d49f27ef3c23854fae6afba95272d168627292e73d0a"}
Jan 22 06:04:14 crc kubenswrapper[4982]: I0122 06:04:14.139811 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-t276t"
Jan 22 06:04:14 crc kubenswrapper[4982]: I0122 06:04:14.140958 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-6zh6c" event={"ID":"84a649d7-4c8c-4da7-93b3-9e537b0207ee","Type":"ContainerStarted","Data":"90e80622ee1cc0d6675791c6cae5f5318e192c86806c80847920c3bb56176ae0"}
Jan 22 06:04:14 crc kubenswrapper[4982]: I0122 06:04:14.141178 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-6zh6c"
Jan 22 06:04:14 crc kubenswrapper[4982]: I0122 06:04:14.141816 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-jmsqf" event={"ID":"030e0626-7169-45ea-9981-78c910b04226","Type":"ContainerStarted","Data":"acd4f54402da6efc3c573c1a0ecf49ab3cc86c6b7c932d02bca265cb795b5f78"}
Jan 22 06:04:14 crc kubenswrapper[4982]: I0122 06:04:14.142041 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-jmsqf"
Jan 22 06:04:14 crc kubenswrapper[4982]: I0122 06:04:14.155115 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-647bb87bbd-fvz54" event={"ID":"d6294b08-62b7-465e-a5af-08f9bf1e5ff8","Type":"ContainerStarted","Data":"c8e81347f24853ac3dff10a24bceb905cd27b4d7ffcf79435f91c139203d4f86"}
Jan 22 06:04:14 crc kubenswrapper[4982]: I0122 06:04:14.155169 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-647bb87bbd-fvz54" event={"ID":"d6294b08-62b7-465e-a5af-08f9bf1e5ff8","Type":"ContainerStarted","Data":"1985261c252396b89c57a311ea1034e330d93c337c5a7cb1c828f4ab51ce6c3b"}
Jan 22 06:04:14 crc kubenswrapper[4982]: I0122 06:04:14.155465 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-647bb87bbd-fvz54"
Jan 22 06:04:14 crc kubenswrapper[4982]: I0122 06:04:14.155521 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-6b8bc8d87d-rk69w" podStartSLOduration=2.756714186 podStartE2EDuration="34.155504533s" podCreationTimestamp="2026-01-22 06:03:40 +0000 UTC" firstStartedPulling="2026-01-22 06:03:42.177526292 +0000 UTC m=+1083.016164285" lastFinishedPulling="2026-01-22 06:04:13.576316639 +0000 UTC m=+1114.414954632" observedRunningTime="2026-01-22 06:04:14.151546746 +0000 UTC m=+1114.990184739" watchObservedRunningTime="2026-01-22 06:04:14.155504533 +0000 UTC m=+1114.994142536"
Jan 22 06:04:14 crc kubenswrapper[4982]: I0122 06:04:14.195826 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-6zh6c" podStartSLOduration=17.200239347 podStartE2EDuration="34.195811303s" podCreationTimestamp="2026-01-22 06:03:40 +0000 UTC" firstStartedPulling="2026-01-22 06:03:56.549540379 +0000 UTC m=+1097.388178382" lastFinishedPulling="2026-01-22 06:04:13.545112335 +0000 UTC m=+1114.383750338" observedRunningTime="2026-01-22 06:04:14.187689523 +0000 UTC m=+1115.026327526" watchObservedRunningTime="2026-01-22 06:04:14.195811303 +0000 UTC m=+1115.034449306"
Jan 22 06:04:14 crc kubenswrapper[4982]: I0122 06:04:14.206955 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-jmsqf" podStartSLOduration=2.972079779 podStartE2EDuration="34.206926503s" podCreationTimestamp="2026-01-22 06:03:40 +0000 UTC" firstStartedPulling="2026-01-22 06:03:42.360707264 +0000 UTC m=+1083.199345267" lastFinishedPulling="2026-01-22 06:04:13.595553988 +0000 UTC m=+1114.434191991" observedRunningTime="2026-01-22 06:04:14.20236998 +0000 UTC m=+1115.041007983" watchObservedRunningTime="2026-01-22 06:04:14.206926503 +0000 UTC m=+1115.045564506"
Jan 22 06:04:14 crc kubenswrapper[4982]: I0122 06:04:14.218696 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-t276t" podStartSLOduration=3.071750983 podStartE2EDuration="34.218683151s" podCreationTimestamp="2026-01-22 06:03:40 +0000 UTC" firstStartedPulling="2026-01-22 06:03:42.324986388 +0000 UTC m=+1083.163624401" lastFinishedPulling="2026-01-22 06:04:13.471918566 +0000 UTC m=+1114.310556569" observedRunningTime="2026-01-22 06:04:14.21570043 +0000 UTC m=+1115.054338433" watchObservedRunningTime="2026-01-22 06:04:14.218683151 +0000 UTC m=+1115.057321154"
Jan 22 06:04:14 crc kubenswrapper[4982]: I0122 06:04:14.235512 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-284c4" podStartSLOduration=2.763421147 podStartE2EDuration="34.235497925s" podCreationTimestamp="2026-01-22 06:03:40 +0000 UTC" firstStartedPulling="2026-01-22 06:03:42.09901458 +0000 UTC m=+1082.937652583" lastFinishedPulling="2026-01-22 06:04:13.571091338 +0000 UTC m=+1114.409729361" observedRunningTime="2026-01-22 06:04:14.233309636 +0000 UTC m=+1115.071947629" watchObservedRunningTime="2026-01-22 06:04:14.235497925 +0000 UTC m=+1115.074135928"
Jan 22 06:04:14 crc kubenswrapper[4982]: I0122 06:04:14.272487 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-647bb87bbd-fvz54" podStartSLOduration=34.272473435 podStartE2EDuration="34.272473435s" podCreationTimestamp="2026-01-22 06:03:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:04:14.265458845 +0000 UTC m=+1115.104096858" watchObservedRunningTime="2026-01-22 06:04:14.272473435 +0000 UTC m=+1115.111111438"
Jan 22 06:04:16 crc kubenswrapper[4982]: I0122 06:04:16.177119 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g" event={"ID":"47ed3df3-a23e-4021-b786-b99d1b710639","Type":"ContainerStarted","Data":"a760d1898d5011caccc317c24822a01bc828ad712f3184c9229354437f297714"}
Jan 22 06:04:16 crc kubenswrapper[4982]: I0122 06:04:16.177840 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g"
Jan 22 06:04:16 crc kubenswrapper[4982]: I0122 06:04:16.207651 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g" podStartSLOduration=34.342816007 podStartE2EDuration="36.207619609s" podCreationTimestamp="2026-01-22 06:03:40 +0000 UTC" firstStartedPulling="2026-01-22 06:04:13.876620816 +0000 UTC m=+1114.715258819" lastFinishedPulling="2026-01-22 06:04:15.741424418 +0000 UTC m=+1116.580062421" observedRunningTime="2026-01-22 06:04:16.203365584 +0000 UTC m=+1117.042003657" watchObservedRunningTime="2026-01-22 06:04:16.207619609 +0000 UTC m=+1117.046257652"
Jan 22 06:04:20 crc kubenswrapper[4982]: I0122 06:04:20.791471 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-284c4"
Jan 22 06:04:20 crc kubenswrapper[4982]: I0122 06:04:20.792424 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-t276t"
Jan 22 06:04:20 crc kubenswrapper[4982]: I0122 06:04:20.920263 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-6b8bc8d87d-rk69w"
Jan 22 06:04:21 crc kubenswrapper[4982]: I0122 06:04:21.007527 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-jmsqf"
Jan 22 06:04:21 crc kubenswrapper[4982]: E0122 06:04:21.721769 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:2d6d13b3c28e45c6bec980b8808dda8da4723ae87e66d04f53d52c3b3c51612b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-prb7h" podUID="8b31e08d-b4f8-482f-b413-25897c734299"
Jan 22 06:04:22 crc kubenswrapper[4982]: I0122 06:04:22.630942 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g"
Jan 22 06:04:22 crc kubenswrapper[4982]: E0122 06:04:22.721010 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4dbcc" podUID="bd17bdc3-15ba-47d5-88f9-56336faa71bf"
Jan 22 06:04:22 crc kubenswrapper[4982]: I0122 06:04:22.871309 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-647bb87bbd-fvz54"
Jan 22 06:04:24 crc kubenswrapper[4982]: E0122 06:04:24.722811 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:445e951df2f21df6d33a466f75917e0f6103052ae751ae11887136e8ab165922\\\"\"" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-qgxkb" podUID="3bde3f49-9b8e-4cb1-8eac-6eb047eda094"
Jan 22 06:04:26 crc kubenswrapper[4982]: I0122 06:04:26.126749 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-6zh6c"
Jan 22 06:04:26 crc kubenswrapper[4982]: E0122 06:04:26.728493 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:8e340ff11922b38e811261de96982e1aff5f4eb8f225d1d9f5973025a4fe8349\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-vkn5h" podUID="d6afaf94-bf6d-4559-ab50-c2320aade035"
Jan 22 06:04:36 crc kubenswrapper[4982]: I0122 06:04:36.338221 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-prb7h" event={"ID":"8b31e08d-b4f8-482f-b413-25897c734299","Type":"ContainerStarted","Data":"086511b11d1eb1eb1b21d4fb5989e5cc6283009cf1fb0afab9b9980ad36f1b40"}
Jan 22 06:04:36 crc kubenswrapper[4982]: I0122 06:04:36.339621 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-prb7h"
Jan 22 06:04:36 crc kubenswrapper[4982]: I0122 06:04:36.368099 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-prb7h" podStartSLOduration=3.167222473 podStartE2EDuration="56.368072941s" podCreationTimestamp="2026-01-22 06:03:40 +0000 UTC" firstStartedPulling="2026-01-22 06:03:42.424091077 +0000 UTC m=+1083.262729080" lastFinishedPulling="2026-01-22 06:04:35.624941505 +0000 UTC m=+1136.463579548" observedRunningTime="2026-01-22 06:04:36.362567832 +0000 UTC m=+1137.201205845" watchObservedRunningTime="2026-01-22 06:04:36.368072941 +0000 UTC m=+1137.206710984"
Jan 22 06:04:39 crc kubenswrapper[4982]: I0122 06:04:39.371992 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-qgxkb" event={"ID":"3bde3f49-9b8e-4cb1-8eac-6eb047eda094","Type":"ContainerStarted","Data":"2843f7a51c9c7a271376b689caf1a7d9424e803de9800a33968bd99450d35207"}
Jan 22 06:04:39 crc kubenswrapper[4982]: I0122 06:04:39.373099 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-qgxkb"
Jan 22 06:04:39 crc kubenswrapper[4982]: I0122 06:04:39.375207 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4dbcc" event={"ID":"bd17bdc3-15ba-47d5-88f9-56336faa71bf","Type":"ContainerStarted","Data":"e3a07ce277b48c1c8f3171983d7b4bed8b35c658b346b3ca63500861f2b7f121"}
Jan 22 06:04:39 crc kubenswrapper[4982]: I0122 06:04:39.397674 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-qgxkb" podStartSLOduration=3.559710762 podStartE2EDuration="59.397642766s" podCreationTimestamp="2026-01-22 06:03:40 +0000 UTC" firstStartedPulling="2026-01-22 06:03:42.428606349 +0000 UTC m=+1083.267244352" lastFinishedPulling="2026-01-22 06:04:38.266538353 +0000 UTC m=+1139.105176356" observedRunningTime="2026-01-22 06:04:39.395016535 +0000 UTC m=+1140.233654618" watchObservedRunningTime="2026-01-22 06:04:39.397642766 +0000 UTC m=+1140.236280809"
Jan 22 06:04:39 crc kubenswrapper[4982]: I0122 06:04:39.431558 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4dbcc" podStartSLOduration=3.588804867 podStartE2EDuration="59.431528431s" podCreationTimestamp="2026-01-22 06:03:40 +0000 UTC" firstStartedPulling="2026-01-22 06:03:42.424625151 +0000 UTC m=+1083.263263154" lastFinishedPulling="2026-01-22 06:04:38.267348675 +0000 UTC m=+1139.105986718" observedRunningTime="2026-01-22 06:04:39.421340856 +0000 UTC m=+1140.259978889" watchObservedRunningTime="2026-01-22 06:04:39.431528431 +0000 UTC m=+1140.270166474"
Jan 22 06:04:41 crc kubenswrapper[4982]: I0122 06:04:41.049050 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-prb7h"
Jan 22 06:04:41 crc kubenswrapper[4982]: I0122 06:04:41.392801 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-vkn5h" event={"ID":"d6afaf94-bf6d-4559-ab50-c2320aade035","Type":"ContainerStarted","Data":"204a4035d18d5589aa9371fb7ab79ff9b72f4aebd1addbbd7b901ba9583aeaca"}
Jan 22 06:04:41 crc kubenswrapper[4982]: I0122 06:04:41.393053 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-vkn5h"
Jan 22 06:04:41 crc kubenswrapper[4982]: I0122 06:04:41.418530 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-vkn5h" podStartSLOduration=3.38653822 podStartE2EDuration="1m1.418510427s" podCreationTimestamp="2026-01-22 06:03:40 +0000 UTC" firstStartedPulling="2026-01-22 06:03:42.390327794 +0000 UTC m=+1083.228965797" lastFinishedPulling="2026-01-22 06:04:40.422299991 +0000 UTC m=+1141.260938004" observedRunningTime="2026-01-22 06:04:41.409870993 +0000 UTC m=+1142.248508996" watchObservedRunningTime="2026-01-22 06:04:41.418510427 +0000 UTC m=+1142.257148440"
Jan 22 06:04:50 crc kubenswrapper[4982]: I0122 06:04:50.743331 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-vkn5h"
Jan 22 06:04:50 crc kubenswrapper[4982]: I0122 06:04:50.859690 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-qgxkb"
Jan 22 06:05:06 crc kubenswrapper[4982]: I0122 06:05:06.482749 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-m4ff6"]
Jan 22 06:05:06 crc kubenswrapper[4982]: I0122 06:05:06.484269 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84bb9d8bd9-m4ff6"
Jan 22 06:05:06 crc kubenswrapper[4982]: I0122 06:05:06.486816 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns"
Jan 22 06:05:06 crc kubenswrapper[4982]: I0122 06:05:06.487917 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-6m5hg"
Jan 22 06:05:06 crc kubenswrapper[4982]: I0122 06:05:06.488144 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt"
Jan 22 06:05:06 crc kubenswrapper[4982]: I0122 06:05:06.489086 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt"
Jan 22 06:05:06 crc kubenswrapper[4982]: I0122 06:05:06.504712 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-m4ff6"]
Jan 22 06:05:06 crc kubenswrapper[4982]: I0122 06:05:06.596188 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-k64h8"]
Jan 22 06:05:06 crc kubenswrapper[4982]: I0122 06:05:06.597238 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f854695bc-k64h8"
Jan 22 06:05:06 crc kubenswrapper[4982]: I0122 06:05:06.599485 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc"
Jan 22 06:05:06 crc kubenswrapper[4982]: I0122 06:05:06.616394 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-k64h8"]
Jan 22 06:05:06 crc kubenswrapper[4982]: I0122 06:05:06.619180 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7r99m\" (UniqueName: \"kubernetes.io/projected/ade17c62-e8a8-4fe7-814c-695de179d3e0-kube-api-access-7r99m\") pod \"dnsmasq-dns-84bb9d8bd9-m4ff6\" (UID: \"ade17c62-e8a8-4fe7-814c-695de179d3e0\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-m4ff6"
Jan 22 06:05:06 crc kubenswrapper[4982]: I0122 06:05:06.619306 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ade17c62-e8a8-4fe7-814c-695de179d3e0-config\") pod \"dnsmasq-dns-84bb9d8bd9-m4ff6\" (UID: \"ade17c62-e8a8-4fe7-814c-695de179d3e0\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-m4ff6"
Jan 22 06:05:06 crc kubenswrapper[4982]: I0122 06:05:06.719873 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q26tn\" (UniqueName: \"kubernetes.io/projected/8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc-kube-api-access-q26tn\") pod \"dnsmasq-dns-5f854695bc-k64h8\" (UID: \"8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc\") " pod="openstack/dnsmasq-dns-5f854695bc-k64h8"
Jan 22 06:05:06 crc kubenswrapper[4982]: I0122 06:05:06.720334 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc-config\") pod \"dnsmasq-dns-5f854695bc-k64h8\" (UID: \"8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc\") " pod="openstack/dnsmasq-dns-5f854695bc-k64h8"
Jan 22 06:05:06 crc kubenswrapper[4982]: I0122 06:05:06.720432 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ade17c62-e8a8-4fe7-814c-695de179d3e0-config\") pod \"dnsmasq-dns-84bb9d8bd9-m4ff6\" (UID: \"ade17c62-e8a8-4fe7-814c-695de179d3e0\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-m4ff6"
Jan 22 06:05:06 crc kubenswrapper[4982]: I0122 06:05:06.720527 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc-dns-svc\") pod \"dnsmasq-dns-5f854695bc-k64h8\" (UID: \"8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc\") " pod="openstack/dnsmasq-dns-5f854695bc-k64h8"
Jan 22 06:05:06 crc kubenswrapper[4982]: I0122 06:05:06.720614 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7r99m\" (UniqueName: \"kubernetes.io/projected/ade17c62-e8a8-4fe7-814c-695de179d3e0-kube-api-access-7r99m\") pod \"dnsmasq-dns-84bb9d8bd9-m4ff6\" (UID: \"ade17c62-e8a8-4fe7-814c-695de179d3e0\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-m4ff6"
Jan 22 06:05:06 crc kubenswrapper[4982]: I0122 06:05:06.721211 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ade17c62-e8a8-4fe7-814c-695de179d3e0-config\") pod \"dnsmasq-dns-84bb9d8bd9-m4ff6\" (UID: \"ade17c62-e8a8-4fe7-814c-695de179d3e0\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-m4ff6"
Jan 22 06:05:06 crc kubenswrapper[4982]: I0122 06:05:06.748235 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7r99m\" (UniqueName: \"kubernetes.io/projected/ade17c62-e8a8-4fe7-814c-695de179d3e0-kube-api-access-7r99m\") pod \"dnsmasq-dns-84bb9d8bd9-m4ff6\" (UID: \"ade17c62-e8a8-4fe7-814c-695de179d3e0\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-m4ff6"
Jan 22 06:05:06 crc kubenswrapper[4982]: I0122 06:05:06.798938 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84bb9d8bd9-m4ff6"
Jan 22 06:05:06 crc kubenswrapper[4982]: I0122 06:05:06.822317 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q26tn\" (UniqueName: \"kubernetes.io/projected/8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc-kube-api-access-q26tn\") pod \"dnsmasq-dns-5f854695bc-k64h8\" (UID: \"8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc\") " pod="openstack/dnsmasq-dns-5f854695bc-k64h8"
Jan 22 06:05:06 crc kubenswrapper[4982]: I0122 06:05:06.822385 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc-config\") pod \"dnsmasq-dns-5f854695bc-k64h8\" (UID: \"8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc\") " pod="openstack/dnsmasq-dns-5f854695bc-k64h8"
Jan 22 06:05:06 crc kubenswrapper[4982]: I0122 06:05:06.822428 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc-dns-svc\") pod \"dnsmasq-dns-5f854695bc-k64h8\" (UID: \"8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc\") " pod="openstack/dnsmasq-dns-5f854695bc-k64h8"
Jan 22 06:05:06 crc kubenswrapper[4982]: I0122 06:05:06.823359 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc-config\") pod \"dnsmasq-dns-5f854695bc-k64h8\" (UID: \"8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc\") " pod="openstack/dnsmasq-dns-5f854695bc-k64h8"
Jan 22 06:05:06 crc kubenswrapper[4982]: I0122 06:05:06.823376 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc-dns-svc\") pod \"dnsmasq-dns-5f854695bc-k64h8\" (UID: \"8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc\") " pod="openstack/dnsmasq-dns-5f854695bc-k64h8"
Jan 22 06:05:06 crc kubenswrapper[4982]: I0122 06:05:06.845580 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q26tn\" (UniqueName: \"kubernetes.io/projected/8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc-kube-api-access-q26tn\") pod \"dnsmasq-dns-5f854695bc-k64h8\" (UID: \"8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc\") " pod="openstack/dnsmasq-dns-5f854695bc-k64h8"
Jan 22 06:05:06 crc kubenswrapper[4982]: I0122 06:05:06.909727 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f854695bc-k64h8"
Jan 22 06:05:07 crc kubenswrapper[4982]: I0122 06:05:07.256313 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-m4ff6"]
Jan 22 06:05:07 crc kubenswrapper[4982]: I0122 06:05:07.414876 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-k64h8"]
Jan 22 06:05:07 crc kubenswrapper[4982]: W0122 06:05:07.416992 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8f326f35_f6d1_4eb5_b91f_3ca61edd9cdc.slice/crio-5aa2e10eb6ac761a4a691e2d6031b0313f232f03184233cbeb7dc25c50ed7d2d WatchSource:0}: Error finding container 5aa2e10eb6ac761a4a691e2d6031b0313f232f03184233cbeb7dc25c50ed7d2d: Status 404 returned error can't find the container with id 5aa2e10eb6ac761a4a691e2d6031b0313f232f03184233cbeb7dc25c50ed7d2d
Jan 22 06:05:07 crc kubenswrapper[4982]: I0122 06:05:07.608967 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f854695bc-k64h8" event={"ID":"8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc","Type":"ContainerStarted","Data":"5aa2e10eb6ac761a4a691e2d6031b0313f232f03184233cbeb7dc25c50ed7d2d"}
Jan 22 06:05:07 crc kubenswrapper[4982]: I0122 06:05:07.610219 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84bb9d8bd9-m4ff6" event={"ID":"ade17c62-e8a8-4fe7-814c-695de179d3e0","Type":"ContainerStarted","Data":"645d9c3220b4a7fda9a13699dac08a43f7e6de0a3605c0b918569de4addd10da"}
Jan 22 06:05:09 crc kubenswrapper[4982]: I0122 06:05:09.123465 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-k64h8"]
Jan 22 06:05:09 crc kubenswrapper[4982]: I0122 06:05:09.168440 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-4vmx6"]
Jan 22 06:05:09 crc kubenswrapper[4982]: I0122 06:05:09.175041 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-744ffd65bc-4vmx6"
Jan 22 06:05:09 crc kubenswrapper[4982]: I0122 06:05:09.186338 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-4vmx6"]
Jan 22 06:05:09 crc kubenswrapper[4982]: I0122 06:05:09.356677 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50ce05bd-1924-4385-a2b2-f058926daab6-config\") pod \"dnsmasq-dns-744ffd65bc-4vmx6\" (UID: \"50ce05bd-1924-4385-a2b2-f058926daab6\") " pod="openstack/dnsmasq-dns-744ffd65bc-4vmx6"
Jan 22 06:05:09 crc kubenswrapper[4982]: I0122 06:05:09.356752 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsqqr\" (UniqueName: \"kubernetes.io/projected/50ce05bd-1924-4385-a2b2-f058926daab6-kube-api-access-gsqqr\") pod \"dnsmasq-dns-744ffd65bc-4vmx6\" (UID: \"50ce05bd-1924-4385-a2b2-f058926daab6\") " pod="openstack/dnsmasq-dns-744ffd65bc-4vmx6"
Jan 22 06:05:09 crc kubenswrapper[4982]: I0122 06:05:09.356794 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/50ce05bd-1924-4385-a2b2-f058926daab6-dns-svc\") pod \"dnsmasq-dns-744ffd65bc-4vmx6\" (UID: \"50ce05bd-1924-4385-a2b2-f058926daab6\") " pod="openstack/dnsmasq-dns-744ffd65bc-4vmx6"
Jan 22 06:05:09 crc kubenswrapper[4982]: I0122 06:05:09.458153 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsqqr\" (UniqueName: \"kubernetes.io/projected/50ce05bd-1924-4385-a2b2-f058926daab6-kube-api-access-gsqqr\") pod \"dnsmasq-dns-744ffd65bc-4vmx6\" (UID: \"50ce05bd-1924-4385-a2b2-f058926daab6\") " pod="openstack/dnsmasq-dns-744ffd65bc-4vmx6"
Jan 22 06:05:09 crc kubenswrapper[4982]: I0122 06:05:09.458228 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/50ce05bd-1924-4385-a2b2-f058926daab6-dns-svc\") pod \"dnsmasq-dns-744ffd65bc-4vmx6\" (UID: \"50ce05bd-1924-4385-a2b2-f058926daab6\") " pod="openstack/dnsmasq-dns-744ffd65bc-4vmx6"
Jan 22 06:05:09 crc kubenswrapper[4982]: I0122 06:05:09.458280 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50ce05bd-1924-4385-a2b2-f058926daab6-config\") pod \"dnsmasq-dns-744ffd65bc-4vmx6\" (UID: \"50ce05bd-1924-4385-a2b2-f058926daab6\") " pod="openstack/dnsmasq-dns-744ffd65bc-4vmx6"
Jan 22 06:05:09 crc kubenswrapper[4982]: I0122 06:05:09.459026 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/50ce05bd-1924-4385-a2b2-f058926daab6-dns-svc\") pod \"dnsmasq-dns-744ffd65bc-4vmx6\" (UID: \"50ce05bd-1924-4385-a2b2-f058926daab6\") " pod="openstack/dnsmasq-dns-744ffd65bc-4vmx6"
Jan 22 06:05:09 crc kubenswrapper[4982]: I0122 06:05:09.459083 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50ce05bd-1924-4385-a2b2-f058926daab6-config\") pod \"dnsmasq-dns-744ffd65bc-4vmx6\" (UID: \"50ce05bd-1924-4385-a2b2-f058926daab6\") " pod="openstack/dnsmasq-dns-744ffd65bc-4vmx6"
Jan 22 06:05:09 crc kubenswrapper[4982]: I0122 06:05:09.493682 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsqqr\" (UniqueName: \"kubernetes.io/projected/50ce05bd-1924-4385-a2b2-f058926daab6-kube-api-access-gsqqr\") pod \"dnsmasq-dns-744ffd65bc-4vmx6\" (UID: \"50ce05bd-1924-4385-a2b2-f058926daab6\") " pod="openstack/dnsmasq-dns-744ffd65bc-4vmx6"
Jan 22 06:05:09 crc kubenswrapper[4982]: I0122 06:05:09.497964 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-m4ff6"]
Jan 22 06:05:09 crc kubenswrapper[4982]: I0122 06:05:09.501534 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-744ffd65bc-4vmx6"
Jan 22 06:05:09 crc kubenswrapper[4982]: I0122 06:05:09.521203 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-4qxqk"]
Jan 22 06:05:09 crc kubenswrapper[4982]: I0122 06:05:09.522214 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95f5f6995-4qxqk"
Jan 22 06:05:09 crc kubenswrapper[4982]: I0122 06:05:09.609817 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-4qxqk"]
Jan 22 06:05:09 crc kubenswrapper[4982]: I0122 06:05:09.664522 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j72zq\" (UniqueName: \"kubernetes.io/projected/d62a0c01-8c58-4e69-8d63-ef332d9171fe-kube-api-access-j72zq\") pod \"dnsmasq-dns-95f5f6995-4qxqk\" (UID: \"d62a0c01-8c58-4e69-8d63-ef332d9171fe\") " pod="openstack/dnsmasq-dns-95f5f6995-4qxqk"
Jan 22 06:05:09 crc kubenswrapper[4982]: I0122 06:05:09.664566 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d62a0c01-8c58-4e69-8d63-ef332d9171fe-dns-svc\") pod \"dnsmasq-dns-95f5f6995-4qxqk\" (UID: \"d62a0c01-8c58-4e69-8d63-ef332d9171fe\") " pod="openstack/dnsmasq-dns-95f5f6995-4qxqk"
Jan 22 06:05:09 crc kubenswrapper[4982]: I0122 06:05:09.664608 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d62a0c01-8c58-4e69-8d63-ef332d9171fe-config\") pod \"dnsmasq-dns-95f5f6995-4qxqk\" (UID: \"d62a0c01-8c58-4e69-8d63-ef332d9171fe\") " pod="openstack/dnsmasq-dns-95f5f6995-4qxqk"
Jan 22 06:05:09 crc kubenswrapper[4982]: I0122 06:05:09.769039 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j72zq\" (UniqueName: \"kubernetes.io/projected/d62a0c01-8c58-4e69-8d63-ef332d9171fe-kube-api-access-j72zq\") pod \"dnsmasq-dns-95f5f6995-4qxqk\" (UID: \"d62a0c01-8c58-4e69-8d63-ef332d9171fe\") " pod="openstack/dnsmasq-dns-95f5f6995-4qxqk"
Jan 22 06:05:09 crc kubenswrapper[4982]: I0122 06:05:09.769361 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d62a0c01-8c58-4e69-8d63-ef332d9171fe-dns-svc\") pod \"dnsmasq-dns-95f5f6995-4qxqk\" (UID: \"d62a0c01-8c58-4e69-8d63-ef332d9171fe\") " pod="openstack/dnsmasq-dns-95f5f6995-4qxqk"
Jan 22 06:05:09 crc kubenswrapper[4982]: I0122 06:05:09.769416 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d62a0c01-8c58-4e69-8d63-ef332d9171fe-config\") pod \"dnsmasq-dns-95f5f6995-4qxqk\" (UID: \"d62a0c01-8c58-4e69-8d63-ef332d9171fe\") " pod="openstack/dnsmasq-dns-95f5f6995-4qxqk"
Jan 22 06:05:09 crc kubenswrapper[4982]: I0122 06:05:09.770328 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d62a0c01-8c58-4e69-8d63-ef332d9171fe-config\") pod \"dnsmasq-dns-95f5f6995-4qxqk\" (UID: \"d62a0c01-8c58-4e69-8d63-ef332d9171fe\") " pod="openstack/dnsmasq-dns-95f5f6995-4qxqk"
Jan 22 06:05:09 crc kubenswrapper[4982]: I0122 06:05:09.770841 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d62a0c01-8c58-4e69-8d63-ef332d9171fe-dns-svc\") pod \"dnsmasq-dns-95f5f6995-4qxqk\" (UID: \"d62a0c01-8c58-4e69-8d63-ef332d9171fe\") " pod="openstack/dnsmasq-dns-95f5f6995-4qxqk"
Jan 22 06:05:09 crc kubenswrapper[4982]: I0122 06:05:09.792043 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j72zq\" (UniqueName: \"kubernetes.io/projected/d62a0c01-8c58-4e69-8d63-ef332d9171fe-kube-api-access-j72zq\") pod \"dnsmasq-dns-95f5f6995-4qxqk\" (UID: \"d62a0c01-8c58-4e69-8d63-ef332d9171fe\") " pod="openstack/dnsmasq-dns-95f5f6995-4qxqk"
Jan 22 06:05:09 crc kubenswrapper[4982]: I0122 06:05:09.914230 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95f5f6995-4qxqk"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.140906 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-4vmx6"]
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.194349 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-4qxqk"]
Jan 22 06:05:10 crc kubenswrapper[4982]: W0122 06:05:10.200183 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd62a0c01_8c58_4e69_8d63_ef332d9171fe.slice/crio-dab2f5604ad0bb6ea259766a843362f9713161f034079481f02890b7e828c414 WatchSource:0}: Error finding container dab2f5604ad0bb6ea259766a843362f9713161f034079481f02890b7e828c414: Status 404 returned error can't find the container with id dab2f5604ad0bb6ea259766a843362f9713161f034079481f02890b7e828c414
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.317201 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.319338 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.322571 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.322749 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.323195 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.323868 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.323973 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.324228 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-gfqgm"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.343239 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.347842 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.481515 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7623a66b-412d-4202-bd05-58fba1c6a3d3-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.481565 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lb4s\" (UniqueName: \"kubernetes.io/projected/7623a66b-412d-4202-bd05-58fba1c6a3d3-kube-api-access-5lb4s\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.481601 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7623a66b-412d-4202-bd05-58fba1c6a3d3-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.481622 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7623a66b-412d-4202-bd05-58fba1c6a3d3-pod-info\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.481639 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7623a66b-412d-4202-bd05-58fba1c6a3d3-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.481656 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7623a66b-412d-4202-bd05-58fba1c6a3d3-config-data\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.481675 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7623a66b-412d-4202-bd05-58fba1c6a3d3-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.481734 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.481756 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7623a66b-412d-4202-bd05-58fba1c6a3d3-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.481774 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7623a66b-412d-4202-bd05-58fba1c6a3d3-server-conf\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.481790 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7623a66b-412d-4202-bd05-58fba1c6a3d3-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.583153 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.583214 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7623a66b-412d-4202-bd05-58fba1c6a3d3-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.583243 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7623a66b-412d-4202-bd05-58fba1c6a3d3-server-conf\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.583260 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7623a66b-412d-4202-bd05-58fba1c6a3d3-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.583283 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7623a66b-412d-4202-bd05-58fba1c6a3d3-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.583300 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5lb4s\" (UniqueName: \"kubernetes.io/projected/7623a66b-412d-4202-bd05-58fba1c6a3d3-kube-api-access-5lb4s\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.583332 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7623a66b-412d-4202-bd05-58fba1c6a3d3-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.583352 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7623a66b-412d-4202-bd05-58fba1c6a3d3-pod-info\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.583368 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7623a66b-412d-4202-bd05-58fba1c6a3d3-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.583386 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7623a66b-412d-4202-bd05-58fba1c6a3d3-config-data\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.583404 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7623a66b-412d-4202-bd05-58fba1c6a3d3-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.583652 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.583946 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7623a66b-412d-4202-bd05-58fba1c6a3d3-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.584283 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7623a66b-412d-4202-bd05-58fba1c6a3d3-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.584639 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7623a66b-412d-4202-bd05-58fba1c6a3d3-server-conf\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.585307 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7623a66b-412d-4202-bd05-58fba1c6a3d3-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.590580 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7623a66b-412d-4202-bd05-58fba1c6a3d3-config-data\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.590592 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7623a66b-412d-4202-bd05-58fba1c6a3d3-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.591425 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7623a66b-412d-4202-bd05-58fba1c6a3d3-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.599375 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7623a66b-412d-4202-bd05-58fba1c6a3d3-pod-info\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.603157 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7623a66b-412d-4202-bd05-58fba1c6a3d3-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.613622 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.619807 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5lb4s\" (UniqueName: \"kubernetes.io/projected/7623a66b-412d-4202-bd05-58fba1c6a3d3-kube-api-access-5lb4s\") pod \"rabbitmq-server-0\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.643634 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.665037 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744ffd65bc-4vmx6" event={"ID":"50ce05bd-1924-4385-a2b2-f058926daab6","Type":"ContainerStarted","Data":"6eca0399621214091823d1ec5d3d55f94038efaf5d8548a37de609c2afbd5d79"}
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.667133 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95f5f6995-4qxqk" event={"ID":"d62a0c01-8c58-4e69-8d63-ef332d9171fe","Type":"ContainerStarted","Data":"dab2f5604ad0bb6ea259766a843362f9713161f034079481f02890b7e828c414"}
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.691422 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.692470 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.694545 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.694911 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-jllq4"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.695109 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.695278 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.695398 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.695521 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.695637 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.709728 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.792486 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/bb198f24-94e6-4569-be12-9ee57000a3e3-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.792798 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/bb198f24-94e6-4569-be12-9ee57000a3e3-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.792819 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bb198f24-94e6-4569-be12-9ee57000a3e3-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.792972 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/bb198f24-94e6-4569-be12-9ee57000a3e3-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.793007 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/bb198f24-94e6-4569-be12-9ee57000a3e3-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.793025 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqpw8\" (UniqueName: \"kubernetes.io/projected/bb198f24-94e6-4569-be12-9ee57000a3e3-kube-api-access-wqpw8\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.793044 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/bb198f24-94e6-4569-be12-9ee57000a3e3-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.793062 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/bb198f24-94e6-4569-be12-9ee57000a3e3-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.793115 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/bb198f24-94e6-4569-be12-9ee57000a3e3-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.793143 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.793158 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/bb198f24-94e6-4569-be12-9ee57000a3e3-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.893990 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/bb198f24-94e6-4569-be12-9ee57000a3e3-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID:
\"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.894053 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/bb198f24-94e6-4569-be12-9ee57000a3e3-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.894082 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.894098 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/bb198f24-94e6-4569-be12-9ee57000a3e3-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.894136 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/bb198f24-94e6-4569-be12-9ee57000a3e3-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.894160 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/bb198f24-94e6-4569-be12-9ee57000a3e3-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.894176 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bb198f24-94e6-4569-be12-9ee57000a3e3-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.894221 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/bb198f24-94e6-4569-be12-9ee57000a3e3-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.894250 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/bb198f24-94e6-4569-be12-9ee57000a3e3-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.894264 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqpw8\" (UniqueName: \"kubernetes.io/projected/bb198f24-94e6-4569-be12-9ee57000a3e3-kube-api-access-wqpw8\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.894278 
4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/bb198f24-94e6-4569-be12-9ee57000a3e3-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.894692 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/bb198f24-94e6-4569-be12-9ee57000a3e3-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.895706 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/bb198f24-94e6-4569-be12-9ee57000a3e3-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.895985 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/rabbitmq-cell1-server-0" Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.900656 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/bb198f24-94e6-4569-be12-9ee57000a3e3-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.901219 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/bb198f24-94e6-4569-be12-9ee57000a3e3-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.901803 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/bb198f24-94e6-4569-be12-9ee57000a3e3-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.896026 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/bb198f24-94e6-4569-be12-9ee57000a3e3-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.904103 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bb198f24-94e6-4569-be12-9ee57000a3e3-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.906566 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/bb198f24-94e6-4569-be12-9ee57000a3e3-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.907331 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/bb198f24-94e6-4569-be12-9ee57000a3e3-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.915629 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqpw8\" (UniqueName: \"kubernetes.io/projected/bb198f24-94e6-4569-be12-9ee57000a3e3-kube-api-access-wqpw8\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 06:05:10 crc kubenswrapper[4982]: I0122 06:05:10.951106 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 06:05:11 crc kubenswrapper[4982]: I0122 06:05:11.045676 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 22 06:05:11 crc kubenswrapper[4982]: I0122 06:05:11.173667 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 22 06:05:11 crc kubenswrapper[4982]: I0122 06:05:11.488213 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 22 06:05:11 crc kubenswrapper[4982]: I0122 06:05:11.675635 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7623a66b-412d-4202-bd05-58fba1c6a3d3","Type":"ContainerStarted","Data":"7df6b506e283f74ec13809ebb9b8530f53ed35eea3c42c90f1517dfd93582813"} Jan 22 06:05:11 crc kubenswrapper[4982]: I0122 06:05:11.877893 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Jan 22 06:05:11 crc kubenswrapper[4982]: I0122 06:05:11.882475 4982 util.go:30] "No sandbox for pod can be found. 
Jan 22 06:05:11 crc kubenswrapper[4982]: I0122 06:05:11.886675 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data"
Jan 22 06:05:11 crc kubenswrapper[4982]: I0122 06:05:11.886721 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts"
Jan 22 06:05:11 crc kubenswrapper[4982]: I0122 06:05:11.890120 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc"
Jan 22 06:05:11 crc kubenswrapper[4982]: I0122 06:05:11.890541 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-m2mgh"
Jan 22 06:05:11 crc kubenswrapper[4982]: I0122 06:05:11.891990 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle"
Jan 22 06:05:11 crc kubenswrapper[4982]: I0122 06:05:11.895959 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Jan 22 06:05:12 crc kubenswrapper[4982]: I0122 06:05:12.009104 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"8c227aae-c987-448b-9545-febc060f0929\") " pod="openstack/openstack-galera-0"
Jan 22 06:05:12 crc kubenswrapper[4982]: I0122 06:05:12.009181 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8c227aae-c987-448b-9545-febc060f0929-kolla-config\") pod \"openstack-galera-0\" (UID: \"8c227aae-c987-448b-9545-febc060f0929\") " pod="openstack/openstack-galera-0"
Jan 22 06:05:12 crc kubenswrapper[4982]: I0122 06:05:12.009207 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w6c99\" (UniqueName: \"kubernetes.io/projected/8c227aae-c987-448b-9545-febc060f0929-kube-api-access-w6c99\") pod \"openstack-galera-0\" (UID: \"8c227aae-c987-448b-9545-febc060f0929\") " pod="openstack/openstack-galera-0"
Jan 22 06:05:12 crc kubenswrapper[4982]: I0122 06:05:12.009243 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8c227aae-c987-448b-9545-febc060f0929-config-data-generated\") pod \"openstack-galera-0\" (UID: \"8c227aae-c987-448b-9545-febc060f0929\") " pod="openstack/openstack-galera-0"
Jan 22 06:05:12 crc kubenswrapper[4982]: I0122 06:05:12.009273 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c227aae-c987-448b-9545-febc060f0929-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"8c227aae-c987-448b-9545-febc060f0929\") " pod="openstack/openstack-galera-0"
Jan 22 06:05:12 crc kubenswrapper[4982]: I0122 06:05:12.009299 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8c227aae-c987-448b-9545-febc060f0929-operator-scripts\") pod \"openstack-galera-0\" (UID: \"8c227aae-c987-448b-9545-febc060f0929\") " pod="openstack/openstack-galera-0"
Jan 22 06:05:12 crc kubenswrapper[4982]: I0122 06:05:12.009353 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c227aae-c987-448b-9545-febc060f0929-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"8c227aae-c987-448b-9545-febc060f0929\") " pod="openstack/openstack-galera-0"
Jan 22 06:05:12 crc kubenswrapper[4982]: I0122 06:05:12.009401 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8c227aae-c987-448b-9545-febc060f0929-config-data-default\") pod \"openstack-galera-0\" (UID: \"8c227aae-c987-448b-9545-febc060f0929\") " pod="openstack/openstack-galera-0"
Jan 22 06:05:12 crc kubenswrapper[4982]: I0122 06:05:12.110888 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c227aae-c987-448b-9545-febc060f0929-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"8c227aae-c987-448b-9545-febc060f0929\") " pod="openstack/openstack-galera-0"
Jan 22 06:05:12 crc kubenswrapper[4982]: I0122 06:05:12.110958 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8c227aae-c987-448b-9545-febc060f0929-config-data-default\") pod \"openstack-galera-0\" (UID: \"8c227aae-c987-448b-9545-febc060f0929\") " pod="openstack/openstack-galera-0"
Jan 22 06:05:12 crc kubenswrapper[4982]: I0122 06:05:12.111002 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"8c227aae-c987-448b-9545-febc060f0929\") " pod="openstack/openstack-galera-0"
Jan 22 06:05:12 crc kubenswrapper[4982]: I0122 06:05:12.111030 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8c227aae-c987-448b-9545-febc060f0929-kolla-config\") pod \"openstack-galera-0\" (UID: \"8c227aae-c987-448b-9545-febc060f0929\") " pod="openstack/openstack-galera-0"
Jan 22 06:05:12 crc kubenswrapper[4982]: I0122 06:05:12.111048 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w6c99\" (UniqueName: \"kubernetes.io/projected/8c227aae-c987-448b-9545-febc060f0929-kube-api-access-w6c99\") pod \"openstack-galera-0\" (UID: \"8c227aae-c987-448b-9545-febc060f0929\") " pod="openstack/openstack-galera-0"
Jan 22 06:05:12 crc kubenswrapper[4982]: I0122 06:05:12.111071 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8c227aae-c987-448b-9545-febc060f0929-config-data-generated\") pod \"openstack-galera-0\" (UID: \"8c227aae-c987-448b-9545-febc060f0929\") " pod="openstack/openstack-galera-0"
Jan 22 06:05:12 crc kubenswrapper[4982]: I0122 06:05:12.111102 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c227aae-c987-448b-9545-febc060f0929-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"8c227aae-c987-448b-9545-febc060f0929\") " pod="openstack/openstack-galera-0"
Jan 22 06:05:12 crc kubenswrapper[4982]: I0122 06:05:12.111117 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8c227aae-c987-448b-9545-febc060f0929-operator-scripts\") pod \"openstack-galera-0\" (UID: \"8c227aae-c987-448b-9545-febc060f0929\") " pod="openstack/openstack-galera-0"
Jan 22 06:05:12 crc kubenswrapper[4982]: I0122 06:05:12.111317 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"8c227aae-c987-448b-9545-febc060f0929\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/openstack-galera-0"
Jan 22 06:05:12 crc kubenswrapper[4982]: I0122 06:05:12.112140 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8c227aae-c987-448b-9545-febc060f0929-kolla-config\") pod \"openstack-galera-0\" (UID: \"8c227aae-c987-448b-9545-febc060f0929\") " pod="openstack/openstack-galera-0"
Jan 22 06:05:12 crc kubenswrapper[4982]: I0122 06:05:12.112573 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8c227aae-c987-448b-9545-febc060f0929-config-data-generated\") pod \"openstack-galera-0\" (UID: \"8c227aae-c987-448b-9545-febc060f0929\") " pod="openstack/openstack-galera-0"
Jan 22 06:05:12 crc kubenswrapper[4982]: I0122 06:05:12.112632 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8c227aae-c987-448b-9545-febc060f0929-config-data-default\") pod \"openstack-galera-0\" (UID: \"8c227aae-c987-448b-9545-febc060f0929\") " pod="openstack/openstack-galera-0"
Jan 22 06:05:12 crc kubenswrapper[4982]: I0122 06:05:12.113536 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8c227aae-c987-448b-9545-febc060f0929-operator-scripts\") pod \"openstack-galera-0\" (UID: \"8c227aae-c987-448b-9545-febc060f0929\") " pod="openstack/openstack-galera-0"
Jan 22 06:05:12 crc kubenswrapper[4982]: I0122 06:05:12.119308 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c227aae-c987-448b-9545-febc060f0929-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"8c227aae-c987-448b-9545-febc060f0929\") " pod="openstack/openstack-galera-0"
Jan 22 06:05:12 crc kubenswrapper[4982]: I0122 06:05:12.119615 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c227aae-c987-448b-9545-febc060f0929-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"8c227aae-c987-448b-9545-febc060f0929\") " pod="openstack/openstack-galera-0"
Jan 22 06:05:12 crc kubenswrapper[4982]: I0122 06:05:12.182087 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w6c99\" (UniqueName: \"kubernetes.io/projected/8c227aae-c987-448b-9545-febc060f0929-kube-api-access-w6c99\") pod \"openstack-galera-0\" (UID: \"8c227aae-c987-448b-9545-febc060f0929\") " pod="openstack/openstack-galera-0"
Jan 22 06:05:12 crc kubenswrapper[4982]: I0122 06:05:12.266057 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"8c227aae-c987-448b-9545-febc060f0929\") " pod="openstack/openstack-galera-0"
Jan 22 06:05:12 crc kubenswrapper[4982]: I0122 06:05:12.515889 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.261865 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"]
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.263339 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.265313 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.265480 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-wn4dg"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.265476 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.267982 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.274491 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.332163 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/536304e2-bf6d-4c4a-a617-69be7bf2931e-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.332247 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/536304e2-bf6d-4c4a-a617-69be7bf2931e-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.332276 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/536304e2-bf6d-4c4a-a617-69be7bf2931e-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.332301 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/536304e2-bf6d-4c4a-a617-69be7bf2931e-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.332320 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/536304e2-bf6d-4c4a-a617-69be7bf2931e-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.332411 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.332435 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zdb7\" (UniqueName: \"kubernetes.io/projected/536304e2-bf6d-4c4a-a617-69be7bf2931e-kube-api-access-5zdb7\") pod \"openstack-cell1-galera-0\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.332479 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/536304e2-bf6d-4c4a-a617-69be7bf2931e-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.434107 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/536304e2-bf6d-4c4a-a617-69be7bf2931e-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.434219 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/536304e2-bf6d-4c4a-a617-69be7bf2931e-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.434284 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/536304e2-bf6d-4c4a-a617-69be7bf2931e-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.434314 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/536304e2-bf6d-4c4a-a617-69be7bf2931e-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.434448 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.434473 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zdb7\" (UniqueName: \"kubernetes.io/projected/536304e2-bf6d-4c4a-a617-69be7bf2931e-kube-api-access-5zdb7\") pod \"openstack-cell1-galera-0\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.434499 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/536304e2-bf6d-4c4a-a617-69be7bf2931e-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.434769 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/536304e2-bf6d-4c4a-a617-69be7bf2931e-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.435001 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/openstack-cell1-galera-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.436013 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/536304e2-bf6d-4c4a-a617-69be7bf2931e-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.436650 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/536304e2-bf6d-4c4a-a617-69be7bf2931e-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.439666 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/536304e2-bf6d-4c4a-a617-69be7bf2931e-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.440072 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/536304e2-bf6d-4c4a-a617-69be7bf2931e-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.440841 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/536304e2-bf6d-4c4a-a617-69be7bf2931e-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.449540 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/536304e2-bf6d-4c4a-a617-69be7bf2931e-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.458232 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zdb7\" (UniqueName: \"kubernetes.io/projected/536304e2-bf6d-4c4a-a617-69be7bf2931e-kube-api-access-5zdb7\") pod \"openstack-cell1-galera-0\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.467193 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.590208 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.606073 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"]
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.607105 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.608949 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.610053 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.610266 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-rmwx4"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.622630 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"]
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.646569 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e-config-data\") pod \"memcached-0\" (UID: \"0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e\") " pod="openstack/memcached-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.646677 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e\") " pod="openstack/memcached-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.646925 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e-kolla-config\") pod \"memcached-0\" (UID: \"0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e\") " pod="openstack/memcached-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.646954 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e\") " pod="openstack/memcached-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.646987 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69dwn\" (UniqueName: \"kubernetes.io/projected/0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e-kube-api-access-69dwn\") pod \"memcached-0\" (UID: \"0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e\") " pod="openstack/memcached-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.748705 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e\") " pod="openstack/memcached-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.748771 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e-kolla-config\") pod \"memcached-0\" (UID: \"0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e\") " pod="openstack/memcached-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.748799 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e\") " pod="openstack/memcached-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.748826 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69dwn\" (UniqueName: \"kubernetes.io/projected/0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e-kube-api-access-69dwn\") pod \"memcached-0\" (UID: \"0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e\") " pod="openstack/memcached-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.748873 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e-config-data\") pod \"memcached-0\" (UID: \"0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e\") " pod="openstack/memcached-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.749598 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e-config-data\") pod \"memcached-0\" (UID: \"0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e\") " pod="openstack/memcached-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.753139 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e-kolla-config\") pod \"memcached-0\" (UID: \"0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e\") " pod="openstack/memcached-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.755986 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e\") " pod="openstack/memcached-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.756305 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e\") " pod="openstack/memcached-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.787485 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69dwn\" (UniqueName: \"kubernetes.io/projected/0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e-kube-api-access-69dwn\") pod \"memcached-0\" (UID: \"0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e\") " pod="openstack/memcached-0"
Jan 22 06:05:13 crc kubenswrapper[4982]: I0122 06:05:13.923369 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Jan 22 06:05:15 crc kubenswrapper[4982]: I0122 06:05:15.446147 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"]
Jan 22 06:05:15 crc kubenswrapper[4982]: I0122 06:05:15.447299 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Jan 22 06:05:15 crc kubenswrapper[4982]: I0122 06:05:15.450082 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-xp54b"
Jan 22 06:05:15 crc kubenswrapper[4982]: I0122 06:05:15.454791 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Jan 22 06:05:15 crc kubenswrapper[4982]: I0122 06:05:15.577621 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kx642\" (UniqueName: \"kubernetes.io/projected/e75b4d57-9a8d-4185-8dfe-9ce81eb81fee-kube-api-access-kx642\") pod \"kube-state-metrics-0\" (UID: \"e75b4d57-9a8d-4185-8dfe-9ce81eb81fee\") " pod="openstack/kube-state-metrics-0"
Jan 22 06:05:15 crc kubenswrapper[4982]: I0122 06:05:15.678469 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kx642\" (UniqueName: \"kubernetes.io/projected/e75b4d57-9a8d-4185-8dfe-9ce81eb81fee-kube-api-access-kx642\") pod \"kube-state-metrics-0\" (UID: \"e75b4d57-9a8d-4185-8dfe-9ce81eb81fee\") " pod="openstack/kube-state-metrics-0"
Jan 22 06:05:15 crc kubenswrapper[4982]: I0122 06:05:15.704356 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kx642\" (UniqueName: \"kubernetes.io/projected/e75b4d57-9a8d-4185-8dfe-9ce81eb81fee-kube-api-access-kx642\") pod \"kube-state-metrics-0\" (UID: \"e75b4d57-9a8d-4185-8dfe-9ce81eb81fee\") " pod="openstack/kube-state-metrics-0"
Jan 22 06:05:15 crc kubenswrapper[4982]: I0122 06:05:15.766463 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Jan 22 06:05:18 crc kubenswrapper[4982]: I0122 06:05:18.731522 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"bb198f24-94e6-4569-be12-9ee57000a3e3","Type":"ContainerStarted","Data":"6bcae2387f08466b835ab90ceae11cd06eaebd809d7f73f39e492c3ee8c39c3e"}
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.676010 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-4z48g"]
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.677163 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4z48g"
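Unlike the pods above, kube-state-metrics-0 mounts a single volume, the auto-generated "kube-api-access-kx642" service-account token projection. The log confirms only the volume name and its projected type; the sketch below shows the sources such a volume conventionally carries (bound token, cluster CA bundle, namespace file), with the expiry and paths being the usual defaults rather than anything this log states.

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func main() {
	expiry := int64(3607) // conventional default; assumed, not from the log
	v := corev1.Volume{
		Name: "kube-api-access-kx642",
		VolumeSource: corev1.VolumeSource{
			Projected: &corev1.ProjectedVolumeSource{
				Sources: []corev1.VolumeProjection{
					// Bound service-account token, rotated by the kubelet.
					{ServiceAccountToken: &corev1.ServiceAccountTokenProjection{
						ExpirationSeconds: &expiry,
						Path:              "token",
					}},
					// Cluster CA bundle for verifying the API server.
					{ConfigMap: &corev1.ConfigMapProjection{
						LocalObjectReference: corev1.LocalObjectReference{Name: "kube-root-ca.crt"},
						Items:                []corev1.KeyToPath{{Key: "ca.crt", Path: "ca.crt"}},
					}},
					// The pod's own namespace via the downward API.
					{DownwardAPI: &corev1.DownwardAPIProjection{
						Items: []corev1.DownwardAPIVolumeFile{{
							Path:     "namespace",
							FieldRef: &corev1.ObjectFieldSelector{FieldPath: "metadata.namespace"},
						}},
					}},
				},
			},
		},
	}
	fmt.Println(v.Name, "sources:", len(v.Projected.Sources))
}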
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.679842 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.680225 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.684427 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-8pvph"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.694777 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4z48g"]
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.736033 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-74zx8"]
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.738321 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-74zx8"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.745724 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-74zx8"]
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.853715 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/e8478549-adf2-4e04-b404-f4882ed405d5-var-log\") pod \"ovn-controller-ovs-74zx8\" (UID: \"e8478549-adf2-4e04-b404-f4882ed405d5\") " pod="openstack/ovn-controller-ovs-74zx8"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.854062 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e8478549-adf2-4e04-b404-f4882ed405d5-scripts\") pod \"ovn-controller-ovs-74zx8\" (UID: \"e8478549-adf2-4e04-b404-f4882ed405d5\") " pod="openstack/ovn-controller-ovs-74zx8"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.854471 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nll4v\" (UniqueName: \"kubernetes.io/projected/29f503d7-a98b-4227-b9d9-865db16a2552-kube-api-access-nll4v\") pod \"ovn-controller-4z48g\" (UID: \"29f503d7-a98b-4227-b9d9-865db16a2552\") " pod="openstack/ovn-controller-4z48g"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.854989 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29f503d7-a98b-4227-b9d9-865db16a2552-combined-ca-bundle\") pod \"ovn-controller-4z48g\" (UID: \"29f503d7-a98b-4227-b9d9-865db16a2552\") " pod="openstack/ovn-controller-4z48g"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.855013 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/29f503d7-a98b-4227-b9d9-865db16a2552-ovn-controller-tls-certs\") pod \"ovn-controller-4z48g\" (UID: \"29f503d7-a98b-4227-b9d9-865db16a2552\") " pod="openstack/ovn-controller-4z48g"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.855034 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e8478549-adf2-4e04-b404-f4882ed405d5-var-run\") pod \"ovn-controller-ovs-74zx8\" (UID: \"e8478549-adf2-4e04-b404-f4882ed405d5\") " pod="openstack/ovn-controller-ovs-74zx8"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.855085 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/29f503d7-a98b-4227-b9d9-865db16a2552-var-log-ovn\") pod \"ovn-controller-4z48g\" (UID: \"29f503d7-a98b-4227-b9d9-865db16a2552\") " pod="openstack/ovn-controller-4z48g"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.855106 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/29f503d7-a98b-4227-b9d9-865db16a2552-var-run\") pod \"ovn-controller-4z48g\" (UID: \"29f503d7-a98b-4227-b9d9-865db16a2552\") " pod="openstack/ovn-controller-4z48g"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.855128 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/e8478549-adf2-4e04-b404-f4882ed405d5-var-lib\") pod \"ovn-controller-ovs-74zx8\" (UID: \"e8478549-adf2-4e04-b404-f4882ed405d5\") " pod="openstack/ovn-controller-ovs-74zx8"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.855147 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/29f503d7-a98b-4227-b9d9-865db16a2552-var-run-ovn\") pod \"ovn-controller-4z48g\" (UID: \"29f503d7-a98b-4227-b9d9-865db16a2552\") " pod="openstack/ovn-controller-4z48g"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.855184 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/e8478549-adf2-4e04-b404-f4882ed405d5-etc-ovs\") pod \"ovn-controller-ovs-74zx8\" (UID: \"e8478549-adf2-4e04-b404-f4882ed405d5\") " pod="openstack/ovn-controller-ovs-74zx8"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.855214 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/29f503d7-a98b-4227-b9d9-865db16a2552-scripts\") pod \"ovn-controller-4z48g\" (UID: \"29f503d7-a98b-4227-b9d9-865db16a2552\") " pod="openstack/ovn-controller-4z48g"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.855257 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2xz5\" (UniqueName: \"kubernetes.io/projected/e8478549-adf2-4e04-b404-f4882ed405d5-kube-api-access-h2xz5\") pod \"ovn-controller-ovs-74zx8\" (UID: \"e8478549-adf2-4e04-b404-f4882ed405d5\") " pod="openstack/ovn-controller-ovs-74zx8"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.956152 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e8478549-adf2-4e04-b404-f4882ed405d5-scripts\") pod \"ovn-controller-ovs-74zx8\" (UID: \"e8478549-adf2-4e04-b404-f4882ed405d5\") " pod="openstack/ovn-controller-ovs-74zx8"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.956208 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nll4v\" (UniqueName: \"kubernetes.io/projected/29f503d7-a98b-4227-b9d9-865db16a2552-kube-api-access-nll4v\") pod \"ovn-controller-4z48g\" (UID: \"29f503d7-a98b-4227-b9d9-865db16a2552\") " pod="openstack/ovn-controller-4z48g"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.956229 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29f503d7-a98b-4227-b9d9-865db16a2552-combined-ca-bundle\") pod \"ovn-controller-4z48g\" (UID: \"29f503d7-a98b-4227-b9d9-865db16a2552\") " pod="openstack/ovn-controller-4z48g"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.956246 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/29f503d7-a98b-4227-b9d9-865db16a2552-ovn-controller-tls-certs\") pod \"ovn-controller-4z48g\" (UID: \"29f503d7-a98b-4227-b9d9-865db16a2552\") " pod="openstack/ovn-controller-4z48g"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.956280 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e8478549-adf2-4e04-b404-f4882ed405d5-var-run\") pod \"ovn-controller-ovs-74zx8\" (UID: \"e8478549-adf2-4e04-b404-f4882ed405d5\") " pod="openstack/ovn-controller-ovs-74zx8"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.956340 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/29f503d7-a98b-4227-b9d9-865db16a2552-var-log-ovn\") pod \"ovn-controller-4z48g\" (UID: \"29f503d7-a98b-4227-b9d9-865db16a2552\") " pod="openstack/ovn-controller-4z48g"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.956366 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/29f503d7-a98b-4227-b9d9-865db16a2552-var-run\") pod \"ovn-controller-4z48g\" (UID: \"29f503d7-a98b-4227-b9d9-865db16a2552\") " pod="openstack/ovn-controller-4z48g"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.956387 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/e8478549-adf2-4e04-b404-f4882ed405d5-var-lib\") pod \"ovn-controller-ovs-74zx8\" (UID: \"e8478549-adf2-4e04-b404-f4882ed405d5\") " pod="openstack/ovn-controller-ovs-74zx8"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.956403 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/29f503d7-a98b-4227-b9d9-865db16a2552-var-run-ovn\") pod \"ovn-controller-4z48g\" (UID: \"29f503d7-a98b-4227-b9d9-865db16a2552\") " pod="openstack/ovn-controller-4z48g"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.956429 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/e8478549-adf2-4e04-b404-f4882ed405d5-etc-ovs\") pod \"ovn-controller-ovs-74zx8\" (UID: \"e8478549-adf2-4e04-b404-f4882ed405d5\") " pod="openstack/ovn-controller-ovs-74zx8"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.956455 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/29f503d7-a98b-4227-b9d9-865db16a2552-scripts\") pod \"ovn-controller-4z48g\" (UID: \"29f503d7-a98b-4227-b9d9-865db16a2552\") " pod="openstack/ovn-controller-4z48g"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.956488 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2xz5\" (UniqueName: \"kubernetes.io/projected/e8478549-adf2-4e04-b404-f4882ed405d5-kube-api-access-h2xz5\") pod \"ovn-controller-ovs-74zx8\" (UID: \"e8478549-adf2-4e04-b404-f4882ed405d5\") " pod="openstack/ovn-controller-ovs-74zx8"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.956508 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/e8478549-adf2-4e04-b404-f4882ed405d5-var-log\") pod \"ovn-controller-ovs-74zx8\" (UID: \"e8478549-adf2-4e04-b404-f4882ed405d5\") " pod="openstack/ovn-controller-ovs-74zx8"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.957065 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/29f503d7-a98b-4227-b9d9-865db16a2552-var-run\") pod \"ovn-controller-4z48g\" (UID: \"29f503d7-a98b-4227-b9d9-865db16a2552\") " pod="openstack/ovn-controller-4z48g"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.957153 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/e8478549-adf2-4e04-b404-f4882ed405d5-var-log\") pod \"ovn-controller-ovs-74zx8\" (UID: \"e8478549-adf2-4e04-b404-f4882ed405d5\") " pod="openstack/ovn-controller-ovs-74zx8"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.957323 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/e8478549-adf2-4e04-b404-f4882ed405d5-var-lib\") pod \"ovn-controller-ovs-74zx8\" (UID: \"e8478549-adf2-4e04-b404-f4882ed405d5\") " pod="openstack/ovn-controller-ovs-74zx8"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.957454 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/29f503d7-a98b-4227-b9d9-865db16a2552-var-run-ovn\") pod \"ovn-controller-4z48g\" (UID: \"29f503d7-a98b-4227-b9d9-865db16a2552\") " pod="openstack/ovn-controller-4z48g"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.957580 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/e8478549-adf2-4e04-b404-f4882ed405d5-etc-ovs\") pod \"ovn-controller-ovs-74zx8\" (UID: \"e8478549-adf2-4e04-b404-f4882ed405d5\") " pod="openstack/ovn-controller-ovs-74zx8"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.957913 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e8478549-adf2-4e04-b404-f4882ed405d5-var-run\") pod \"ovn-controller-ovs-74zx8\" (UID: \"e8478549-adf2-4e04-b404-f4882ed405d5\") " pod="openstack/ovn-controller-ovs-74zx8"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.958290 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/29f503d7-a98b-4227-b9d9-865db16a2552-var-log-ovn\") pod \"ovn-controller-4z48g\" (UID: \"29f503d7-a98b-4227-b9d9-865db16a2552\") " pod="openstack/ovn-controller-4z48g"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.958870 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e8478549-adf2-4e04-b404-f4882ed405d5-scripts\") pod \"ovn-controller-ovs-74zx8\" (UID: \"e8478549-adf2-4e04-b404-f4882ed405d5\") " pod="openstack/ovn-controller-ovs-74zx8"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.959996 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/29f503d7-a98b-4227-b9d9-865db16a2552-scripts\") pod \"ovn-controller-4z48g\" (UID: \"29f503d7-a98b-4227-b9d9-865db16a2552\") " pod="openstack/ovn-controller-4z48g"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.963809 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29f503d7-a98b-4227-b9d9-865db16a2552-combined-ca-bundle\") pod \"ovn-controller-4z48g\" (UID: \"29f503d7-a98b-4227-b9d9-865db16a2552\") " pod="openstack/ovn-controller-4z48g"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.970884 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/29f503d7-a98b-4227-b9d9-865db16a2552-ovn-controller-tls-certs\") pod \"ovn-controller-4z48g\" (UID: \"29f503d7-a98b-4227-b9d9-865db16a2552\") " pod="openstack/ovn-controller-4z48g"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.972551 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nll4v\" (UniqueName: \"kubernetes.io/projected/29f503d7-a98b-4227-b9d9-865db16a2552-kube-api-access-nll4v\") pod \"ovn-controller-4z48g\" (UID: \"29f503d7-a98b-4227-b9d9-865db16a2552\") " pod="openstack/ovn-controller-4z48g"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.973999 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2xz5\" (UniqueName: \"kubernetes.io/projected/e8478549-adf2-4e04-b404-f4882ed405d5-kube-api-access-h2xz5\") pod \"ovn-controller-ovs-74zx8\" (UID: \"e8478549-adf2-4e04-b404-f4882ed405d5\") " pod="openstack/ovn-controller-ovs-74zx8"
Jan 22 06:05:20 crc kubenswrapper[4982]: I0122 06:05:20.996903 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4z48g"
Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.087594 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-74zx8"
Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.579358 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"]
Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.582365 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.586095 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.586550 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.588282 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.589266 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-kshtb" Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.590342 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.599092 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.767082 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/abef47d7-89c0-4c16-8474-8db3d2d127e7-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") " pod="openstack/ovsdbserver-sb-0" Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.767134 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/abef47d7-89c0-4c16-8474-8db3d2d127e7-config\") pod \"ovsdbserver-sb-0\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") " pod="openstack/ovsdbserver-sb-0" Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.767170 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abef47d7-89c0-4c16-8474-8db3d2d127e7-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") " pod="openstack/ovsdbserver-sb-0" Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.767206 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/abef47d7-89c0-4c16-8474-8db3d2d127e7-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") " pod="openstack/ovsdbserver-sb-0" Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.767230 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/abef47d7-89c0-4c16-8474-8db3d2d127e7-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") " pod="openstack/ovsdbserver-sb-0" Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.767335 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/abef47d7-89c0-4c16-8474-8db3d2d127e7-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") " pod="openstack/ovsdbserver-sb-0" Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.767448 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-ms8wf\" (UniqueName: \"kubernetes.io/projected/abef47d7-89c0-4c16-8474-8db3d2d127e7-kube-api-access-ms8wf\") pod \"ovsdbserver-sb-0\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") " pod="openstack/ovsdbserver-sb-0" Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.767535 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") " pod="openstack/ovsdbserver-sb-0" Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.869166 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") " pod="openstack/ovsdbserver-sb-0" Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.869562 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/abef47d7-89c0-4c16-8474-8db3d2d127e7-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") " pod="openstack/ovsdbserver-sb-0" Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.869602 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/abef47d7-89c0-4c16-8474-8db3d2d127e7-config\") pod \"ovsdbserver-sb-0\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") " pod="openstack/ovsdbserver-sb-0" Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.869430 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/ovsdbserver-sb-0" Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.869669 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abef47d7-89c0-4c16-8474-8db3d2d127e7-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") " pod="openstack/ovsdbserver-sb-0" Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.869745 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/abef47d7-89c0-4c16-8474-8db3d2d127e7-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") " pod="openstack/ovsdbserver-sb-0" Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.869797 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/abef47d7-89c0-4c16-8474-8db3d2d127e7-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") " pod="openstack/ovsdbserver-sb-0" Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.869826 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/abef47d7-89c0-4c16-8474-8db3d2d127e7-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") " pod="openstack/ovsdbserver-sb-0" Jan 22 06:05:21 crc kubenswrapper[4982]: 
I0122 06:05:21.869890 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ms8wf\" (UniqueName: \"kubernetes.io/projected/abef47d7-89c0-4c16-8474-8db3d2d127e7-kube-api-access-ms8wf\") pod \"ovsdbserver-sb-0\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") " pod="openstack/ovsdbserver-sb-0" Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.870450 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/abef47d7-89c0-4c16-8474-8db3d2d127e7-config\") pod \"ovsdbserver-sb-0\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") " pod="openstack/ovsdbserver-sb-0" Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.870608 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/abef47d7-89c0-4c16-8474-8db3d2d127e7-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") " pod="openstack/ovsdbserver-sb-0" Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.871335 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/abef47d7-89c0-4c16-8474-8db3d2d127e7-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") " pod="openstack/ovsdbserver-sb-0" Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.873431 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abef47d7-89c0-4c16-8474-8db3d2d127e7-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") " pod="openstack/ovsdbserver-sb-0" Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.874173 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/abef47d7-89c0-4c16-8474-8db3d2d127e7-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") " pod="openstack/ovsdbserver-sb-0" Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.876224 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/abef47d7-89c0-4c16-8474-8db3d2d127e7-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") " pod="openstack/ovsdbserver-sb-0" Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.887634 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ms8wf\" (UniqueName: \"kubernetes.io/projected/abef47d7-89c0-4c16-8474-8db3d2d127e7-kube-api-access-ms8wf\") pod \"ovsdbserver-sb-0\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") " pod="openstack/ovsdbserver-sb-0" Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.898979 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") " pod="openstack/ovsdbserver-sb-0" Jan 22 06:05:21 crc kubenswrapper[4982]: I0122 06:05:21.907988 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jan 22 06:05:22 crc kubenswrapper[4982]: I0122 06:05:22.758521 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 22 06:05:22 crc kubenswrapper[4982]: I0122 06:05:22.759749 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 22 06:05:22 crc kubenswrapper[4982]: I0122 06:05:22.764474 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Jan 22 06:05:22 crc kubenswrapper[4982]: I0122 06:05:22.764508 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Jan 22 06:05:22 crc kubenswrapper[4982]: I0122 06:05:22.765001 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Jan 22 06:05:22 crc kubenswrapper[4982]: I0122 06:05:22.765165 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-559b9" Jan 22 06:05:22 crc kubenswrapper[4982]: I0122 06:05:22.769497 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 22 06:05:22 crc kubenswrapper[4982]: I0122 06:05:22.904597 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d94xx\" (UniqueName: \"kubernetes.io/projected/ef18a198-b47b-4e19-a56b-7179e76a1e12-kube-api-access-d94xx\") pod \"ovsdbserver-nb-0\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") " pod="openstack/ovsdbserver-nb-0" Jan 22 06:05:22 crc kubenswrapper[4982]: I0122 06:05:22.904746 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef18a198-b47b-4e19-a56b-7179e76a1e12-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") " pod="openstack/ovsdbserver-nb-0" Jan 22 06:05:22 crc kubenswrapper[4982]: I0122 06:05:22.904820 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ef18a198-b47b-4e19-a56b-7179e76a1e12-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") " pod="openstack/ovsdbserver-nb-0" Jan 22 06:05:22 crc kubenswrapper[4982]: I0122 06:05:22.904902 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ef18a198-b47b-4e19-a56b-7179e76a1e12-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") " pod="openstack/ovsdbserver-nb-0" Jan 22 06:05:22 crc kubenswrapper[4982]: I0122 06:05:22.904948 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") " pod="openstack/ovsdbserver-nb-0" Jan 22 06:05:22 crc kubenswrapper[4982]: I0122 06:05:22.904995 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef18a198-b47b-4e19-a56b-7179e76a1e12-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") " pod="openstack/ovsdbserver-nb-0" Jan 22 06:05:22 crc 
kubenswrapper[4982]: I0122 06:05:22.905086 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef18a198-b47b-4e19-a56b-7179e76a1e12-config\") pod \"ovsdbserver-nb-0\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") " pod="openstack/ovsdbserver-nb-0" Jan 22 06:05:22 crc kubenswrapper[4982]: I0122 06:05:22.905188 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef18a198-b47b-4e19-a56b-7179e76a1e12-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") " pod="openstack/ovsdbserver-nb-0" Jan 22 06:05:23 crc kubenswrapper[4982]: I0122 06:05:23.006750 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d94xx\" (UniqueName: \"kubernetes.io/projected/ef18a198-b47b-4e19-a56b-7179e76a1e12-kube-api-access-d94xx\") pod \"ovsdbserver-nb-0\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") " pod="openstack/ovsdbserver-nb-0" Jan 22 06:05:23 crc kubenswrapper[4982]: I0122 06:05:23.006810 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef18a198-b47b-4e19-a56b-7179e76a1e12-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") " pod="openstack/ovsdbserver-nb-0" Jan 22 06:05:23 crc kubenswrapper[4982]: I0122 06:05:23.006836 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ef18a198-b47b-4e19-a56b-7179e76a1e12-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") " pod="openstack/ovsdbserver-nb-0" Jan 22 06:05:23 crc kubenswrapper[4982]: I0122 06:05:23.006887 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ef18a198-b47b-4e19-a56b-7179e76a1e12-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") " pod="openstack/ovsdbserver-nb-0" Jan 22 06:05:23 crc kubenswrapper[4982]: I0122 06:05:23.006908 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") " pod="openstack/ovsdbserver-nb-0" Jan 22 06:05:23 crc kubenswrapper[4982]: I0122 06:05:23.006929 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef18a198-b47b-4e19-a56b-7179e76a1e12-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") " pod="openstack/ovsdbserver-nb-0" Jan 22 06:05:23 crc kubenswrapper[4982]: I0122 06:05:23.006960 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef18a198-b47b-4e19-a56b-7179e76a1e12-config\") pod \"ovsdbserver-nb-0\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") " pod="openstack/ovsdbserver-nb-0" Jan 22 06:05:23 crc kubenswrapper[4982]: I0122 06:05:23.006989 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef18a198-b47b-4e19-a56b-7179e76a1e12-metrics-certs-tls-certs\") 
pod \"ovsdbserver-nb-0\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") " pod="openstack/ovsdbserver-nb-0" Jan 22 06:05:23 crc kubenswrapper[4982]: I0122 06:05:23.007640 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/ovsdbserver-nb-0" Jan 22 06:05:23 crc kubenswrapper[4982]: I0122 06:05:23.008912 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ef18a198-b47b-4e19-a56b-7179e76a1e12-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") " pod="openstack/ovsdbserver-nb-0" Jan 22 06:05:23 crc kubenswrapper[4982]: I0122 06:05:23.010181 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ef18a198-b47b-4e19-a56b-7179e76a1e12-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") " pod="openstack/ovsdbserver-nb-0" Jan 22 06:05:23 crc kubenswrapper[4982]: I0122 06:05:23.010804 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef18a198-b47b-4e19-a56b-7179e76a1e12-config\") pod \"ovsdbserver-nb-0\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") " pod="openstack/ovsdbserver-nb-0" Jan 22 06:05:23 crc kubenswrapper[4982]: I0122 06:05:23.012038 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef18a198-b47b-4e19-a56b-7179e76a1e12-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") " pod="openstack/ovsdbserver-nb-0" Jan 22 06:05:23 crc kubenswrapper[4982]: I0122 06:05:23.013616 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef18a198-b47b-4e19-a56b-7179e76a1e12-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") " pod="openstack/ovsdbserver-nb-0" Jan 22 06:05:23 crc kubenswrapper[4982]: I0122 06:05:23.023268 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef18a198-b47b-4e19-a56b-7179e76a1e12-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") " pod="openstack/ovsdbserver-nb-0" Jan 22 06:05:23 crc kubenswrapper[4982]: I0122 06:05:23.032937 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d94xx\" (UniqueName: \"kubernetes.io/projected/ef18a198-b47b-4e19-a56b-7179e76a1e12-kube-api-access-d94xx\") pod \"ovsdbserver-nb-0\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") " pod="openstack/ovsdbserver-nb-0" Jan 22 06:05:23 crc kubenswrapper[4982]: I0122 06:05:23.044030 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") " pod="openstack/ovsdbserver-nb-0" Jan 22 06:05:23 crc kubenswrapper[4982]: I0122 06:05:23.092140 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 22 06:05:26 crc kubenswrapper[4982]: I0122 06:05:26.283658 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Jan 22 06:05:26 crc kubenswrapper[4982]: E0122 06:05:26.632704 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33" Jan 22 06:05:26 crc kubenswrapper[4982]: E0122 06:05:26.632903 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7r99m,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-84bb9d8bd9-m4ff6_openstack(ade17c62-e8a8-4fe7-814c-695de179d3e0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 06:05:26 crc kubenswrapper[4982]: E0122 06:05:26.634230 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-84bb9d8bd9-m4ff6" podUID="ade17c62-e8a8-4fe7-814c-695de179d3e0" Jan 22 06:05:27 crc kubenswrapper[4982]: W0122 06:05:27.524017 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0f503dfc_13a3_4f48_8d84_c0b8a8e54b6e.slice/crio-1124733371d0c1be6191c0211899f63d41958d4127cb4c8023b3908a10873171 WatchSource:0}: Error finding container 
1124733371d0c1be6191c0211899f63d41958d4127cb4c8023b3908a10873171: Status 404 returned error can't find the container with id 1124733371d0c1be6191c0211899f63d41958d4127cb4c8023b3908a10873171 Jan 22 06:05:27 crc kubenswrapper[4982]: E0122 06:05:27.595844 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33" Jan 22 06:05:27 crc kubenswrapper[4982]: E0122 06:05:27.596003 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-q26tn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-5f854695bc-k64h8_openstack(8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 06:05:27 crc kubenswrapper[4982]: E0122 06:05:27.597287 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-5f854695bc-k64h8" podUID="8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc" Jan 22 06:05:27 crc kubenswrapper[4982]: I0122 06:05:27.672403 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84bb9d8bd9-m4ff6" Jan 22 06:05:27 crc kubenswrapper[4982]: I0122 06:05:27.787103 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ade17c62-e8a8-4fe7-814c-695de179d3e0-config\") pod \"ade17c62-e8a8-4fe7-814c-695de179d3e0\" (UID: \"ade17c62-e8a8-4fe7-814c-695de179d3e0\") " Jan 22 06:05:27 crc kubenswrapper[4982]: I0122 06:05:27.787174 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7r99m\" (UniqueName: \"kubernetes.io/projected/ade17c62-e8a8-4fe7-814c-695de179d3e0-kube-api-access-7r99m\") pod \"ade17c62-e8a8-4fe7-814c-695de179d3e0\" (UID: \"ade17c62-e8a8-4fe7-814c-695de179d3e0\") " Jan 22 06:05:27 crc kubenswrapper[4982]: I0122 06:05:27.787775 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ade17c62-e8a8-4fe7-814c-695de179d3e0-config" (OuterVolumeSpecName: "config") pod "ade17c62-e8a8-4fe7-814c-695de179d3e0" (UID: "ade17c62-e8a8-4fe7-814c-695de179d3e0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:05:27 crc kubenswrapper[4982]: I0122 06:05:27.794696 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ade17c62-e8a8-4fe7-814c-695de179d3e0-kube-api-access-7r99m" (OuterVolumeSpecName: "kube-api-access-7r99m") pod "ade17c62-e8a8-4fe7-814c-695de179d3e0" (UID: "ade17c62-e8a8-4fe7-814c-695de179d3e0"). InnerVolumeSpecName "kube-api-access-7r99m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:05:27 crc kubenswrapper[4982]: I0122 06:05:27.820365 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e","Type":"ContainerStarted","Data":"1124733371d0c1be6191c0211899f63d41958d4127cb4c8023b3908a10873171"} Jan 22 06:05:27 crc kubenswrapper[4982]: I0122 06:05:27.821633 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84bb9d8bd9-m4ff6" Jan 22 06:05:27 crc kubenswrapper[4982]: I0122 06:05:27.821651 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84bb9d8bd9-m4ff6" event={"ID":"ade17c62-e8a8-4fe7-814c-695de179d3e0","Type":"ContainerDied","Data":"645d9c3220b4a7fda9a13699dac08a43f7e6de0a3605c0b918569de4addd10da"} Jan 22 06:05:27 crc kubenswrapper[4982]: I0122 06:05:27.891002 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ade17c62-e8a8-4fe7-814c-695de179d3e0-config\") on node \"crc\" DevicePath \"\"" Jan 22 06:05:27 crc kubenswrapper[4982]: I0122 06:05:27.891028 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7r99m\" (UniqueName: \"kubernetes.io/projected/ade17c62-e8a8-4fe7-814c-695de179d3e0-kube-api-access-7r99m\") on node \"crc\" DevicePath \"\"" Jan 22 06:05:27 crc kubenswrapper[4982]: I0122 06:05:27.945749 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-m4ff6"] Jan 22 06:05:27 crc kubenswrapper[4982]: I0122 06:05:27.952455 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-m4ff6"] Jan 22 06:05:28 crc kubenswrapper[4982]: E0122 06:05:28.110946 4982 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd62a0c01_8c58_4e69_8d63_ef332d9171fe.slice/crio-0b4a539450b8b5e8f3229963ac5df1b7bf5ec04aa8a6758b7ce804db1cf04709.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd62a0c01_8c58_4e69_8d63_ef332d9171fe.slice/crio-conmon-0b4a539450b8b5e8f3229963ac5df1b7bf5ec04aa8a6758b7ce804db1cf04709.scope\": RecentStats: unable to find data in memory cache]" Jan 22 06:05:28 crc kubenswrapper[4982]: W0122 06:05:28.361783 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod29f503d7_a98b_4227_b9d9_865db16a2552.slice/crio-ed61b64b0d0ead28c0e517f76c165751287f4dbabc0f8cb505f59fcbad460db5 WatchSource:0}: Error finding container ed61b64b0d0ead28c0e517f76c165751287f4dbabc0f8cb505f59fcbad460db5: Status 404 returned error can't find the container with id ed61b64b0d0ead28c0e517f76c165751287f4dbabc0f8cb505f59fcbad460db5 Jan 22 06:05:28 crc kubenswrapper[4982]: I0122 06:05:28.363595 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f854695bc-k64h8" Jan 22 06:05:28 crc kubenswrapper[4982]: I0122 06:05:28.381901 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Jan 22 06:05:28 crc kubenswrapper[4982]: I0122 06:05:28.388958 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4z48g"] Jan 22 06:05:28 crc kubenswrapper[4982]: I0122 06:05:28.394399 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 22 06:05:28 crc kubenswrapper[4982]: W0122 06:05:28.400697 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode75b4d57_9a8d_4185_8dfe_9ce81eb81fee.slice/crio-c570c9ae50656c3a32d25903a644765fa5e6f11ec2ec972ceb7843463d07f83f WatchSource:0}: Error finding container c570c9ae50656c3a32d25903a644765fa5e6f11ec2ec972ceb7843463d07f83f: Status 404 returned error can't find the container with id c570c9ae50656c3a32d25903a644765fa5e6f11ec2ec972ceb7843463d07f83f Jan 22 06:05:28 crc kubenswrapper[4982]: I0122 06:05:28.417966 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 22 06:05:28 crc kubenswrapper[4982]: I0122 06:05:28.492468 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 22 06:05:28 crc kubenswrapper[4982]: I0122 06:05:28.503091 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q26tn\" (UniqueName: \"kubernetes.io/projected/8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc-kube-api-access-q26tn\") pod \"8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc\" (UID: \"8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc\") " Jan 22 06:05:28 crc kubenswrapper[4982]: I0122 06:05:28.503163 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc-config\") pod \"8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc\" (UID: \"8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc\") " Jan 22 06:05:28 crc kubenswrapper[4982]: I0122 06:05:28.503270 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc-dns-svc\") pod \"8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc\" (UID: \"8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc\") " Jan 22 06:05:28 crc kubenswrapper[4982]: I0122 06:05:28.503973 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc" (UID: "8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:05:28 crc kubenswrapper[4982]: I0122 06:05:28.504103 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc-config" (OuterVolumeSpecName: "config") pod "8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc" (UID: "8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:05:28 crc kubenswrapper[4982]: I0122 06:05:28.544895 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc-kube-api-access-q26tn" (OuterVolumeSpecName: "kube-api-access-q26tn") pod "8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc" (UID: "8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc"). InnerVolumeSpecName "kube-api-access-q26tn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:05:28 crc kubenswrapper[4982]: W0122 06:05:28.548189 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podef18a198_b47b_4e19_a56b_7179e76a1e12.slice/crio-bc060a54f8f557a40abbece0adb977ab04db056f47bdec562138802ab72ea2ae WatchSource:0}: Error finding container bc060a54f8f557a40abbece0adb977ab04db056f47bdec562138802ab72ea2ae: Status 404 returned error can't find the container with id bc060a54f8f557a40abbece0adb977ab04db056f47bdec562138802ab72ea2ae Jan 22 06:05:28 crc kubenswrapper[4982]: I0122 06:05:28.604904 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 06:05:28 crc kubenswrapper[4982]: I0122 06:05:28.604941 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q26tn\" (UniqueName: \"kubernetes.io/projected/8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc-kube-api-access-q26tn\") on node \"crc\" DevicePath \"\"" Jan 22 06:05:28 crc kubenswrapper[4982]: I0122 06:05:28.604954 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc-config\") on node \"crc\" DevicePath \"\"" Jan 22 06:05:28 crc kubenswrapper[4982]: I0122 06:05:28.828631 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4z48g" event={"ID":"29f503d7-a98b-4227-b9d9-865db16a2552","Type":"ContainerStarted","Data":"ed61b64b0d0ead28c0e517f76c165751287f4dbabc0f8cb505f59fcbad460db5"} Jan 22 06:05:28 crc kubenswrapper[4982]: I0122 06:05:28.829392 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"536304e2-bf6d-4c4a-a617-69be7bf2931e","Type":"ContainerStarted","Data":"6c7dbd60d0a9b0b4fe53b885330ca39643b037af623fb5a8496214d5dd718e6a"} Jan 22 06:05:28 crc kubenswrapper[4982]: I0122 06:05:28.830424 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"8c227aae-c987-448b-9545-febc060f0929","Type":"ContainerStarted","Data":"5e9bada00b9fa32a56bda981a2d0e0381b1485890760f4c4185530e9aadbb490"} Jan 22 06:05:28 crc kubenswrapper[4982]: I0122 06:05:28.832090 4982 generic.go:334] "Generic (PLEG): container finished" podID="d62a0c01-8c58-4e69-8d63-ef332d9171fe" containerID="0b4a539450b8b5e8f3229963ac5df1b7bf5ec04aa8a6758b7ce804db1cf04709" exitCode=0 Jan 22 06:05:28 crc kubenswrapper[4982]: I0122 06:05:28.832159 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95f5f6995-4qxqk" event={"ID":"d62a0c01-8c58-4e69-8d63-ef332d9171fe","Type":"ContainerDied","Data":"0b4a539450b8b5e8f3229963ac5df1b7bf5ec04aa8a6758b7ce804db1cf04709"} Jan 22 06:05:28 crc kubenswrapper[4982]: I0122 06:05:28.833482 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f854695bc-k64h8" Jan 22 06:05:28 crc kubenswrapper[4982]: I0122 06:05:28.833492 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f854695bc-k64h8" event={"ID":"8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc","Type":"ContainerDied","Data":"5aa2e10eb6ac761a4a691e2d6031b0313f232f03184233cbeb7dc25c50ed7d2d"} Jan 22 06:05:28 crc kubenswrapper[4982]: I0122 06:05:28.845839 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"ef18a198-b47b-4e19-a56b-7179e76a1e12","Type":"ContainerStarted","Data":"bc060a54f8f557a40abbece0adb977ab04db056f47bdec562138802ab72ea2ae"} Jan 22 06:05:28 crc kubenswrapper[4982]: I0122 06:05:28.847817 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e75b4d57-9a8d-4185-8dfe-9ce81eb81fee","Type":"ContainerStarted","Data":"c570c9ae50656c3a32d25903a644765fa5e6f11ec2ec972ceb7843463d07f83f"} Jan 22 06:05:28 crc kubenswrapper[4982]: I0122 06:05:28.851290 4982 generic.go:334] "Generic (PLEG): container finished" podID="50ce05bd-1924-4385-a2b2-f058926daab6" containerID="2048104f6ef9a9dd0ddbda0f353403b98a3dd28a53d5a6cf135e92f2476e2ab0" exitCode=0 Jan 22 06:05:28 crc kubenswrapper[4982]: I0122 06:05:28.851335 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744ffd65bc-4vmx6" event={"ID":"50ce05bd-1924-4385-a2b2-f058926daab6","Type":"ContainerDied","Data":"2048104f6ef9a9dd0ddbda0f353403b98a3dd28a53d5a6cf135e92f2476e2ab0"} Jan 22 06:05:28 crc kubenswrapper[4982]: I0122 06:05:28.918628 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-k64h8"] Jan 22 06:05:28 crc kubenswrapper[4982]: I0122 06:05:28.925916 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-k64h8"] Jan 22 06:05:29 crc kubenswrapper[4982]: I0122 06:05:29.519487 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 22 06:05:29 crc kubenswrapper[4982]: W0122 06:05:29.550239 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podabef47d7_89c0_4c16_8474_8db3d2d127e7.slice/crio-7bc65f0839a686acb37f047307ab2839c7a8c568493c8cfa3c724b41c04d6baa WatchSource:0}: Error finding container 7bc65f0839a686acb37f047307ab2839c7a8c568493c8cfa3c724b41c04d6baa: Status 404 returned error can't find the container with id 7bc65f0839a686acb37f047307ab2839c7a8c568493c8cfa3c724b41c04d6baa Jan 22 06:05:29 crc kubenswrapper[4982]: I0122 06:05:29.614602 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-74zx8"] Jan 22 06:05:29 crc kubenswrapper[4982]: I0122 06:05:29.734395 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc" path="/var/lib/kubelet/pods/8f326f35-f6d1-4eb5-b91f-3ca61edd9cdc/volumes" Jan 22 06:05:29 crc kubenswrapper[4982]: I0122 06:05:29.734932 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ade17c62-e8a8-4fe7-814c-695de179d3e0" path="/var/lib/kubelet/pods/ade17c62-e8a8-4fe7-814c-695de179d3e0/volumes" Jan 22 06:05:29 crc kubenswrapper[4982]: I0122 06:05:29.874431 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744ffd65bc-4vmx6" 
event={"ID":"50ce05bd-1924-4385-a2b2-f058926daab6","Type":"ContainerStarted","Data":"ba61cda4fa154bd082b41aa0d799dc85b6aff464f9a37a40167539d6956b033f"} Jan 22 06:05:29 crc kubenswrapper[4982]: I0122 06:05:29.875061 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-744ffd65bc-4vmx6" Jan 22 06:05:29 crc kubenswrapper[4982]: I0122 06:05:29.876275 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-74zx8" event={"ID":"e8478549-adf2-4e04-b404-f4882ed405d5","Type":"ContainerStarted","Data":"fae00728b8ff39acdf9ef7e07b72c9a4110abc5d73270e2a0dcb02d551bcf97f"} Jan 22 06:05:29 crc kubenswrapper[4982]: I0122 06:05:29.878585 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7623a66b-412d-4202-bd05-58fba1c6a3d3","Type":"ContainerStarted","Data":"3fe91b4130b040ab517d1434d08c73fd677beaa39c9b2607e5be38b57487bb3f"} Jan 22 06:05:29 crc kubenswrapper[4982]: I0122 06:05:29.882991 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95f5f6995-4qxqk" event={"ID":"d62a0c01-8c58-4e69-8d63-ef332d9171fe","Type":"ContainerStarted","Data":"d7312a7e90c2435ccb2b4e88863771b3482d91262682652ca0980c3b30137aea"} Jan 22 06:05:29 crc kubenswrapper[4982]: I0122 06:05:29.883573 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-95f5f6995-4qxqk" Jan 22 06:05:29 crc kubenswrapper[4982]: I0122 06:05:29.888358 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e","Type":"ContainerStarted","Data":"f2b7f771d32993fbe91c45e65c5bc7565b9b9c6611050ef239f416bde7c88a3d"} Jan 22 06:05:29 crc kubenswrapper[4982]: I0122 06:05:29.888434 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Jan 22 06:05:29 crc kubenswrapper[4982]: I0122 06:05:29.890822 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"bb198f24-94e6-4569-be12-9ee57000a3e3","Type":"ContainerStarted","Data":"74c05473a6c9b13b228f742222176b7edbfe2f651ea4f00507ec3d80a3dd1709"} Jan 22 06:05:29 crc kubenswrapper[4982]: I0122 06:05:29.892523 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"abef47d7-89c0-4c16-8474-8db3d2d127e7","Type":"ContainerStarted","Data":"7bc65f0839a686acb37f047307ab2839c7a8c568493c8cfa3c724b41c04d6baa"} Jan 22 06:05:29 crc kubenswrapper[4982]: I0122 06:05:29.943083 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-95f5f6995-4qxqk" podStartSLOduration=3.530597805 podStartE2EDuration="20.943058792s" podCreationTimestamp="2026-01-22 06:05:09 +0000 UTC" firstStartedPulling="2026-01-22 06:05:10.203340723 +0000 UTC m=+1171.041978726" lastFinishedPulling="2026-01-22 06:05:27.61580171 +0000 UTC m=+1188.454439713" observedRunningTime="2026-01-22 06:05:29.938456622 +0000 UTC m=+1190.777094625" watchObservedRunningTime="2026-01-22 06:05:29.943058792 +0000 UTC m=+1190.781696795" Jan 22 06:05:29 crc kubenswrapper[4982]: I0122 06:05:29.958087 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-744ffd65bc-4vmx6" podStartSLOduration=3.455867514 podStartE2EDuration="20.958041423s" podCreationTimestamp="2026-01-22 06:05:09 +0000 UTC" firstStartedPulling="2026-01-22 06:05:10.171351998 +0000 UTC m=+1171.009990001" lastFinishedPulling="2026-01-22 
06:05:27.673525907 +0000 UTC m=+1188.512163910" observedRunningTime="2026-01-22 06:05:29.952451547 +0000 UTC m=+1190.791089560" watchObservedRunningTime="2026-01-22 06:05:29.958041423 +0000 UTC m=+1190.796679426" Jan 22 06:05:30 crc kubenswrapper[4982]: I0122 06:05:30.008289 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=14.982135698 podStartE2EDuration="17.008270713s" podCreationTimestamp="2026-01-22 06:05:13 +0000 UTC" firstStartedPulling="2026-01-22 06:05:27.546467001 +0000 UTC m=+1188.385105044" lastFinishedPulling="2026-01-22 06:05:29.572602036 +0000 UTC m=+1190.411240059" observedRunningTime="2026-01-22 06:05:29.995295695 +0000 UTC m=+1190.833933698" watchObservedRunningTime="2026-01-22 06:05:30.008270713 +0000 UTC m=+1190.846908716" Jan 22 06:05:34 crc kubenswrapper[4982]: I0122 06:05:34.504895 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-744ffd65bc-4vmx6" Jan 22 06:05:34 crc kubenswrapper[4982]: I0122 06:05:34.916059 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-95f5f6995-4qxqk" Jan 22 06:05:35 crc kubenswrapper[4982]: I0122 06:05:35.005481 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-4vmx6"] Jan 22 06:05:35 crc kubenswrapper[4982]: I0122 06:05:35.005756 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-744ffd65bc-4vmx6" podUID="50ce05bd-1924-4385-a2b2-f058926daab6" containerName="dnsmasq-dns" containerID="cri-o://ba61cda4fa154bd082b41aa0d799dc85b6aff464f9a37a40167539d6956b033f" gracePeriod=10 Jan 22 06:05:35 crc kubenswrapper[4982]: I0122 06:05:35.735542 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-744ffd65bc-4vmx6" Jan 22 06:05:35 crc kubenswrapper[4982]: I0122 06:05:35.834193 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50ce05bd-1924-4385-a2b2-f058926daab6-config\") pod \"50ce05bd-1924-4385-a2b2-f058926daab6\" (UID: \"50ce05bd-1924-4385-a2b2-f058926daab6\") " Jan 22 06:05:35 crc kubenswrapper[4982]: I0122 06:05:35.834283 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/50ce05bd-1924-4385-a2b2-f058926daab6-dns-svc\") pod \"50ce05bd-1924-4385-a2b2-f058926daab6\" (UID: \"50ce05bd-1924-4385-a2b2-f058926daab6\") " Jan 22 06:05:35 crc kubenswrapper[4982]: I0122 06:05:35.834302 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gsqqr\" (UniqueName: \"kubernetes.io/projected/50ce05bd-1924-4385-a2b2-f058926daab6-kube-api-access-gsqqr\") pod \"50ce05bd-1924-4385-a2b2-f058926daab6\" (UID: \"50ce05bd-1924-4385-a2b2-f058926daab6\") " Jan 22 06:05:35 crc kubenswrapper[4982]: I0122 06:05:35.838959 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50ce05bd-1924-4385-a2b2-f058926daab6-kube-api-access-gsqqr" (OuterVolumeSpecName: "kube-api-access-gsqqr") pod "50ce05bd-1924-4385-a2b2-f058926daab6" (UID: "50ce05bd-1924-4385-a2b2-f058926daab6"). InnerVolumeSpecName "kube-api-access-gsqqr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:05:35 crc kubenswrapper[4982]: I0122 06:05:35.937605 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gsqqr\" (UniqueName: \"kubernetes.io/projected/50ce05bd-1924-4385-a2b2-f058926daab6-kube-api-access-gsqqr\") on node \"crc\" DevicePath \"\"" Jan 22 06:05:35 crc kubenswrapper[4982]: I0122 06:05:35.955300 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/50ce05bd-1924-4385-a2b2-f058926daab6-config" (OuterVolumeSpecName: "config") pod "50ce05bd-1924-4385-a2b2-f058926daab6" (UID: "50ce05bd-1924-4385-a2b2-f058926daab6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:05:35 crc kubenswrapper[4982]: I0122 06:05:35.962411 4982 generic.go:334] "Generic (PLEG): container finished" podID="50ce05bd-1924-4385-a2b2-f058926daab6" containerID="ba61cda4fa154bd082b41aa0d799dc85b6aff464f9a37a40167539d6956b033f" exitCode=0 Jan 22 06:05:35 crc kubenswrapper[4982]: I0122 06:05:35.962456 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744ffd65bc-4vmx6" event={"ID":"50ce05bd-1924-4385-a2b2-f058926daab6","Type":"ContainerDied","Data":"ba61cda4fa154bd082b41aa0d799dc85b6aff464f9a37a40167539d6956b033f"} Jan 22 06:05:35 crc kubenswrapper[4982]: I0122 06:05:35.962480 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744ffd65bc-4vmx6" event={"ID":"50ce05bd-1924-4385-a2b2-f058926daab6","Type":"ContainerDied","Data":"6eca0399621214091823d1ec5d3d55f94038efaf5d8548a37de609c2afbd5d79"} Jan 22 06:05:35 crc kubenswrapper[4982]: I0122 06:05:35.962497 4982 scope.go:117] "RemoveContainer" containerID="ba61cda4fa154bd082b41aa0d799dc85b6aff464f9a37a40167539d6956b033f" Jan 22 06:05:35 crc kubenswrapper[4982]: I0122 06:05:35.962647 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-744ffd65bc-4vmx6" Jan 22 06:05:35 crc kubenswrapper[4982]: I0122 06:05:35.972685 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/50ce05bd-1924-4385-a2b2-f058926daab6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "50ce05bd-1924-4385-a2b2-f058926daab6" (UID: "50ce05bd-1924-4385-a2b2-f058926daab6"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:05:35 crc kubenswrapper[4982]: I0122 06:05:35.986542 4982 scope.go:117] "RemoveContainer" containerID="2048104f6ef9a9dd0ddbda0f353403b98a3dd28a53d5a6cf135e92f2476e2ab0" Jan 22 06:05:36 crc kubenswrapper[4982]: I0122 06:05:36.015141 4982 scope.go:117] "RemoveContainer" containerID="ba61cda4fa154bd082b41aa0d799dc85b6aff464f9a37a40167539d6956b033f" Jan 22 06:05:36 crc kubenswrapper[4982]: E0122 06:05:36.015596 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba61cda4fa154bd082b41aa0d799dc85b6aff464f9a37a40167539d6956b033f\": container with ID starting with ba61cda4fa154bd082b41aa0d799dc85b6aff464f9a37a40167539d6956b033f not found: ID does not exist" containerID="ba61cda4fa154bd082b41aa0d799dc85b6aff464f9a37a40167539d6956b033f" Jan 22 06:05:36 crc kubenswrapper[4982]: I0122 06:05:36.015638 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba61cda4fa154bd082b41aa0d799dc85b6aff464f9a37a40167539d6956b033f"} err="failed to get container status \"ba61cda4fa154bd082b41aa0d799dc85b6aff464f9a37a40167539d6956b033f\": rpc error: code = NotFound desc = could not find container \"ba61cda4fa154bd082b41aa0d799dc85b6aff464f9a37a40167539d6956b033f\": container with ID starting with ba61cda4fa154bd082b41aa0d799dc85b6aff464f9a37a40167539d6956b033f not found: ID does not exist" Jan 22 06:05:36 crc kubenswrapper[4982]: I0122 06:05:36.015665 4982 scope.go:117] "RemoveContainer" containerID="2048104f6ef9a9dd0ddbda0f353403b98a3dd28a53d5a6cf135e92f2476e2ab0" Jan 22 06:05:36 crc kubenswrapper[4982]: E0122 06:05:36.016199 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2048104f6ef9a9dd0ddbda0f353403b98a3dd28a53d5a6cf135e92f2476e2ab0\": container with ID starting with 2048104f6ef9a9dd0ddbda0f353403b98a3dd28a53d5a6cf135e92f2476e2ab0 not found: ID does not exist" containerID="2048104f6ef9a9dd0ddbda0f353403b98a3dd28a53d5a6cf135e92f2476e2ab0" Jan 22 06:05:36 crc kubenswrapper[4982]: I0122 06:05:36.016258 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2048104f6ef9a9dd0ddbda0f353403b98a3dd28a53d5a6cf135e92f2476e2ab0"} err="failed to get container status \"2048104f6ef9a9dd0ddbda0f353403b98a3dd28a53d5a6cf135e92f2476e2ab0\": rpc error: code = NotFound desc = could not find container \"2048104f6ef9a9dd0ddbda0f353403b98a3dd28a53d5a6cf135e92f2476e2ab0\": container with ID starting with 2048104f6ef9a9dd0ddbda0f353403b98a3dd28a53d5a6cf135e92f2476e2ab0 not found: ID does not exist" Jan 22 06:05:36 crc kubenswrapper[4982]: I0122 06:05:36.039460 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50ce05bd-1924-4385-a2b2-f058926daab6-config\") on node \"crc\" DevicePath \"\"" Jan 22 06:05:36 crc kubenswrapper[4982]: I0122 06:05:36.039495 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/50ce05bd-1924-4385-a2b2-f058926daab6-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 06:05:36 crc kubenswrapper[4982]: I0122 06:05:36.292130 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-4vmx6"] Jan 22 06:05:36 crc kubenswrapper[4982]: I0122 06:05:36.298544 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-4vmx6"] Jan 22 
06:05:36 crc kubenswrapper[4982]: I0122 06:05:36.976575 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"ef18a198-b47b-4e19-a56b-7179e76a1e12","Type":"ContainerStarted","Data":"44293f881aeb45c5c60b9c080fb0120c9f3a061e77ce519b40e24281772cc8b3"} Jan 22 06:05:36 crc kubenswrapper[4982]: I0122 06:05:36.979744 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e75b4d57-9a8d-4185-8dfe-9ce81eb81fee","Type":"ContainerStarted","Data":"2ba701ffb7e81c8e8e48691e3fdbe161394306846afd9348c67bf422d21c6517"} Jan 22 06:05:36 crc kubenswrapper[4982]: I0122 06:05:36.979845 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Jan 22 06:05:36 crc kubenswrapper[4982]: I0122 06:05:36.982545 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"abef47d7-89c0-4c16-8474-8db3d2d127e7","Type":"ContainerStarted","Data":"44180703ae3b87e41617dc3bc742fc8099ef79e67de1a6b50b2b21f0fdc9f2bc"} Jan 22 06:05:36 crc kubenswrapper[4982]: I0122 06:05:36.986477 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4z48g" event={"ID":"29f503d7-a98b-4227-b9d9-865db16a2552","Type":"ContainerStarted","Data":"e5c83d320db754803d3204ed17858058cd2a48d175560794c946abe904478eb5"} Jan 22 06:05:36 crc kubenswrapper[4982]: I0122 06:05:36.986636 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-4z48g" Jan 22 06:05:36 crc kubenswrapper[4982]: I0122 06:05:36.988785 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"536304e2-bf6d-4c4a-a617-69be7bf2931e","Type":"ContainerStarted","Data":"b0a15b57306450b60cb280e0a0a3329be60599b1cd20993a2da1387e2ef7a182"} Jan 22 06:05:36 crc kubenswrapper[4982]: I0122 06:05:36.990668 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"8c227aae-c987-448b-9545-febc060f0929","Type":"ContainerStarted","Data":"6f0770f82963bafc337371320c87e348518a8543676d5a4adc6809130236c9d3"} Jan 22 06:05:36 crc kubenswrapper[4982]: I0122 06:05:36.992734 4982 generic.go:334] "Generic (PLEG): container finished" podID="e8478549-adf2-4e04-b404-f4882ed405d5" containerID="b532a25aee0797dbd740b203472f01ab6f2aa1c38b5e22da29a10d70d1898802" exitCode=0 Jan 22 06:05:36 crc kubenswrapper[4982]: I0122 06:05:36.992769 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-74zx8" event={"ID":"e8478549-adf2-4e04-b404-f4882ed405d5","Type":"ContainerDied","Data":"b532a25aee0797dbd740b203472f01ab6f2aa1c38b5e22da29a10d70d1898802"} Jan 22 06:05:36 crc kubenswrapper[4982]: I0122 06:05:36.999618 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=14.523641428 podStartE2EDuration="21.999603118s" podCreationTimestamp="2026-01-22 06:05:15 +0000 UTC" firstStartedPulling="2026-01-22 06:05:28.411752177 +0000 UTC m=+1189.250390180" lastFinishedPulling="2026-01-22 06:05:35.887713867 +0000 UTC m=+1196.726351870" observedRunningTime="2026-01-22 06:05:36.994980687 +0000 UTC m=+1197.833618690" watchObservedRunningTime="2026-01-22 06:05:36.999603118 +0000 UTC m=+1197.838241121" Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.086040 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-4z48g" 
podStartSLOduration=10.461792576 podStartE2EDuration="17.086013552s" podCreationTimestamp="2026-01-22 06:05:20 +0000 UTC" firstStartedPulling="2026-01-22 06:05:28.363866518 +0000 UTC m=+1189.202504521" lastFinishedPulling="2026-01-22 06:05:34.988087494 +0000 UTC m=+1195.826725497" observedRunningTime="2026-01-22 06:05:37.074941883 +0000 UTC m=+1197.913579886" watchObservedRunningTime="2026-01-22 06:05:37.086013552 +0000 UTC m=+1197.924651585" Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.740036 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50ce05bd-1924-4385-a2b2-f058926daab6" path="/var/lib/kubelet/pods/50ce05bd-1924-4385-a2b2-f058926daab6/volumes" Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.765661 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-w7b5x"] Jan 22 06:05:37 crc kubenswrapper[4982]: E0122 06:05:37.766202 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50ce05bd-1924-4385-a2b2-f058926daab6" containerName="dnsmasq-dns" Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.766217 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="50ce05bd-1924-4385-a2b2-f058926daab6" containerName="dnsmasq-dns" Jan 22 06:05:37 crc kubenswrapper[4982]: E0122 06:05:37.766248 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50ce05bd-1924-4385-a2b2-f058926daab6" containerName="init" Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.766254 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="50ce05bd-1924-4385-a2b2-f058926daab6" containerName="init" Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.766473 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="50ce05bd-1924-4385-a2b2-f058926daab6" containerName="dnsmasq-dns" Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.767323 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-w7b5x" Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.772257 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.775186 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-w7b5x"] Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.879578 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/98d29241-70c5-488d-826a-c37971689e5c-ovs-rundir\") pod \"ovn-controller-metrics-w7b5x\" (UID: \"98d29241-70c5-488d-826a-c37971689e5c\") " pod="openstack/ovn-controller-metrics-w7b5x" Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.879652 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98d29241-70c5-488d-826a-c37971689e5c-combined-ca-bundle\") pod \"ovn-controller-metrics-w7b5x\" (UID: \"98d29241-70c5-488d-826a-c37971689e5c\") " pod="openstack/ovn-controller-metrics-w7b5x" Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.879687 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kn4hg\" (UniqueName: \"kubernetes.io/projected/98d29241-70c5-488d-826a-c37971689e5c-kube-api-access-kn4hg\") pod \"ovn-controller-metrics-w7b5x\" (UID: \"98d29241-70c5-488d-826a-c37971689e5c\") " pod="openstack/ovn-controller-metrics-w7b5x" Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.879734 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/98d29241-70c5-488d-826a-c37971689e5c-ovn-rundir\") pod \"ovn-controller-metrics-w7b5x\" (UID: \"98d29241-70c5-488d-826a-c37971689e5c\") " pod="openstack/ovn-controller-metrics-w7b5x" Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.879793 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/98d29241-70c5-488d-826a-c37971689e5c-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-w7b5x\" (UID: \"98d29241-70c5-488d-826a-c37971689e5c\") " pod="openstack/ovn-controller-metrics-w7b5x" Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.879870 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/98d29241-70c5-488d-826a-c37971689e5c-config\") pod \"ovn-controller-metrics-w7b5x\" (UID: \"98d29241-70c5-488d-826a-c37971689e5c\") " pod="openstack/ovn-controller-metrics-w7b5x" Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.882315 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7878659675-xq7rv"] Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.902626 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7878659675-xq7rv" Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.902515 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7878659675-xq7rv"] Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.907579 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.981728 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rn4fj\" (UniqueName: \"kubernetes.io/projected/f854a6db-39c2-49de-9b68-51bc7771c604-kube-api-access-rn4fj\") pod \"dnsmasq-dns-7878659675-xq7rv\" (UID: \"f854a6db-39c2-49de-9b68-51bc7771c604\") " pod="openstack/dnsmasq-dns-7878659675-xq7rv" Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.981807 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/98d29241-70c5-488d-826a-c37971689e5c-ovs-rundir\") pod \"ovn-controller-metrics-w7b5x\" (UID: \"98d29241-70c5-488d-826a-c37971689e5c\") " pod="openstack/ovn-controller-metrics-w7b5x" Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.981826 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f854a6db-39c2-49de-9b68-51bc7771c604-dns-svc\") pod \"dnsmasq-dns-7878659675-xq7rv\" (UID: \"f854a6db-39c2-49de-9b68-51bc7771c604\") " pod="openstack/dnsmasq-dns-7878659675-xq7rv" Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.981868 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98d29241-70c5-488d-826a-c37971689e5c-combined-ca-bundle\") pod \"ovn-controller-metrics-w7b5x\" (UID: \"98d29241-70c5-488d-826a-c37971689e5c\") " pod="openstack/ovn-controller-metrics-w7b5x" Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.981884 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kn4hg\" (UniqueName: \"kubernetes.io/projected/98d29241-70c5-488d-826a-c37971689e5c-kube-api-access-kn4hg\") pod \"ovn-controller-metrics-w7b5x\" (UID: \"98d29241-70c5-488d-826a-c37971689e5c\") " pod="openstack/ovn-controller-metrics-w7b5x" Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.981900 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/98d29241-70c5-488d-826a-c37971689e5c-ovn-rundir\") pod \"ovn-controller-metrics-w7b5x\" (UID: \"98d29241-70c5-488d-826a-c37971689e5c\") " pod="openstack/ovn-controller-metrics-w7b5x" Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.981931 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f854a6db-39c2-49de-9b68-51bc7771c604-ovsdbserver-nb\") pod \"dnsmasq-dns-7878659675-xq7rv\" (UID: \"f854a6db-39c2-49de-9b68-51bc7771c604\") " pod="openstack/dnsmasq-dns-7878659675-xq7rv" Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.981953 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/98d29241-70c5-488d-826a-c37971689e5c-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-w7b5x\" (UID: \"98d29241-70c5-488d-826a-c37971689e5c\") " 
pod="openstack/ovn-controller-metrics-w7b5x" Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.981986 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f854a6db-39c2-49de-9b68-51bc7771c604-config\") pod \"dnsmasq-dns-7878659675-xq7rv\" (UID: \"f854a6db-39c2-49de-9b68-51bc7771c604\") " pod="openstack/dnsmasq-dns-7878659675-xq7rv" Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.982015 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/98d29241-70c5-488d-826a-c37971689e5c-config\") pod \"ovn-controller-metrics-w7b5x\" (UID: \"98d29241-70c5-488d-826a-c37971689e5c\") " pod="openstack/ovn-controller-metrics-w7b5x" Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.982993 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/98d29241-70c5-488d-826a-c37971689e5c-ovn-rundir\") pod \"ovn-controller-metrics-w7b5x\" (UID: \"98d29241-70c5-488d-826a-c37971689e5c\") " pod="openstack/ovn-controller-metrics-w7b5x" Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.983037 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/98d29241-70c5-488d-826a-c37971689e5c-config\") pod \"ovn-controller-metrics-w7b5x\" (UID: \"98d29241-70c5-488d-826a-c37971689e5c\") " pod="openstack/ovn-controller-metrics-w7b5x" Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.983076 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/98d29241-70c5-488d-826a-c37971689e5c-ovs-rundir\") pod \"ovn-controller-metrics-w7b5x\" (UID: \"98d29241-70c5-488d-826a-c37971689e5c\") " pod="openstack/ovn-controller-metrics-w7b5x" Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.994612 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98d29241-70c5-488d-826a-c37971689e5c-combined-ca-bundle\") pod \"ovn-controller-metrics-w7b5x\" (UID: \"98d29241-70c5-488d-826a-c37971689e5c\") " pod="openstack/ovn-controller-metrics-w7b5x" Jan 22 06:05:37 crc kubenswrapper[4982]: I0122 06:05:37.995833 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/98d29241-70c5-488d-826a-c37971689e5c-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-w7b5x\" (UID: \"98d29241-70c5-488d-826a-c37971689e5c\") " pod="openstack/ovn-controller-metrics-w7b5x" Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.010216 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kn4hg\" (UniqueName: \"kubernetes.io/projected/98d29241-70c5-488d-826a-c37971689e5c-kube-api-access-kn4hg\") pod \"ovn-controller-metrics-w7b5x\" (UID: \"98d29241-70c5-488d-826a-c37971689e5c\") " pod="openstack/ovn-controller-metrics-w7b5x" Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.057819 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-74zx8" event={"ID":"e8478549-adf2-4e04-b404-f4882ed405d5","Type":"ContainerStarted","Data":"d07fbec73b82d46cf6bca3ce65be1c95f30a0ec4db97b397ade4d734c12b4c66"} Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.057906 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovn-controller-ovs-74zx8" event={"ID":"e8478549-adf2-4e04-b404-f4882ed405d5","Type":"ContainerStarted","Data":"8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b"} Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.058424 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-74zx8" Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.058493 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-74zx8" Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.084380 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f854a6db-39c2-49de-9b68-51bc7771c604-ovsdbserver-nb\") pod \"dnsmasq-dns-7878659675-xq7rv\" (UID: \"f854a6db-39c2-49de-9b68-51bc7771c604\") " pod="openstack/dnsmasq-dns-7878659675-xq7rv" Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.084729 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f854a6db-39c2-49de-9b68-51bc7771c604-config\") pod \"dnsmasq-dns-7878659675-xq7rv\" (UID: \"f854a6db-39c2-49de-9b68-51bc7771c604\") " pod="openstack/dnsmasq-dns-7878659675-xq7rv" Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.084972 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rn4fj\" (UniqueName: \"kubernetes.io/projected/f854a6db-39c2-49de-9b68-51bc7771c604-kube-api-access-rn4fj\") pod \"dnsmasq-dns-7878659675-xq7rv\" (UID: \"f854a6db-39c2-49de-9b68-51bc7771c604\") " pod="openstack/dnsmasq-dns-7878659675-xq7rv" Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.085062 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f854a6db-39c2-49de-9b68-51bc7771c604-dns-svc\") pod \"dnsmasq-dns-7878659675-xq7rv\" (UID: \"f854a6db-39c2-49de-9b68-51bc7771c604\") " pod="openstack/dnsmasq-dns-7878659675-xq7rv" Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.087059 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f854a6db-39c2-49de-9b68-51bc7771c604-ovsdbserver-nb\") pod \"dnsmasq-dns-7878659675-xq7rv\" (UID: \"f854a6db-39c2-49de-9b68-51bc7771c604\") " pod="openstack/dnsmasq-dns-7878659675-xq7rv" Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.087653 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f854a6db-39c2-49de-9b68-51bc7771c604-config\") pod \"dnsmasq-dns-7878659675-xq7rv\" (UID: \"f854a6db-39c2-49de-9b68-51bc7771c604\") " pod="openstack/dnsmasq-dns-7878659675-xq7rv" Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.088560 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f854a6db-39c2-49de-9b68-51bc7771c604-dns-svc\") pod \"dnsmasq-dns-7878659675-xq7rv\" (UID: \"f854a6db-39c2-49de-9b68-51bc7771c604\") " pod="openstack/dnsmasq-dns-7878659675-xq7rv" Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.093983 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7878659675-xq7rv"] Jan 22 06:05:38 crc kubenswrapper[4982]: E0122 06:05:38.096827 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-rn4fj], unattached volumes=[], failed 
to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-7878659675-xq7rv" podUID="f854a6db-39c2-49de-9b68-51bc7771c604" Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.104226 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-w7b5x" Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.105117 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-5sv75"] Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.111338 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-586b989cdc-5sv75" Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.113451 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-5sv75"] Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.113954 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.148043 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rn4fj\" (UniqueName: \"kubernetes.io/projected/f854a6db-39c2-49de-9b68-51bc7771c604-kube-api-access-rn4fj\") pod \"dnsmasq-dns-7878659675-xq7rv\" (UID: \"f854a6db-39c2-49de-9b68-51bc7771c604\") " pod="openstack/dnsmasq-dns-7878659675-xq7rv" Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.162149 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-74zx8" podStartSLOduration=12.807961972 podStartE2EDuration="18.16212944s" podCreationTimestamp="2026-01-22 06:05:20 +0000 UTC" firstStartedPulling="2026-01-22 06:05:29.631986076 +0000 UTC m=+1190.470624099" lastFinishedPulling="2026-01-22 06:05:34.986153564 +0000 UTC m=+1195.824791567" observedRunningTime="2026-01-22 06:05:38.134588601 +0000 UTC m=+1198.973226604" watchObservedRunningTime="2026-01-22 06:05:38.16212944 +0000 UTC m=+1199.000767443" Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.187189 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/13dde00b-2101-40d6-a327-2c541417b11e-dns-svc\") pod \"dnsmasq-dns-586b989cdc-5sv75\" (UID: \"13dde00b-2101-40d6-a327-2c541417b11e\") " pod="openstack/dnsmasq-dns-586b989cdc-5sv75" Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.187275 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/13dde00b-2101-40d6-a327-2c541417b11e-ovsdbserver-sb\") pod \"dnsmasq-dns-586b989cdc-5sv75\" (UID: \"13dde00b-2101-40d6-a327-2c541417b11e\") " pod="openstack/dnsmasq-dns-586b989cdc-5sv75" Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.187373 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/13dde00b-2101-40d6-a327-2c541417b11e-ovsdbserver-nb\") pod \"dnsmasq-dns-586b989cdc-5sv75\" (UID: \"13dde00b-2101-40d6-a327-2c541417b11e\") " pod="openstack/dnsmasq-dns-586b989cdc-5sv75" Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.187398 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rmpv\" (UniqueName: \"kubernetes.io/projected/13dde00b-2101-40d6-a327-2c541417b11e-kube-api-access-2rmpv\") pod \"dnsmasq-dns-586b989cdc-5sv75\" (UID: 
\"13dde00b-2101-40d6-a327-2c541417b11e\") " pod="openstack/dnsmasq-dns-586b989cdc-5sv75" Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.187420 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13dde00b-2101-40d6-a327-2c541417b11e-config\") pod \"dnsmasq-dns-586b989cdc-5sv75\" (UID: \"13dde00b-2101-40d6-a327-2c541417b11e\") " pod="openstack/dnsmasq-dns-586b989cdc-5sv75" Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.290808 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/13dde00b-2101-40d6-a327-2c541417b11e-dns-svc\") pod \"dnsmasq-dns-586b989cdc-5sv75\" (UID: \"13dde00b-2101-40d6-a327-2c541417b11e\") " pod="openstack/dnsmasq-dns-586b989cdc-5sv75" Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.291162 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/13dde00b-2101-40d6-a327-2c541417b11e-ovsdbserver-sb\") pod \"dnsmasq-dns-586b989cdc-5sv75\" (UID: \"13dde00b-2101-40d6-a327-2c541417b11e\") " pod="openstack/dnsmasq-dns-586b989cdc-5sv75" Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.291198 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/13dde00b-2101-40d6-a327-2c541417b11e-ovsdbserver-nb\") pod \"dnsmasq-dns-586b989cdc-5sv75\" (UID: \"13dde00b-2101-40d6-a327-2c541417b11e\") " pod="openstack/dnsmasq-dns-586b989cdc-5sv75" Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.291215 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rmpv\" (UniqueName: \"kubernetes.io/projected/13dde00b-2101-40d6-a327-2c541417b11e-kube-api-access-2rmpv\") pod \"dnsmasq-dns-586b989cdc-5sv75\" (UID: \"13dde00b-2101-40d6-a327-2c541417b11e\") " pod="openstack/dnsmasq-dns-586b989cdc-5sv75" Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.291235 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13dde00b-2101-40d6-a327-2c541417b11e-config\") pod \"dnsmasq-dns-586b989cdc-5sv75\" (UID: \"13dde00b-2101-40d6-a327-2c541417b11e\") " pod="openstack/dnsmasq-dns-586b989cdc-5sv75" Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.292142 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13dde00b-2101-40d6-a327-2c541417b11e-config\") pod \"dnsmasq-dns-586b989cdc-5sv75\" (UID: \"13dde00b-2101-40d6-a327-2c541417b11e\") " pod="openstack/dnsmasq-dns-586b989cdc-5sv75" Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.292761 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/13dde00b-2101-40d6-a327-2c541417b11e-dns-svc\") pod \"dnsmasq-dns-586b989cdc-5sv75\" (UID: \"13dde00b-2101-40d6-a327-2c541417b11e\") " pod="openstack/dnsmasq-dns-586b989cdc-5sv75" Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.293295 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/13dde00b-2101-40d6-a327-2c541417b11e-ovsdbserver-sb\") pod \"dnsmasq-dns-586b989cdc-5sv75\" (UID: \"13dde00b-2101-40d6-a327-2c541417b11e\") " pod="openstack/dnsmasq-dns-586b989cdc-5sv75" Jan 22 06:05:38 crc 
kubenswrapper[4982]: I0122 06:05:38.293642 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/13dde00b-2101-40d6-a327-2c541417b11e-ovsdbserver-nb\") pod \"dnsmasq-dns-586b989cdc-5sv75\" (UID: \"13dde00b-2101-40d6-a327-2c541417b11e\") " pod="openstack/dnsmasq-dns-586b989cdc-5sv75" Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.329893 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rmpv\" (UniqueName: \"kubernetes.io/projected/13dde00b-2101-40d6-a327-2c541417b11e-kube-api-access-2rmpv\") pod \"dnsmasq-dns-586b989cdc-5sv75\" (UID: \"13dde00b-2101-40d6-a327-2c541417b11e\") " pod="openstack/dnsmasq-dns-586b989cdc-5sv75" Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.529200 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-586b989cdc-5sv75" Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.739552 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-w7b5x"] Jan 22 06:05:38 crc kubenswrapper[4982]: I0122 06:05:38.926583 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Jan 22 06:05:39 crc kubenswrapper[4982]: I0122 06:05:39.030808 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-5sv75"] Jan 22 06:05:39 crc kubenswrapper[4982]: W0122 06:05:39.044554 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod13dde00b_2101_40d6_a327_2c541417b11e.slice/crio-6b790217a310e6802c5453fcea127ee9937f2a327afb9ff2dea5eafa521605f4 WatchSource:0}: Error finding container 6b790217a310e6802c5453fcea127ee9937f2a327afb9ff2dea5eafa521605f4: Status 404 returned error can't find the container with id 6b790217a310e6802c5453fcea127ee9937f2a327afb9ff2dea5eafa521605f4 Jan 22 06:05:39 crc kubenswrapper[4982]: I0122 06:05:39.063348 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586b989cdc-5sv75" event={"ID":"13dde00b-2101-40d6-a327-2c541417b11e","Type":"ContainerStarted","Data":"6b790217a310e6802c5453fcea127ee9937f2a327afb9ff2dea5eafa521605f4"} Jan 22 06:05:39 crc kubenswrapper[4982]: I0122 06:05:39.065352 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-w7b5x" event={"ID":"98d29241-70c5-488d-826a-c37971689e5c","Type":"ContainerStarted","Data":"35ff9eb84925d839a1b52a49f3acab8b9287bedc5e9f612fb4170721d6167c29"} Jan 22 06:05:39 crc kubenswrapper[4982]: I0122 06:05:39.065417 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7878659675-xq7rv" Jan 22 06:05:39 crc kubenswrapper[4982]: I0122 06:05:39.077929 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7878659675-xq7rv" Jan 22 06:05:39 crc kubenswrapper[4982]: I0122 06:05:39.212190 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f854a6db-39c2-49de-9b68-51bc7771c604-dns-svc\") pod \"f854a6db-39c2-49de-9b68-51bc7771c604\" (UID: \"f854a6db-39c2-49de-9b68-51bc7771c604\") " Jan 22 06:05:39 crc kubenswrapper[4982]: I0122 06:05:39.212292 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rn4fj\" (UniqueName: \"kubernetes.io/projected/f854a6db-39c2-49de-9b68-51bc7771c604-kube-api-access-rn4fj\") pod \"f854a6db-39c2-49de-9b68-51bc7771c604\" (UID: \"f854a6db-39c2-49de-9b68-51bc7771c604\") " Jan 22 06:05:39 crc kubenswrapper[4982]: I0122 06:05:39.212822 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f854a6db-39c2-49de-9b68-51bc7771c604-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f854a6db-39c2-49de-9b68-51bc7771c604" (UID: "f854a6db-39c2-49de-9b68-51bc7771c604"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:05:39 crc kubenswrapper[4982]: I0122 06:05:39.213324 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f854a6db-39c2-49de-9b68-51bc7771c604-ovsdbserver-nb\") pod \"f854a6db-39c2-49de-9b68-51bc7771c604\" (UID: \"f854a6db-39c2-49de-9b68-51bc7771c604\") " Jan 22 06:05:39 crc kubenswrapper[4982]: I0122 06:05:39.213447 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f854a6db-39c2-49de-9b68-51bc7771c604-config\") pod \"f854a6db-39c2-49de-9b68-51bc7771c604\" (UID: \"f854a6db-39c2-49de-9b68-51bc7771c604\") " Jan 22 06:05:39 crc kubenswrapper[4982]: I0122 06:05:39.213709 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f854a6db-39c2-49de-9b68-51bc7771c604-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f854a6db-39c2-49de-9b68-51bc7771c604" (UID: "f854a6db-39c2-49de-9b68-51bc7771c604"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:05:39 crc kubenswrapper[4982]: I0122 06:05:39.214010 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f854a6db-39c2-49de-9b68-51bc7771c604-config" (OuterVolumeSpecName: "config") pod "f854a6db-39c2-49de-9b68-51bc7771c604" (UID: "f854a6db-39c2-49de-9b68-51bc7771c604"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:05:39 crc kubenswrapper[4982]: I0122 06:05:39.214582 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f854a6db-39c2-49de-9b68-51bc7771c604-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 06:05:39 crc kubenswrapper[4982]: I0122 06:05:39.214613 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f854a6db-39c2-49de-9b68-51bc7771c604-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 06:05:39 crc kubenswrapper[4982]: I0122 06:05:39.214630 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f854a6db-39c2-49de-9b68-51bc7771c604-config\") on node \"crc\" DevicePath \"\"" Jan 22 06:05:39 crc kubenswrapper[4982]: I0122 06:05:39.222510 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f854a6db-39c2-49de-9b68-51bc7771c604-kube-api-access-rn4fj" (OuterVolumeSpecName: "kube-api-access-rn4fj") pod "f854a6db-39c2-49de-9b68-51bc7771c604" (UID: "f854a6db-39c2-49de-9b68-51bc7771c604"). InnerVolumeSpecName "kube-api-access-rn4fj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:05:39 crc kubenswrapper[4982]: I0122 06:05:39.316384 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rn4fj\" (UniqueName: \"kubernetes.io/projected/f854a6db-39c2-49de-9b68-51bc7771c604-kube-api-access-rn4fj\") on node \"crc\" DevicePath \"\"" Jan 22 06:05:40 crc kubenswrapper[4982]: I0122 06:05:40.073781 4982 generic.go:334] "Generic (PLEG): container finished" podID="13dde00b-2101-40d6-a327-2c541417b11e" containerID="422bf6d6fc922d9e710e3871431952674e63b2e857d84cbc298a900b828522d1" exitCode=0 Jan 22 06:05:40 crc kubenswrapper[4982]: I0122 06:05:40.073889 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586b989cdc-5sv75" event={"ID":"13dde00b-2101-40d6-a327-2c541417b11e","Type":"ContainerDied","Data":"422bf6d6fc922d9e710e3871431952674e63b2e857d84cbc298a900b828522d1"} Jan 22 06:05:40 crc kubenswrapper[4982]: I0122 06:05:40.074183 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7878659675-xq7rv" Jan 22 06:05:40 crc kubenswrapper[4982]: I0122 06:05:40.122994 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7878659675-xq7rv"] Jan 22 06:05:40 crc kubenswrapper[4982]: I0122 06:05:40.127891 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7878659675-xq7rv"] Jan 22 06:05:41 crc kubenswrapper[4982]: I0122 06:05:41.081486 4982 generic.go:334] "Generic (PLEG): container finished" podID="536304e2-bf6d-4c4a-a617-69be7bf2931e" containerID="b0a15b57306450b60cb280e0a0a3329be60599b1cd20993a2da1387e2ef7a182" exitCode=0 Jan 22 06:05:41 crc kubenswrapper[4982]: I0122 06:05:41.081546 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"536304e2-bf6d-4c4a-a617-69be7bf2931e","Type":"ContainerDied","Data":"b0a15b57306450b60cb280e0a0a3329be60599b1cd20993a2da1387e2ef7a182"} Jan 22 06:05:41 crc kubenswrapper[4982]: I0122 06:05:41.086428 4982 generic.go:334] "Generic (PLEG): container finished" podID="8c227aae-c987-448b-9545-febc060f0929" containerID="6f0770f82963bafc337371320c87e348518a8543676d5a4adc6809130236c9d3" exitCode=0 Jan 22 06:05:41 crc kubenswrapper[4982]: I0122 06:05:41.086465 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"8c227aae-c987-448b-9545-febc060f0929","Type":"ContainerDied","Data":"6f0770f82963bafc337371320c87e348518a8543676d5a4adc6809130236c9d3"} Jan 22 06:05:41 crc kubenswrapper[4982]: I0122 06:05:41.728594 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f854a6db-39c2-49de-9b68-51bc7771c604" path="/var/lib/kubelet/pods/f854a6db-39c2-49de-9b68-51bc7771c604/volumes" Jan 22 06:05:42 crc kubenswrapper[4982]: I0122 06:05:42.095749 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586b989cdc-5sv75" event={"ID":"13dde00b-2101-40d6-a327-2c541417b11e","Type":"ContainerStarted","Data":"cfbe1ff06df0faebb6d2f6c94e83a17d46ee1f046945a504cf2850bcea7f274b"} Jan 22 06:05:42 crc kubenswrapper[4982]: I0122 06:05:42.096066 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-586b989cdc-5sv75" Jan 22 06:05:42 crc kubenswrapper[4982]: I0122 06:05:42.100197 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"536304e2-bf6d-4c4a-a617-69be7bf2931e","Type":"ContainerStarted","Data":"3287373bce9781dc69b47f25c25358cb2a575449c8b3fc92c2478acccb6376eb"} Jan 22 06:05:42 crc kubenswrapper[4982]: I0122 06:05:42.102117 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"8c227aae-c987-448b-9545-febc060f0929","Type":"ContainerStarted","Data":"8ad613a941dc0aff2c00457b54e707c691926782ca16bc1ae2e5970c5dbdaac0"} Jan 22 06:05:42 crc kubenswrapper[4982]: I0122 06:05:42.103437 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-w7b5x" event={"ID":"98d29241-70c5-488d-826a-c37971689e5c","Type":"ContainerStarted","Data":"62e9a0e84f780bc9d5109a47a1a3b4b18ffc53274df69cc65efda0d894c70ec0"} Jan 22 06:05:42 crc kubenswrapper[4982]: I0122 06:05:42.105245 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"ef18a198-b47b-4e19-a56b-7179e76a1e12","Type":"ContainerStarted","Data":"2e5d780276258df492d4681170a8192ba89d1859402687fc4d4b1699560bb858"} Jan 22 06:05:42 crc kubenswrapper[4982]: I0122 
06:05:42.107756 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"abef47d7-89c0-4c16-8474-8db3d2d127e7","Type":"ContainerStarted","Data":"7ba38d343082ea5a0fb5e591849b42a158f94d9fd61d91a4c98690174b1433f4"} Jan 22 06:05:42 crc kubenswrapper[4982]: I0122 06:05:42.126932 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-586b989cdc-5sv75" podStartSLOduration=4.126912217 podStartE2EDuration="4.126912217s" podCreationTimestamp="2026-01-22 06:05:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:05:42.125111549 +0000 UTC m=+1202.963749562" watchObservedRunningTime="2026-01-22 06:05:42.126912217 +0000 UTC m=+1202.965550240" Jan 22 06:05:42 crc kubenswrapper[4982]: I0122 06:05:42.162438 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=8.356021115 podStartE2EDuration="21.162418553s" podCreationTimestamp="2026-01-22 06:05:21 +0000 UTC" firstStartedPulling="2026-01-22 06:05:28.555902669 +0000 UTC m=+1189.394540672" lastFinishedPulling="2026-01-22 06:05:41.362300107 +0000 UTC m=+1202.200938110" observedRunningTime="2026-01-22 06:05:42.157092134 +0000 UTC m=+1202.995730147" watchObservedRunningTime="2026-01-22 06:05:42.162418553 +0000 UTC m=+1203.001056556" Jan 22 06:05:42 crc kubenswrapper[4982]: I0122 06:05:42.184582 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=23.61840443 podStartE2EDuration="30.184566261s" podCreationTimestamp="2026-01-22 06:05:12 +0000 UTC" firstStartedPulling="2026-01-22 06:05:28.420015363 +0000 UTC m=+1189.258653356" lastFinishedPulling="2026-01-22 06:05:34.986177184 +0000 UTC m=+1195.824815187" observedRunningTime="2026-01-22 06:05:42.180410752 +0000 UTC m=+1203.019048765" watchObservedRunningTime="2026-01-22 06:05:42.184566261 +0000 UTC m=+1203.023204254" Jan 22 06:05:42 crc kubenswrapper[4982]: I0122 06:05:42.214803 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=10.388494634 podStartE2EDuration="22.21478367s" podCreationTimestamp="2026-01-22 06:05:20 +0000 UTC" firstStartedPulling="2026-01-22 06:05:29.56507586 +0000 UTC m=+1190.403713893" lastFinishedPulling="2026-01-22 06:05:41.391364916 +0000 UTC m=+1202.230002929" observedRunningTime="2026-01-22 06:05:42.208708791 +0000 UTC m=+1203.047346794" watchObservedRunningTime="2026-01-22 06:05:42.21478367 +0000 UTC m=+1203.053421673" Jan 22 06:05:42 crc kubenswrapper[4982]: I0122 06:05:42.258451 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=25.468011656 podStartE2EDuration="32.258431038s" podCreationTimestamp="2026-01-22 06:05:10 +0000 UTC" firstStartedPulling="2026-01-22 06:05:28.376645172 +0000 UTC m=+1189.215283175" lastFinishedPulling="2026-01-22 06:05:35.167064554 +0000 UTC m=+1196.005702557" observedRunningTime="2026-01-22 06:05:42.239834893 +0000 UTC m=+1203.078472906" watchObservedRunningTime="2026-01-22 06:05:42.258431038 +0000 UTC m=+1203.097069041" Jan 22 06:05:42 crc kubenswrapper[4982]: I0122 06:05:42.260767 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-w7b5x" podStartSLOduration=2.655942355 podStartE2EDuration="5.260755569s" 
podCreationTimestamp="2026-01-22 06:05:37 +0000 UTC" firstStartedPulling="2026-01-22 06:05:38.747235976 +0000 UTC m=+1199.585873979" lastFinishedPulling="2026-01-22 06:05:41.35204918 +0000 UTC m=+1202.190687193" observedRunningTime="2026-01-22 06:05:42.255545263 +0000 UTC m=+1203.094183276" watchObservedRunningTime="2026-01-22 06:05:42.260755569 +0000 UTC m=+1203.099393572" Jan 22 06:05:42 crc kubenswrapper[4982]: I0122 06:05:42.516439 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Jan 22 06:05:42 crc kubenswrapper[4982]: I0122 06:05:42.516493 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Jan 22 06:05:42 crc kubenswrapper[4982]: I0122 06:05:42.908629 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Jan 22 06:05:42 crc kubenswrapper[4982]: I0122 06:05:42.948559 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Jan 22 06:05:43 crc kubenswrapper[4982]: I0122 06:05:43.093243 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Jan 22 06:05:43 crc kubenswrapper[4982]: I0122 06:05:43.115556 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Jan 22 06:05:43 crc kubenswrapper[4982]: I0122 06:05:43.223925 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Jan 22 06:05:43 crc kubenswrapper[4982]: I0122 06:05:43.590864 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Jan 22 06:05:43 crc kubenswrapper[4982]: I0122 06:05:43.591231 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Jan 22 06:05:44 crc kubenswrapper[4982]: I0122 06:05:44.093246 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Jan 22 06:05:44 crc kubenswrapper[4982]: I0122 06:05:44.135698 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.191139 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.434606 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.435820 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.441064 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.441263 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-7nv55" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.441385 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.441585 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.463677 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.547786 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ce93171-ff81-4a46-9813-2807930a945c-config\") pod \"ovn-northd-0\" (UID: \"0ce93171-ff81-4a46-9813-2807930a945c\") " pod="openstack/ovn-northd-0" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.547903 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ncwj\" (UniqueName: \"kubernetes.io/projected/0ce93171-ff81-4a46-9813-2807930a945c-kube-api-access-8ncwj\") pod \"ovn-northd-0\" (UID: \"0ce93171-ff81-4a46-9813-2807930a945c\") " pod="openstack/ovn-northd-0" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.547946 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0ce93171-ff81-4a46-9813-2807930a945c-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"0ce93171-ff81-4a46-9813-2807930a945c\") " pod="openstack/ovn-northd-0" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.547970 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0ce93171-ff81-4a46-9813-2807930a945c-scripts\") pod \"ovn-northd-0\" (UID: \"0ce93171-ff81-4a46-9813-2807930a945c\") " pod="openstack/ovn-northd-0" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.547989 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ce93171-ff81-4a46-9813-2807930a945c-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"0ce93171-ff81-4a46-9813-2807930a945c\") " pod="openstack/ovn-northd-0" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.548010 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ce93171-ff81-4a46-9813-2807930a945c-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"0ce93171-ff81-4a46-9813-2807930a945c\") " pod="openstack/ovn-northd-0" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.548041 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ce93171-ff81-4a46-9813-2807930a945c-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"0ce93171-ff81-4a46-9813-2807930a945c\") " pod="openstack/ovn-northd-0" Jan 22 06:05:45 crc kubenswrapper[4982]: 
I0122 06:05:45.649360 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ncwj\" (UniqueName: \"kubernetes.io/projected/0ce93171-ff81-4a46-9813-2807930a945c-kube-api-access-8ncwj\") pod \"ovn-northd-0\" (UID: \"0ce93171-ff81-4a46-9813-2807930a945c\") " pod="openstack/ovn-northd-0" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.649432 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0ce93171-ff81-4a46-9813-2807930a945c-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"0ce93171-ff81-4a46-9813-2807930a945c\") " pod="openstack/ovn-northd-0" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.649459 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0ce93171-ff81-4a46-9813-2807930a945c-scripts\") pod \"ovn-northd-0\" (UID: \"0ce93171-ff81-4a46-9813-2807930a945c\") " pod="openstack/ovn-northd-0" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.649481 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ce93171-ff81-4a46-9813-2807930a945c-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"0ce93171-ff81-4a46-9813-2807930a945c\") " pod="openstack/ovn-northd-0" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.649509 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ce93171-ff81-4a46-9813-2807930a945c-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"0ce93171-ff81-4a46-9813-2807930a945c\") " pod="openstack/ovn-northd-0" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.649536 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ce93171-ff81-4a46-9813-2807930a945c-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"0ce93171-ff81-4a46-9813-2807930a945c\") " pod="openstack/ovn-northd-0" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.649570 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ce93171-ff81-4a46-9813-2807930a945c-config\") pod \"ovn-northd-0\" (UID: \"0ce93171-ff81-4a46-9813-2807930a945c\") " pod="openstack/ovn-northd-0" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.650056 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0ce93171-ff81-4a46-9813-2807930a945c-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"0ce93171-ff81-4a46-9813-2807930a945c\") " pod="openstack/ovn-northd-0" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.650462 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ce93171-ff81-4a46-9813-2807930a945c-config\") pod \"ovn-northd-0\" (UID: \"0ce93171-ff81-4a46-9813-2807930a945c\") " pod="openstack/ovn-northd-0" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.650926 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0ce93171-ff81-4a46-9813-2807930a945c-scripts\") pod \"ovn-northd-0\" (UID: \"0ce93171-ff81-4a46-9813-2807930a945c\") " pod="openstack/ovn-northd-0" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.656837 4982 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ce93171-ff81-4a46-9813-2807930a945c-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"0ce93171-ff81-4a46-9813-2807930a945c\") " pod="openstack/ovn-northd-0" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.661686 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ce93171-ff81-4a46-9813-2807930a945c-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"0ce93171-ff81-4a46-9813-2807930a945c\") " pod="openstack/ovn-northd-0" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.666513 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ce93171-ff81-4a46-9813-2807930a945c-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"0ce93171-ff81-4a46-9813-2807930a945c\") " pod="openstack/ovn-northd-0" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.666641 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ncwj\" (UniqueName: \"kubernetes.io/projected/0ce93171-ff81-4a46-9813-2807930a945c-kube-api-access-8ncwj\") pod \"ovn-northd-0\" (UID: \"0ce93171-ff81-4a46-9813-2807930a945c\") " pod="openstack/ovn-northd-0" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.760183 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.769895 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.822781 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-5sv75"] Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.833511 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-586b989cdc-5sv75" podUID="13dde00b-2101-40d6-a327-2c541417b11e" containerName="dnsmasq-dns" containerID="cri-o://cfbe1ff06df0faebb6d2f6c94e83a17d46ee1f046945a504cf2850bcea7f274b" gracePeriod=10 Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.877523 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67fdf7998c-xxcdd"] Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.878934 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67fdf7998c-xxcdd" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.892655 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67fdf7998c-xxcdd"] Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.955645 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5d4bc07-60ec-4fc9-9725-d80b94777e28-config\") pod \"dnsmasq-dns-67fdf7998c-xxcdd\" (UID: \"e5d4bc07-60ec-4fc9-9725-d80b94777e28\") " pod="openstack/dnsmasq-dns-67fdf7998c-xxcdd" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.955749 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e5d4bc07-60ec-4fc9-9725-d80b94777e28-dns-svc\") pod \"dnsmasq-dns-67fdf7998c-xxcdd\" (UID: \"e5d4bc07-60ec-4fc9-9725-d80b94777e28\") " pod="openstack/dnsmasq-dns-67fdf7998c-xxcdd" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.955775 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e5d4bc07-60ec-4fc9-9725-d80b94777e28-ovsdbserver-nb\") pod \"dnsmasq-dns-67fdf7998c-xxcdd\" (UID: \"e5d4bc07-60ec-4fc9-9725-d80b94777e28\") " pod="openstack/dnsmasq-dns-67fdf7998c-xxcdd" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.955802 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e5d4bc07-60ec-4fc9-9725-d80b94777e28-ovsdbserver-sb\") pod \"dnsmasq-dns-67fdf7998c-xxcdd\" (UID: \"e5d4bc07-60ec-4fc9-9725-d80b94777e28\") " pod="openstack/dnsmasq-dns-67fdf7998c-xxcdd" Jan 22 06:05:45 crc kubenswrapper[4982]: I0122 06:05:45.955836 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bm6rn\" (UniqueName: \"kubernetes.io/projected/e5d4bc07-60ec-4fc9-9725-d80b94777e28-kube-api-access-bm6rn\") pod \"dnsmasq-dns-67fdf7998c-xxcdd\" (UID: \"e5d4bc07-60ec-4fc9-9725-d80b94777e28\") " pod="openstack/dnsmasq-dns-67fdf7998c-xxcdd" Jan 22 06:05:46 crc kubenswrapper[4982]: I0122 06:05:46.057720 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e5d4bc07-60ec-4fc9-9725-d80b94777e28-dns-svc\") pod \"dnsmasq-dns-67fdf7998c-xxcdd\" (UID: \"e5d4bc07-60ec-4fc9-9725-d80b94777e28\") " pod="openstack/dnsmasq-dns-67fdf7998c-xxcdd" Jan 22 06:05:46 crc kubenswrapper[4982]: I0122 06:05:46.057773 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e5d4bc07-60ec-4fc9-9725-d80b94777e28-ovsdbserver-nb\") pod \"dnsmasq-dns-67fdf7998c-xxcdd\" (UID: \"e5d4bc07-60ec-4fc9-9725-d80b94777e28\") " pod="openstack/dnsmasq-dns-67fdf7998c-xxcdd" Jan 22 06:05:46 crc kubenswrapper[4982]: I0122 06:05:46.057804 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e5d4bc07-60ec-4fc9-9725-d80b94777e28-ovsdbserver-sb\") pod \"dnsmasq-dns-67fdf7998c-xxcdd\" (UID: \"e5d4bc07-60ec-4fc9-9725-d80b94777e28\") " pod="openstack/dnsmasq-dns-67fdf7998c-xxcdd" Jan 22 06:05:46 crc kubenswrapper[4982]: I0122 06:05:46.057842 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-bm6rn\" (UniqueName: \"kubernetes.io/projected/e5d4bc07-60ec-4fc9-9725-d80b94777e28-kube-api-access-bm6rn\") pod \"dnsmasq-dns-67fdf7998c-xxcdd\" (UID: \"e5d4bc07-60ec-4fc9-9725-d80b94777e28\") " pod="openstack/dnsmasq-dns-67fdf7998c-xxcdd" Jan 22 06:05:46 crc kubenswrapper[4982]: I0122 06:05:46.057885 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5d4bc07-60ec-4fc9-9725-d80b94777e28-config\") pod \"dnsmasq-dns-67fdf7998c-xxcdd\" (UID: \"e5d4bc07-60ec-4fc9-9725-d80b94777e28\") " pod="openstack/dnsmasq-dns-67fdf7998c-xxcdd" Jan 22 06:05:46 crc kubenswrapper[4982]: I0122 06:05:46.059023 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5d4bc07-60ec-4fc9-9725-d80b94777e28-config\") pod \"dnsmasq-dns-67fdf7998c-xxcdd\" (UID: \"e5d4bc07-60ec-4fc9-9725-d80b94777e28\") " pod="openstack/dnsmasq-dns-67fdf7998c-xxcdd" Jan 22 06:05:46 crc kubenswrapper[4982]: I0122 06:05:46.059204 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e5d4bc07-60ec-4fc9-9725-d80b94777e28-dns-svc\") pod \"dnsmasq-dns-67fdf7998c-xxcdd\" (UID: \"e5d4bc07-60ec-4fc9-9725-d80b94777e28\") " pod="openstack/dnsmasq-dns-67fdf7998c-xxcdd" Jan 22 06:05:46 crc kubenswrapper[4982]: I0122 06:05:46.059301 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e5d4bc07-60ec-4fc9-9725-d80b94777e28-ovsdbserver-sb\") pod \"dnsmasq-dns-67fdf7998c-xxcdd\" (UID: \"e5d4bc07-60ec-4fc9-9725-d80b94777e28\") " pod="openstack/dnsmasq-dns-67fdf7998c-xxcdd" Jan 22 06:05:46 crc kubenswrapper[4982]: I0122 06:05:46.059547 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e5d4bc07-60ec-4fc9-9725-d80b94777e28-ovsdbserver-nb\") pod \"dnsmasq-dns-67fdf7998c-xxcdd\" (UID: \"e5d4bc07-60ec-4fc9-9725-d80b94777e28\") " pod="openstack/dnsmasq-dns-67fdf7998c-xxcdd" Jan 22 06:05:46 crc kubenswrapper[4982]: I0122 06:05:46.084302 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bm6rn\" (UniqueName: \"kubernetes.io/projected/e5d4bc07-60ec-4fc9-9725-d80b94777e28-kube-api-access-bm6rn\") pod \"dnsmasq-dns-67fdf7998c-xxcdd\" (UID: \"e5d4bc07-60ec-4fc9-9725-d80b94777e28\") " pod="openstack/dnsmasq-dns-67fdf7998c-xxcdd" Jan 22 06:05:46 crc kubenswrapper[4982]: I0122 06:05:46.137207 4982 generic.go:334] "Generic (PLEG): container finished" podID="13dde00b-2101-40d6-a327-2c541417b11e" containerID="cfbe1ff06df0faebb6d2f6c94e83a17d46ee1f046945a504cf2850bcea7f274b" exitCode=0 Jan 22 06:05:46 crc kubenswrapper[4982]: I0122 06:05:46.137235 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586b989cdc-5sv75" event={"ID":"13dde00b-2101-40d6-a327-2c541417b11e","Type":"ContainerDied","Data":"cfbe1ff06df0faebb6d2f6c94e83a17d46ee1f046945a504cf2850bcea7f274b"} Jan 22 06:05:46 crc kubenswrapper[4982]: I0122 06:05:46.217078 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67fdf7998c-xxcdd" Jan 22 06:05:46 crc kubenswrapper[4982]: I0122 06:05:46.329317 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Jan 22 06:05:46 crc kubenswrapper[4982]: W0122 06:05:46.336021 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0ce93171_ff81_4a46_9813_2807930a945c.slice/crio-8bc005981d51764ef9c8824a7b1f3535840fc641063c2d7fb9f5188b0f3357a3 WatchSource:0}: Error finding container 8bc005981d51764ef9c8824a7b1f3535840fc641063c2d7fb9f5188b0f3357a3: Status 404 returned error can't find the container with id 8bc005981d51764ef9c8824a7b1f3535840fc641063c2d7fb9f5188b0f3357a3 Jan 22 06:05:46 crc kubenswrapper[4982]: I0122 06:05:46.731972 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67fdf7998c-xxcdd"] Jan 22 06:05:46 crc kubenswrapper[4982]: W0122 06:05:46.735319 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode5d4bc07_60ec_4fc9_9725_d80b94777e28.slice/crio-d0c9a5aec612d324396014a61b246930d83e9078d0a9f1d020de34b563a12513 WatchSource:0}: Error finding container d0c9a5aec612d324396014a61b246930d83e9078d0a9f1d020de34b563a12513: Status 404 returned error can't find the container with id d0c9a5aec612d324396014a61b246930d83e9078d0a9f1d020de34b563a12513 Jan 22 06:05:46 crc kubenswrapper[4982]: I0122 06:05:46.944784 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Jan 22 06:05:46 crc kubenswrapper[4982]: I0122 06:05:46.958717 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Jan 22 06:05:46 crc kubenswrapper[4982]: I0122 06:05:46.958814 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Jan 22 06:05:46 crc kubenswrapper[4982]: I0122 06:05:46.961188 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Jan 22 06:05:46 crc kubenswrapper[4982]: I0122 06:05:46.962188 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-wzjr6" Jan 22 06:05:46 crc kubenswrapper[4982]: I0122 06:05:46.962570 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Jan 22 06:05:46 crc kubenswrapper[4982]: I0122 06:05:46.962722 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.082952 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-586b989cdc-5sv75" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.090925 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e66de65-9639-4397-bc39-dfcf0c325dff-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"0e66de65-9639-4397-bc39-dfcf0c325dff\") " pod="openstack/swift-storage-0" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.090998 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2dmc\" (UniqueName: \"kubernetes.io/projected/0e66de65-9639-4397-bc39-dfcf0c325dff-kube-api-access-x2dmc\") pod \"swift-storage-0\" (UID: \"0e66de65-9639-4397-bc39-dfcf0c325dff\") " pod="openstack/swift-storage-0" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.091193 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"0e66de65-9639-4397-bc39-dfcf0c325dff\") " pod="openstack/swift-storage-0" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.091255 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/0e66de65-9639-4397-bc39-dfcf0c325dff-lock\") pod \"swift-storage-0\" (UID: \"0e66de65-9639-4397-bc39-dfcf0c325dff\") " pod="openstack/swift-storage-0" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.091322 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/0e66de65-9639-4397-bc39-dfcf0c325dff-cache\") pod \"swift-storage-0\" (UID: \"0e66de65-9639-4397-bc39-dfcf0c325dff\") " pod="openstack/swift-storage-0" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.092603 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0e66de65-9639-4397-bc39-dfcf0c325dff-etc-swift\") pod \"swift-storage-0\" (UID: \"0e66de65-9639-4397-bc39-dfcf0c325dff\") " pod="openstack/swift-storage-0" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.153261 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"0ce93171-ff81-4a46-9813-2807930a945c","Type":"ContainerStarted","Data":"8bc005981d51764ef9c8824a7b1f3535840fc641063c2d7fb9f5188b0f3357a3"} Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.154724 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586b989cdc-5sv75" event={"ID":"13dde00b-2101-40d6-a327-2c541417b11e","Type":"ContainerDied","Data":"6b790217a310e6802c5453fcea127ee9937f2a327afb9ff2dea5eafa521605f4"} Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.154757 4982 scope.go:117] "RemoveContainer" containerID="cfbe1ff06df0faebb6d2f6c94e83a17d46ee1f046945a504cf2850bcea7f274b" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.154882 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-586b989cdc-5sv75" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.161733 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67fdf7998c-xxcdd" event={"ID":"e5d4bc07-60ec-4fc9-9725-d80b94777e28","Type":"ContainerStarted","Data":"d0c9a5aec612d324396014a61b246930d83e9078d0a9f1d020de34b563a12513"} Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.190488 4982 scope.go:117] "RemoveContainer" containerID="422bf6d6fc922d9e710e3871431952674e63b2e857d84cbc298a900b828522d1" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.194462 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/13dde00b-2101-40d6-a327-2c541417b11e-ovsdbserver-sb\") pod \"13dde00b-2101-40d6-a327-2c541417b11e\" (UID: \"13dde00b-2101-40d6-a327-2c541417b11e\") " Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.194514 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/13dde00b-2101-40d6-a327-2c541417b11e-dns-svc\") pod \"13dde00b-2101-40d6-a327-2c541417b11e\" (UID: \"13dde00b-2101-40d6-a327-2c541417b11e\") " Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.194627 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2rmpv\" (UniqueName: \"kubernetes.io/projected/13dde00b-2101-40d6-a327-2c541417b11e-kube-api-access-2rmpv\") pod \"13dde00b-2101-40d6-a327-2c541417b11e\" (UID: \"13dde00b-2101-40d6-a327-2c541417b11e\") " Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.194660 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13dde00b-2101-40d6-a327-2c541417b11e-config\") pod \"13dde00b-2101-40d6-a327-2c541417b11e\" (UID: \"13dde00b-2101-40d6-a327-2c541417b11e\") " Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.194714 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/13dde00b-2101-40d6-a327-2c541417b11e-ovsdbserver-nb\") pod \"13dde00b-2101-40d6-a327-2c541417b11e\" (UID: \"13dde00b-2101-40d6-a327-2c541417b11e\") " Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.195219 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e66de65-9639-4397-bc39-dfcf0c325dff-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"0e66de65-9639-4397-bc39-dfcf0c325dff\") " pod="openstack/swift-storage-0" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.195328 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2dmc\" (UniqueName: \"kubernetes.io/projected/0e66de65-9639-4397-bc39-dfcf0c325dff-kube-api-access-x2dmc\") pod \"swift-storage-0\" (UID: \"0e66de65-9639-4397-bc39-dfcf0c325dff\") " pod="openstack/swift-storage-0" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.195408 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"0e66de65-9639-4397-bc39-dfcf0c325dff\") " pod="openstack/swift-storage-0" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.195446 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" 
(UniqueName: \"kubernetes.io/empty-dir/0e66de65-9639-4397-bc39-dfcf0c325dff-lock\") pod \"swift-storage-0\" (UID: \"0e66de65-9639-4397-bc39-dfcf0c325dff\") " pod="openstack/swift-storage-0" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.195481 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/0e66de65-9639-4397-bc39-dfcf0c325dff-cache\") pod \"swift-storage-0\" (UID: \"0e66de65-9639-4397-bc39-dfcf0c325dff\") " pod="openstack/swift-storage-0" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.195654 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0e66de65-9639-4397-bc39-dfcf0c325dff-etc-swift\") pod \"swift-storage-0\" (UID: \"0e66de65-9639-4397-bc39-dfcf0c325dff\") " pod="openstack/swift-storage-0" Jan 22 06:05:47 crc kubenswrapper[4982]: E0122 06:05:47.195971 4982 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 22 06:05:47 crc kubenswrapper[4982]: E0122 06:05:47.195993 4982 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 22 06:05:47 crc kubenswrapper[4982]: E0122 06:05:47.196041 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/0e66de65-9639-4397-bc39-dfcf0c325dff-etc-swift podName:0e66de65-9639-4397-bc39-dfcf0c325dff nodeName:}" failed. No retries permitted until 2026-01-22 06:05:47.696021698 +0000 UTC m=+1208.534659701 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/0e66de65-9639-4397-bc39-dfcf0c325dff-etc-swift") pod "swift-storage-0" (UID: "0e66de65-9639-4397-bc39-dfcf0c325dff") : configmap "swift-ring-files" not found Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.198001 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"0e66de65-9639-4397-bc39-dfcf0c325dff\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/swift-storage-0" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.200351 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/0e66de65-9639-4397-bc39-dfcf0c325dff-cache\") pod \"swift-storage-0\" (UID: \"0e66de65-9639-4397-bc39-dfcf0c325dff\") " pod="openstack/swift-storage-0" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.203039 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/0e66de65-9639-4397-bc39-dfcf0c325dff-lock\") pod \"swift-storage-0\" (UID: \"0e66de65-9639-4397-bc39-dfcf0c325dff\") " pod="openstack/swift-storage-0" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.204978 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e66de65-9639-4397-bc39-dfcf0c325dff-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"0e66de65-9639-4397-bc39-dfcf0c325dff\") " pod="openstack/swift-storage-0" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.209693 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13dde00b-2101-40d6-a327-2c541417b11e-kube-api-access-2rmpv" 
(OuterVolumeSpecName: "kube-api-access-2rmpv") pod "13dde00b-2101-40d6-a327-2c541417b11e" (UID: "13dde00b-2101-40d6-a327-2c541417b11e"). InnerVolumeSpecName "kube-api-access-2rmpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.218151 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2dmc\" (UniqueName: \"kubernetes.io/projected/0e66de65-9639-4397-bc39-dfcf0c325dff-kube-api-access-x2dmc\") pod \"swift-storage-0\" (UID: \"0e66de65-9639-4397-bc39-dfcf0c325dff\") " pod="openstack/swift-storage-0" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.231882 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"0e66de65-9639-4397-bc39-dfcf0c325dff\") " pod="openstack/swift-storage-0" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.243660 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13dde00b-2101-40d6-a327-2c541417b11e-config" (OuterVolumeSpecName: "config") pod "13dde00b-2101-40d6-a327-2c541417b11e" (UID: "13dde00b-2101-40d6-a327-2c541417b11e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.243728 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13dde00b-2101-40d6-a327-2c541417b11e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "13dde00b-2101-40d6-a327-2c541417b11e" (UID: "13dde00b-2101-40d6-a327-2c541417b11e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.268393 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13dde00b-2101-40d6-a327-2c541417b11e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "13dde00b-2101-40d6-a327-2c541417b11e" (UID: "13dde00b-2101-40d6-a327-2c541417b11e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.277368 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13dde00b-2101-40d6-a327-2c541417b11e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "13dde00b-2101-40d6-a327-2c541417b11e" (UID: "13dde00b-2101-40d6-a327-2c541417b11e"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.297359 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/13dde00b-2101-40d6-a327-2c541417b11e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.297401 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/13dde00b-2101-40d6-a327-2c541417b11e-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.297415 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2rmpv\" (UniqueName: \"kubernetes.io/projected/13dde00b-2101-40d6-a327-2c541417b11e-kube-api-access-2rmpv\") on node \"crc\" DevicePath \"\"" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.297428 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13dde00b-2101-40d6-a327-2c541417b11e-config\") on node \"crc\" DevicePath \"\"" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.297439 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/13dde00b-2101-40d6-a327-2c541417b11e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.479325 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-r66tz"] Jan 22 06:05:47 crc kubenswrapper[4982]: E0122 06:05:47.480049 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13dde00b-2101-40d6-a327-2c541417b11e" containerName="init" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.480065 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="13dde00b-2101-40d6-a327-2c541417b11e" containerName="init" Jan 22 06:05:47 crc kubenswrapper[4982]: E0122 06:05:47.480135 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13dde00b-2101-40d6-a327-2c541417b11e" containerName="dnsmasq-dns" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.480142 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="13dde00b-2101-40d6-a327-2c541417b11e" containerName="dnsmasq-dns" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.480318 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="13dde00b-2101-40d6-a327-2c541417b11e" containerName="dnsmasq-dns" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.482392 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-r66tz" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.485579 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.485833 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.486094 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.509027 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-r66tz"] Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.584058 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-5sv75"] Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.592808 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-5sv75"] Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.601115 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b9682832-1d53-4341-9dbc-10f057d1f6ad-scripts\") pod \"swift-ring-rebalance-r66tz\" (UID: \"b9682832-1d53-4341-9dbc-10f057d1f6ad\") " pod="openstack/swift-ring-rebalance-r66tz" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.601170 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b9682832-1d53-4341-9dbc-10f057d1f6ad-swiftconf\") pod \"swift-ring-rebalance-r66tz\" (UID: \"b9682832-1d53-4341-9dbc-10f057d1f6ad\") " pod="openstack/swift-ring-rebalance-r66tz" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.601262 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b9682832-1d53-4341-9dbc-10f057d1f6ad-ring-data-devices\") pod \"swift-ring-rebalance-r66tz\" (UID: \"b9682832-1d53-4341-9dbc-10f057d1f6ad\") " pod="openstack/swift-ring-rebalance-r66tz" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.601288 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b9682832-1d53-4341-9dbc-10f057d1f6ad-etc-swift\") pod \"swift-ring-rebalance-r66tz\" (UID: \"b9682832-1d53-4341-9dbc-10f057d1f6ad\") " pod="openstack/swift-ring-rebalance-r66tz" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.601309 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b9682832-1d53-4341-9dbc-10f057d1f6ad-dispersionconf\") pod \"swift-ring-rebalance-r66tz\" (UID: \"b9682832-1d53-4341-9dbc-10f057d1f6ad\") " pod="openstack/swift-ring-rebalance-r66tz" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.601327 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9682832-1d53-4341-9dbc-10f057d1f6ad-combined-ca-bundle\") pod \"swift-ring-rebalance-r66tz\" (UID: \"b9682832-1d53-4341-9dbc-10f057d1f6ad\") " pod="openstack/swift-ring-rebalance-r66tz" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.601396 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsb5s\" (UniqueName: \"kubernetes.io/projected/b9682832-1d53-4341-9dbc-10f057d1f6ad-kube-api-access-gsb5s\") pod \"swift-ring-rebalance-r66tz\" (UID: \"b9682832-1d53-4341-9dbc-10f057d1f6ad\") " pod="openstack/swift-ring-rebalance-r66tz" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.703127 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0e66de65-9639-4397-bc39-dfcf0c325dff-etc-swift\") pod \"swift-storage-0\" (UID: \"0e66de65-9639-4397-bc39-dfcf0c325dff\") " pod="openstack/swift-storage-0" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.703214 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b9682832-1d53-4341-9dbc-10f057d1f6ad-scripts\") pod \"swift-ring-rebalance-r66tz\" (UID: \"b9682832-1d53-4341-9dbc-10f057d1f6ad\") " pod="openstack/swift-ring-rebalance-r66tz" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.703242 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b9682832-1d53-4341-9dbc-10f057d1f6ad-swiftconf\") pod \"swift-ring-rebalance-r66tz\" (UID: \"b9682832-1d53-4341-9dbc-10f057d1f6ad\") " pod="openstack/swift-ring-rebalance-r66tz" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.703275 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b9682832-1d53-4341-9dbc-10f057d1f6ad-ring-data-devices\") pod \"swift-ring-rebalance-r66tz\" (UID: \"b9682832-1d53-4341-9dbc-10f057d1f6ad\") " pod="openstack/swift-ring-rebalance-r66tz" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.703297 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b9682832-1d53-4341-9dbc-10f057d1f6ad-etc-swift\") pod \"swift-ring-rebalance-r66tz\" (UID: \"b9682832-1d53-4341-9dbc-10f057d1f6ad\") " pod="openstack/swift-ring-rebalance-r66tz" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.703317 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b9682832-1d53-4341-9dbc-10f057d1f6ad-dispersionconf\") pod \"swift-ring-rebalance-r66tz\" (UID: \"b9682832-1d53-4341-9dbc-10f057d1f6ad\") " pod="openstack/swift-ring-rebalance-r66tz" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.703334 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9682832-1d53-4341-9dbc-10f057d1f6ad-combined-ca-bundle\") pod \"swift-ring-rebalance-r66tz\" (UID: \"b9682832-1d53-4341-9dbc-10f057d1f6ad\") " pod="openstack/swift-ring-rebalance-r66tz" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.703366 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsb5s\" (UniqueName: \"kubernetes.io/projected/b9682832-1d53-4341-9dbc-10f057d1f6ad-kube-api-access-gsb5s\") pod \"swift-ring-rebalance-r66tz\" (UID: \"b9682832-1d53-4341-9dbc-10f057d1f6ad\") " pod="openstack/swift-ring-rebalance-r66tz" Jan 22 06:05:47 crc kubenswrapper[4982]: E0122 06:05:47.703800 4982 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 22 06:05:47 crc 
kubenswrapper[4982]: E0122 06:05:47.703817 4982 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 22 06:05:47 crc kubenswrapper[4982]: E0122 06:05:47.703869 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/0e66de65-9639-4397-bc39-dfcf0c325dff-etc-swift podName:0e66de65-9639-4397-bc39-dfcf0c325dff nodeName:}" failed. No retries permitted until 2026-01-22 06:05:48.703839737 +0000 UTC m=+1209.542477740 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/0e66de65-9639-4397-bc39-dfcf0c325dff-etc-swift") pod "swift-storage-0" (UID: "0e66de65-9639-4397-bc39-dfcf0c325dff") : configmap "swift-ring-files" not found Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.704766 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b9682832-1d53-4341-9dbc-10f057d1f6ad-scripts\") pod \"swift-ring-rebalance-r66tz\" (UID: \"b9682832-1d53-4341-9dbc-10f057d1f6ad\") " pod="openstack/swift-ring-rebalance-r66tz" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.708370 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b9682832-1d53-4341-9dbc-10f057d1f6ad-dispersionconf\") pod \"swift-ring-rebalance-r66tz\" (UID: \"b9682832-1d53-4341-9dbc-10f057d1f6ad\") " pod="openstack/swift-ring-rebalance-r66tz" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.708753 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b9682832-1d53-4341-9dbc-10f057d1f6ad-swiftconf\") pod \"swift-ring-rebalance-r66tz\" (UID: \"b9682832-1d53-4341-9dbc-10f057d1f6ad\") " pod="openstack/swift-ring-rebalance-r66tz" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.709110 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b9682832-1d53-4341-9dbc-10f057d1f6ad-ring-data-devices\") pod \"swift-ring-rebalance-r66tz\" (UID: \"b9682832-1d53-4341-9dbc-10f057d1f6ad\") " pod="openstack/swift-ring-rebalance-r66tz" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.709126 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b9682832-1d53-4341-9dbc-10f057d1f6ad-etc-swift\") pod \"swift-ring-rebalance-r66tz\" (UID: \"b9682832-1d53-4341-9dbc-10f057d1f6ad\") " pod="openstack/swift-ring-rebalance-r66tz" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.710180 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9682832-1d53-4341-9dbc-10f057d1f6ad-combined-ca-bundle\") pod \"swift-ring-rebalance-r66tz\" (UID: \"b9682832-1d53-4341-9dbc-10f057d1f6ad\") " pod="openstack/swift-ring-rebalance-r66tz" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.725442 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsb5s\" (UniqueName: \"kubernetes.io/projected/b9682832-1d53-4341-9dbc-10f057d1f6ad-kube-api-access-gsb5s\") pod \"swift-ring-rebalance-r66tz\" (UID: \"b9682832-1d53-4341-9dbc-10f057d1f6ad\") " pod="openstack/swift-ring-rebalance-r66tz" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.731527 4982 kubelet_volumes.go:163] "Cleaned up orphaned 
pod volumes dir" podUID="13dde00b-2101-40d6-a327-2c541417b11e" path="/var/lib/kubelet/pods/13dde00b-2101-40d6-a327-2c541417b11e/volumes" Jan 22 06:05:47 crc kubenswrapper[4982]: I0122 06:05:47.882010 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-r66tz" Jan 22 06:05:48 crc kubenswrapper[4982]: I0122 06:05:48.177131 4982 generic.go:334] "Generic (PLEG): container finished" podID="e5d4bc07-60ec-4fc9-9725-d80b94777e28" containerID="26cb56f170d7707dc66cad45b6af1d1edc1928c274d8d16583356e98c5ce26ef" exitCode=0 Jan 22 06:05:48 crc kubenswrapper[4982]: I0122 06:05:48.177984 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67fdf7998c-xxcdd" event={"ID":"e5d4bc07-60ec-4fc9-9725-d80b94777e28","Type":"ContainerDied","Data":"26cb56f170d7707dc66cad45b6af1d1edc1928c274d8d16583356e98c5ce26ef"} Jan 22 06:05:48 crc kubenswrapper[4982]: I0122 06:05:48.286247 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Jan 22 06:05:48 crc kubenswrapper[4982]: I0122 06:05:48.370823 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/openstack-cell1-galera-0" podUID="536304e2-bf6d-4c4a-a617-69be7bf2931e" containerName="galera" probeResult="failure" output=< Jan 22 06:05:48 crc kubenswrapper[4982]: wsrep_local_state_comment (Joined) differs from Synced Jan 22 06:05:48 crc kubenswrapper[4982]: > Jan 22 06:05:48 crc kubenswrapper[4982]: I0122 06:05:48.404121 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-r66tz"] Jan 22 06:05:48 crc kubenswrapper[4982]: W0122 06:05:48.409124 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb9682832_1d53_4341_9dbc_10f057d1f6ad.slice/crio-88bbbc0a30a0cf8ae4bf2efa29bac91b48733dfb03cbc2827aa6f8af06d2208e WatchSource:0}: Error finding container 88bbbc0a30a0cf8ae4bf2efa29bac91b48733dfb03cbc2827aa6f8af06d2208e: Status 404 returned error can't find the container with id 88bbbc0a30a0cf8ae4bf2efa29bac91b48733dfb03cbc2827aa6f8af06d2208e Jan 22 06:05:48 crc kubenswrapper[4982]: I0122 06:05:48.720620 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0e66de65-9639-4397-bc39-dfcf0c325dff-etc-swift\") pod \"swift-storage-0\" (UID: \"0e66de65-9639-4397-bc39-dfcf0c325dff\") " pod="openstack/swift-storage-0" Jan 22 06:05:48 crc kubenswrapper[4982]: E0122 06:05:48.720864 4982 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 22 06:05:48 crc kubenswrapper[4982]: E0122 06:05:48.721118 4982 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 22 06:05:48 crc kubenswrapper[4982]: E0122 06:05:48.721186 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/0e66de65-9639-4397-bc39-dfcf0c325dff-etc-swift podName:0e66de65-9639-4397-bc39-dfcf0c325dff nodeName:}" failed. No retries permitted until 2026-01-22 06:05:50.721165561 +0000 UTC m=+1211.559803574 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/0e66de65-9639-4397-bc39-dfcf0c325dff-etc-swift") pod "swift-storage-0" (UID: "0e66de65-9639-4397-bc39-dfcf0c325dff") : configmap "swift-ring-files" not found Jan 22 06:05:48 crc kubenswrapper[4982]: I0122 06:05:48.974298 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 06:05:48 crc kubenswrapper[4982]: I0122 06:05:48.974352 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 06:05:49 crc kubenswrapper[4982]: I0122 06:05:49.187753 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-r66tz" event={"ID":"b9682832-1d53-4341-9dbc-10f057d1f6ad","Type":"ContainerStarted","Data":"88bbbc0a30a0cf8ae4bf2efa29bac91b48733dfb03cbc2827aa6f8af06d2208e"} Jan 22 06:05:50 crc kubenswrapper[4982]: I0122 06:05:50.758588 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0e66de65-9639-4397-bc39-dfcf0c325dff-etc-swift\") pod \"swift-storage-0\" (UID: \"0e66de65-9639-4397-bc39-dfcf0c325dff\") " pod="openstack/swift-storage-0" Jan 22 06:05:50 crc kubenswrapper[4982]: E0122 06:05:50.758804 4982 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 22 06:05:50 crc kubenswrapper[4982]: E0122 06:05:50.758894 4982 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 22 06:05:50 crc kubenswrapper[4982]: E0122 06:05:50.758973 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/0e66de65-9639-4397-bc39-dfcf0c325dff-etc-swift podName:0e66de65-9639-4397-bc39-dfcf0c325dff nodeName:}" failed. No retries permitted until 2026-01-22 06:05:54.758930688 +0000 UTC m=+1215.597568701 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/0e66de65-9639-4397-bc39-dfcf0c325dff-etc-swift") pod "swift-storage-0" (UID: "0e66de65-9639-4397-bc39-dfcf0c325dff") : configmap "swift-ring-files" not found Jan 22 06:05:53 crc kubenswrapper[4982]: I0122 06:05:53.234504 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67fdf7998c-xxcdd" event={"ID":"e5d4bc07-60ec-4fc9-9725-d80b94777e28","Type":"ContainerStarted","Data":"cd46c815d9750e347bd39cdb53bc6764234032f78ba9d8b386f5799824377438"} Jan 22 06:05:53 crc kubenswrapper[4982]: I0122 06:05:53.689282 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Jan 22 06:05:54 crc kubenswrapper[4982]: I0122 06:05:54.191187 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Jan 22 06:05:54 crc kubenswrapper[4982]: I0122 06:05:54.243234 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-67fdf7998c-xxcdd" Jan 22 06:05:54 crc kubenswrapper[4982]: I0122 06:05:54.262255 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-67fdf7998c-xxcdd" podStartSLOduration=9.262234785 podStartE2EDuration="9.262234785s" podCreationTimestamp="2026-01-22 06:05:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:05:54.25818753 +0000 UTC m=+1215.096825553" watchObservedRunningTime="2026-01-22 06:05:54.262234785 +0000 UTC m=+1215.100872788" Jan 22 06:05:54 crc kubenswrapper[4982]: I0122 06:05:54.290002 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Jan 22 06:05:54 crc kubenswrapper[4982]: I0122 06:05:54.846785 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0e66de65-9639-4397-bc39-dfcf0c325dff-etc-swift\") pod \"swift-storage-0\" (UID: \"0e66de65-9639-4397-bc39-dfcf0c325dff\") " pod="openstack/swift-storage-0" Jan 22 06:05:54 crc kubenswrapper[4982]: E0122 06:05:54.846963 4982 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 22 06:05:54 crc kubenswrapper[4982]: E0122 06:05:54.847208 4982 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 22 06:05:54 crc kubenswrapper[4982]: E0122 06:05:54.847353 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/0e66de65-9639-4397-bc39-dfcf0c325dff-etc-swift podName:0e66de65-9639-4397-bc39-dfcf0c325dff nodeName:}" failed. No retries permitted until 2026-01-22 06:06:02.847336962 +0000 UTC m=+1223.685974965 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/0e66de65-9639-4397-bc39-dfcf0c325dff-etc-swift") pod "swift-storage-0" (UID: "0e66de65-9639-4397-bc39-dfcf0c325dff") : configmap "swift-ring-files" not found Jan 22 06:05:55 crc kubenswrapper[4982]: I0122 06:05:55.260051 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"0ce93171-ff81-4a46-9813-2807930a945c","Type":"ContainerStarted","Data":"a14282bce1f209b3f1b5d12076473a67dc4c3e837318dad27ea3002e390683ec"} Jan 22 06:05:55 crc kubenswrapper[4982]: I0122 06:05:55.260108 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"0ce93171-ff81-4a46-9813-2807930a945c","Type":"ContainerStarted","Data":"752892d037f338ef999fab998236c266729937268421b318ed21e35e0b563dde"} Jan 22 06:05:55 crc kubenswrapper[4982]: I0122 06:05:55.260374 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Jan 22 06:05:55 crc kubenswrapper[4982]: I0122 06:05:55.281673 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.440345072 podStartE2EDuration="10.281644293s" podCreationTimestamp="2026-01-22 06:05:45 +0000 UTC" firstStartedPulling="2026-01-22 06:05:46.338902184 +0000 UTC m=+1207.177540187" lastFinishedPulling="2026-01-22 06:05:54.180201405 +0000 UTC m=+1215.018839408" observedRunningTime="2026-01-22 06:05:55.277869695 +0000 UTC m=+1216.116507698" watchObservedRunningTime="2026-01-22 06:05:55.281644293 +0000 UTC m=+1216.120282296" Jan 22 06:05:57 crc kubenswrapper[4982]: I0122 06:05:57.322424 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-r66tz" event={"ID":"b9682832-1d53-4341-9dbc-10f057d1f6ad","Type":"ContainerStarted","Data":"13f7a9e83037686cbbc1266994ac9f274abbaae84d3e10a553ddd1ed81c5e0c8"} Jan 22 06:05:57 crc kubenswrapper[4982]: I0122 06:05:57.346869 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-r66tz" podStartSLOduration=1.905743126 podStartE2EDuration="10.346818887s" podCreationTimestamp="2026-01-22 06:05:47 +0000 UTC" firstStartedPulling="2026-01-22 06:05:48.411284285 +0000 UTC m=+1209.249922288" lastFinishedPulling="2026-01-22 06:05:56.852360046 +0000 UTC m=+1217.690998049" observedRunningTime="2026-01-22 06:05:57.342883454 +0000 UTC m=+1218.181521497" watchObservedRunningTime="2026-01-22 06:05:57.346818887 +0000 UTC m=+1218.185456920" Jan 22 06:05:59 crc kubenswrapper[4982]: I0122 06:05:59.193724 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-lkcwv"] Jan 22 06:05:59 crc kubenswrapper[4982]: I0122 06:05:59.212945 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-lkcwv"] Jan 22 06:05:59 crc kubenswrapper[4982]: I0122 06:05:59.213119 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-lkcwv" Jan 22 06:05:59 crc kubenswrapper[4982]: I0122 06:05:59.252166 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-4641-account-create-update-dtcq4"] Jan 22 06:05:59 crc kubenswrapper[4982]: I0122 06:05:59.253510 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-4641-account-create-update-dtcq4" Jan 22 06:05:59 crc kubenswrapper[4982]: I0122 06:05:59.255591 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Jan 22 06:05:59 crc kubenswrapper[4982]: I0122 06:05:59.258084 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-4641-account-create-update-dtcq4"] Jan 22 06:05:59 crc kubenswrapper[4982]: I0122 06:05:59.351110 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gbhvz\" (UniqueName: \"kubernetes.io/projected/b217180b-2309-401f-a98c-96ecd04ce551-kube-api-access-gbhvz\") pod \"glance-db-create-lkcwv\" (UID: \"b217180b-2309-401f-a98c-96ecd04ce551\") " pod="openstack/glance-db-create-lkcwv" Jan 22 06:05:59 crc kubenswrapper[4982]: I0122 06:05:59.351191 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5263bf07-e4f4-41e6-8304-748716faf10c-operator-scripts\") pod \"glance-4641-account-create-update-dtcq4\" (UID: \"5263bf07-e4f4-41e6-8304-748716faf10c\") " pod="openstack/glance-4641-account-create-update-dtcq4" Jan 22 06:05:59 crc kubenswrapper[4982]: I0122 06:05:59.351237 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnw67\" (UniqueName: \"kubernetes.io/projected/5263bf07-e4f4-41e6-8304-748716faf10c-kube-api-access-fnw67\") pod \"glance-4641-account-create-update-dtcq4\" (UID: \"5263bf07-e4f4-41e6-8304-748716faf10c\") " pod="openstack/glance-4641-account-create-update-dtcq4" Jan 22 06:05:59 crc kubenswrapper[4982]: I0122 06:05:59.351455 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b217180b-2309-401f-a98c-96ecd04ce551-operator-scripts\") pod \"glance-db-create-lkcwv\" (UID: \"b217180b-2309-401f-a98c-96ecd04ce551\") " pod="openstack/glance-db-create-lkcwv" Jan 22 06:05:59 crc kubenswrapper[4982]: I0122 06:05:59.453304 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnw67\" (UniqueName: \"kubernetes.io/projected/5263bf07-e4f4-41e6-8304-748716faf10c-kube-api-access-fnw67\") pod \"glance-4641-account-create-update-dtcq4\" (UID: \"5263bf07-e4f4-41e6-8304-748716faf10c\") " pod="openstack/glance-4641-account-create-update-dtcq4" Jan 22 06:05:59 crc kubenswrapper[4982]: I0122 06:05:59.453381 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b217180b-2309-401f-a98c-96ecd04ce551-operator-scripts\") pod \"glance-db-create-lkcwv\" (UID: \"b217180b-2309-401f-a98c-96ecd04ce551\") " pod="openstack/glance-db-create-lkcwv" Jan 22 06:05:59 crc kubenswrapper[4982]: I0122 06:05:59.453463 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gbhvz\" (UniqueName: \"kubernetes.io/projected/b217180b-2309-401f-a98c-96ecd04ce551-kube-api-access-gbhvz\") pod \"glance-db-create-lkcwv\" (UID: \"b217180b-2309-401f-a98c-96ecd04ce551\") " pod="openstack/glance-db-create-lkcwv" Jan 22 06:05:59 crc kubenswrapper[4982]: I0122 06:05:59.453485 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/5263bf07-e4f4-41e6-8304-748716faf10c-operator-scripts\") pod \"glance-4641-account-create-update-dtcq4\" (UID: \"5263bf07-e4f4-41e6-8304-748716faf10c\") " pod="openstack/glance-4641-account-create-update-dtcq4" Jan 22 06:05:59 crc kubenswrapper[4982]: I0122 06:05:59.454322 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b217180b-2309-401f-a98c-96ecd04ce551-operator-scripts\") pod \"glance-db-create-lkcwv\" (UID: \"b217180b-2309-401f-a98c-96ecd04ce551\") " pod="openstack/glance-db-create-lkcwv" Jan 22 06:05:59 crc kubenswrapper[4982]: I0122 06:05:59.454351 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5263bf07-e4f4-41e6-8304-748716faf10c-operator-scripts\") pod \"glance-4641-account-create-update-dtcq4\" (UID: \"5263bf07-e4f4-41e6-8304-748716faf10c\") " pod="openstack/glance-4641-account-create-update-dtcq4" Jan 22 06:05:59 crc kubenswrapper[4982]: I0122 06:05:59.471264 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fnw67\" (UniqueName: \"kubernetes.io/projected/5263bf07-e4f4-41e6-8304-748716faf10c-kube-api-access-fnw67\") pod \"glance-4641-account-create-update-dtcq4\" (UID: \"5263bf07-e4f4-41e6-8304-748716faf10c\") " pod="openstack/glance-4641-account-create-update-dtcq4" Jan 22 06:05:59 crc kubenswrapper[4982]: I0122 06:05:59.472163 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gbhvz\" (UniqueName: \"kubernetes.io/projected/b217180b-2309-401f-a98c-96ecd04ce551-kube-api-access-gbhvz\") pod \"glance-db-create-lkcwv\" (UID: \"b217180b-2309-401f-a98c-96ecd04ce551\") " pod="openstack/glance-db-create-lkcwv" Jan 22 06:05:59 crc kubenswrapper[4982]: I0122 06:05:59.540476 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-lkcwv" Jan 22 06:05:59 crc kubenswrapper[4982]: I0122 06:05:59.568183 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-4641-account-create-update-dtcq4" Jan 22 06:06:00 crc kubenswrapper[4982]: I0122 06:06:00.041462 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-lkcwv"] Jan 22 06:06:00 crc kubenswrapper[4982]: I0122 06:06:00.100469 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-4641-account-create-update-dtcq4"] Jan 22 06:06:00 crc kubenswrapper[4982]: I0122 06:06:00.347135 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-4641-account-create-update-dtcq4" event={"ID":"5263bf07-e4f4-41e6-8304-748716faf10c","Type":"ContainerStarted","Data":"423b6ad43153e8ebe85acafa36b79589a12cb9022f0450f0a6e23033dac68534"} Jan 22 06:06:00 crc kubenswrapper[4982]: I0122 06:06:00.347189 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-4641-account-create-update-dtcq4" event={"ID":"5263bf07-e4f4-41e6-8304-748716faf10c","Type":"ContainerStarted","Data":"0e6f8387a8d8f47a104011475b0b91e54541ffbb40fbe7da75c188467336c257"} Jan 22 06:06:00 crc kubenswrapper[4982]: I0122 06:06:00.348619 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-lkcwv" event={"ID":"b217180b-2309-401f-a98c-96ecd04ce551","Type":"ContainerStarted","Data":"6ec1fe11dc3ff5f1bbac61b12daeecd51d1202d93fc8aa01545ec8f54173fcc8"} Jan 22 06:06:00 crc kubenswrapper[4982]: I0122 06:06:00.348662 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-lkcwv" event={"ID":"b217180b-2309-401f-a98c-96ecd04ce551","Type":"ContainerStarted","Data":"56ab01ff65a958e599489e4644e010acdeff4017f27712a5fe6e93b094caae04"} Jan 22 06:06:00 crc kubenswrapper[4982]: I0122 06:06:00.364437 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-4641-account-create-update-dtcq4" podStartSLOduration=1.36442236 podStartE2EDuration="1.36442236s" podCreationTimestamp="2026-01-22 06:05:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:06:00.360097698 +0000 UTC m=+1221.198735701" watchObservedRunningTime="2026-01-22 06:06:00.36442236 +0000 UTC m=+1221.203060363" Jan 22 06:06:00 crc kubenswrapper[4982]: I0122 06:06:00.382844 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-create-lkcwv" podStartSLOduration=1.382825931 podStartE2EDuration="1.382825931s" podCreationTimestamp="2026-01-22 06:05:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:06:00.375420027 +0000 UTC m=+1221.214058040" watchObservedRunningTime="2026-01-22 06:06:00.382825931 +0000 UTC m=+1221.221463934" Jan 22 06:06:00 crc kubenswrapper[4982]: I0122 06:06:00.851446 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-xd8hj"] Jan 22 06:06:00 crc kubenswrapper[4982]: I0122 06:06:00.852748 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-xd8hj" Jan 22 06:06:00 crc kubenswrapper[4982]: I0122 06:06:00.855960 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret" Jan 22 06:06:00 crc kubenswrapper[4982]: I0122 06:06:00.875889 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-xd8hj"] Jan 22 06:06:00 crc kubenswrapper[4982]: I0122 06:06:00.998766 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a754d10d-813c-4766-9818-1beca1eca54f-operator-scripts\") pod \"root-account-create-update-xd8hj\" (UID: \"a754d10d-813c-4766-9818-1beca1eca54f\") " pod="openstack/root-account-create-update-xd8hj" Jan 22 06:06:00 crc kubenswrapper[4982]: I0122 06:06:00.998908 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f774t\" (UniqueName: \"kubernetes.io/projected/a754d10d-813c-4766-9818-1beca1eca54f-kube-api-access-f774t\") pod \"root-account-create-update-xd8hj\" (UID: \"a754d10d-813c-4766-9818-1beca1eca54f\") " pod="openstack/root-account-create-update-xd8hj" Jan 22 06:06:01 crc kubenswrapper[4982]: I0122 06:06:01.100510 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a754d10d-813c-4766-9818-1beca1eca54f-operator-scripts\") pod \"root-account-create-update-xd8hj\" (UID: \"a754d10d-813c-4766-9818-1beca1eca54f\") " pod="openstack/root-account-create-update-xd8hj" Jan 22 06:06:01 crc kubenswrapper[4982]: I0122 06:06:01.100623 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f774t\" (UniqueName: \"kubernetes.io/projected/a754d10d-813c-4766-9818-1beca1eca54f-kube-api-access-f774t\") pod \"root-account-create-update-xd8hj\" (UID: \"a754d10d-813c-4766-9818-1beca1eca54f\") " pod="openstack/root-account-create-update-xd8hj" Jan 22 06:06:01 crc kubenswrapper[4982]: I0122 06:06:01.101578 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a754d10d-813c-4766-9818-1beca1eca54f-operator-scripts\") pod \"root-account-create-update-xd8hj\" (UID: \"a754d10d-813c-4766-9818-1beca1eca54f\") " pod="openstack/root-account-create-update-xd8hj" Jan 22 06:06:01 crc kubenswrapper[4982]: I0122 06:06:01.130848 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f774t\" (UniqueName: \"kubernetes.io/projected/a754d10d-813c-4766-9818-1beca1eca54f-kube-api-access-f774t\") pod \"root-account-create-update-xd8hj\" (UID: \"a754d10d-813c-4766-9818-1beca1eca54f\") " pod="openstack/root-account-create-update-xd8hj" Jan 22 06:06:01 crc kubenswrapper[4982]: I0122 06:06:01.219007 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-67fdf7998c-xxcdd" Jan 22 06:06:01 crc kubenswrapper[4982]: I0122 06:06:01.219066 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-xd8hj" Jan 22 06:06:01 crc kubenswrapper[4982]: I0122 06:06:01.294660 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-4qxqk"] Jan 22 06:06:01 crc kubenswrapper[4982]: I0122 06:06:01.294933 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-95f5f6995-4qxqk" podUID="d62a0c01-8c58-4e69-8d63-ef332d9171fe" containerName="dnsmasq-dns" containerID="cri-o://d7312a7e90c2435ccb2b4e88863771b3482d91262682652ca0980c3b30137aea" gracePeriod=10 Jan 22 06:06:01 crc kubenswrapper[4982]: I0122 06:06:01.432763 4982 generic.go:334] "Generic (PLEG): container finished" podID="bb198f24-94e6-4569-be12-9ee57000a3e3" containerID="74c05473a6c9b13b228f742222176b7edbfe2f651ea4f00507ec3d80a3dd1709" exitCode=0 Jan 22 06:06:01 crc kubenswrapper[4982]: I0122 06:06:01.433119 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"bb198f24-94e6-4569-be12-9ee57000a3e3","Type":"ContainerDied","Data":"74c05473a6c9b13b228f742222176b7edbfe2f651ea4f00507ec3d80a3dd1709"} Jan 22 06:06:01 crc kubenswrapper[4982]: I0122 06:06:01.437070 4982 generic.go:334] "Generic (PLEG): container finished" podID="b217180b-2309-401f-a98c-96ecd04ce551" containerID="6ec1fe11dc3ff5f1bbac61b12daeecd51d1202d93fc8aa01545ec8f54173fcc8" exitCode=0 Jan 22 06:06:01 crc kubenswrapper[4982]: I0122 06:06:01.437149 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-lkcwv" event={"ID":"b217180b-2309-401f-a98c-96ecd04ce551","Type":"ContainerDied","Data":"6ec1fe11dc3ff5f1bbac61b12daeecd51d1202d93fc8aa01545ec8f54173fcc8"} Jan 22 06:06:01 crc kubenswrapper[4982]: I0122 06:06:01.441268 4982 generic.go:334] "Generic (PLEG): container finished" podID="7623a66b-412d-4202-bd05-58fba1c6a3d3" containerID="3fe91b4130b040ab517d1434d08c73fd677beaa39c9b2607e5be38b57487bb3f" exitCode=0 Jan 22 06:06:01 crc kubenswrapper[4982]: I0122 06:06:01.441324 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7623a66b-412d-4202-bd05-58fba1c6a3d3","Type":"ContainerDied","Data":"3fe91b4130b040ab517d1434d08c73fd677beaa39c9b2607e5be38b57487bb3f"} Jan 22 06:06:01 crc kubenswrapper[4982]: I0122 06:06:01.445690 4982 generic.go:334] "Generic (PLEG): container finished" podID="5263bf07-e4f4-41e6-8304-748716faf10c" containerID="423b6ad43153e8ebe85acafa36b79589a12cb9022f0450f0a6e23033dac68534" exitCode=0 Jan 22 06:06:01 crc kubenswrapper[4982]: I0122 06:06:01.445728 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-4641-account-create-update-dtcq4" event={"ID":"5263bf07-e4f4-41e6-8304-748716faf10c","Type":"ContainerDied","Data":"423b6ad43153e8ebe85acafa36b79589a12cb9022f0450f0a6e23033dac68534"} Jan 22 06:06:01 crc kubenswrapper[4982]: I0122 06:06:01.821435 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-xd8hj"] Jan 22 06:06:01 crc kubenswrapper[4982]: W0122 06:06:01.843484 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda754d10d_813c_4766_9818_1beca1eca54f.slice/crio-daba0deafca21b369e2f6bdb4aeaeafa8ff08cee2a71ab855c51cf416206300a WatchSource:0}: Error finding container daba0deafca21b369e2f6bdb4aeaeafa8ff08cee2a71ab855c51cf416206300a: Status 404 returned error can't find the container with id 
daba0deafca21b369e2f6bdb4aeaeafa8ff08cee2a71ab855c51cf416206300a Jan 22 06:06:01 crc kubenswrapper[4982]: I0122 06:06:01.963235 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95f5f6995-4qxqk" Jan 22 06:06:02 crc kubenswrapper[4982]: I0122 06:06:02.104563 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j72zq\" (UniqueName: \"kubernetes.io/projected/d62a0c01-8c58-4e69-8d63-ef332d9171fe-kube-api-access-j72zq\") pod \"d62a0c01-8c58-4e69-8d63-ef332d9171fe\" (UID: \"d62a0c01-8c58-4e69-8d63-ef332d9171fe\") " Jan 22 06:06:02 crc kubenswrapper[4982]: I0122 06:06:02.104662 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d62a0c01-8c58-4e69-8d63-ef332d9171fe-config\") pod \"d62a0c01-8c58-4e69-8d63-ef332d9171fe\" (UID: \"d62a0c01-8c58-4e69-8d63-ef332d9171fe\") " Jan 22 06:06:02 crc kubenswrapper[4982]: I0122 06:06:02.104953 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d62a0c01-8c58-4e69-8d63-ef332d9171fe-dns-svc\") pod \"d62a0c01-8c58-4e69-8d63-ef332d9171fe\" (UID: \"d62a0c01-8c58-4e69-8d63-ef332d9171fe\") " Jan 22 06:06:02 crc kubenswrapper[4982]: I0122 06:06:02.114068 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d62a0c01-8c58-4e69-8d63-ef332d9171fe-kube-api-access-j72zq" (OuterVolumeSpecName: "kube-api-access-j72zq") pod "d62a0c01-8c58-4e69-8d63-ef332d9171fe" (UID: "d62a0c01-8c58-4e69-8d63-ef332d9171fe"). InnerVolumeSpecName "kube-api-access-j72zq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:06:02 crc kubenswrapper[4982]: I0122 06:06:02.186836 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d62a0c01-8c58-4e69-8d63-ef332d9171fe-config" (OuterVolumeSpecName: "config") pod "d62a0c01-8c58-4e69-8d63-ef332d9171fe" (UID: "d62a0c01-8c58-4e69-8d63-ef332d9171fe"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:02 crc kubenswrapper[4982]: I0122 06:06:02.192373 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d62a0c01-8c58-4e69-8d63-ef332d9171fe-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d62a0c01-8c58-4e69-8d63-ef332d9171fe" (UID: "d62a0c01-8c58-4e69-8d63-ef332d9171fe"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:02 crc kubenswrapper[4982]: I0122 06:06:02.207043 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d62a0c01-8c58-4e69-8d63-ef332d9171fe-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:02 crc kubenswrapper[4982]: I0122 06:06:02.207085 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j72zq\" (UniqueName: \"kubernetes.io/projected/d62a0c01-8c58-4e69-8d63-ef332d9171fe-kube-api-access-j72zq\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:02 crc kubenswrapper[4982]: I0122 06:06:02.207100 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d62a0c01-8c58-4e69-8d63-ef332d9171fe-config\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:02 crc kubenswrapper[4982]: I0122 06:06:02.453593 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"bb198f24-94e6-4569-be12-9ee57000a3e3","Type":"ContainerStarted","Data":"5e0201502839733468893c507dd6c3694d211c79ab29f93898d777e9823bb132"} Jan 22 06:06:02 crc kubenswrapper[4982]: I0122 06:06:02.454638 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Jan 22 06:06:02 crc kubenswrapper[4982]: I0122 06:06:02.456717 4982 generic.go:334] "Generic (PLEG): container finished" podID="a754d10d-813c-4766-9818-1beca1eca54f" containerID="2508ceb17f5b081fedae60a792a07d63197d4ce625572aa4c03d2c6d1fd5177c" exitCode=0 Jan 22 06:06:02 crc kubenswrapper[4982]: I0122 06:06:02.456756 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-xd8hj" event={"ID":"a754d10d-813c-4766-9818-1beca1eca54f","Type":"ContainerDied","Data":"2508ceb17f5b081fedae60a792a07d63197d4ce625572aa4c03d2c6d1fd5177c"} Jan 22 06:06:02 crc kubenswrapper[4982]: I0122 06:06:02.456771 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-xd8hj" event={"ID":"a754d10d-813c-4766-9818-1beca1eca54f","Type":"ContainerStarted","Data":"daba0deafca21b369e2f6bdb4aeaeafa8ff08cee2a71ab855c51cf416206300a"} Jan 22 06:06:02 crc kubenswrapper[4982]: I0122 06:06:02.459530 4982 generic.go:334] "Generic (PLEG): container finished" podID="d62a0c01-8c58-4e69-8d63-ef332d9171fe" containerID="d7312a7e90c2435ccb2b4e88863771b3482d91262682652ca0980c3b30137aea" exitCode=0 Jan 22 06:06:02 crc kubenswrapper[4982]: I0122 06:06:02.459661 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95f5f6995-4qxqk" event={"ID":"d62a0c01-8c58-4e69-8d63-ef332d9171fe","Type":"ContainerDied","Data":"d7312a7e90c2435ccb2b4e88863771b3482d91262682652ca0980c3b30137aea"} Jan 22 06:06:02 crc kubenswrapper[4982]: I0122 06:06:02.459730 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95f5f6995-4qxqk" event={"ID":"d62a0c01-8c58-4e69-8d63-ef332d9171fe","Type":"ContainerDied","Data":"dab2f5604ad0bb6ea259766a843362f9713161f034079481f02890b7e828c414"} Jan 22 06:06:02 crc kubenswrapper[4982]: I0122 06:06:02.459766 4982 scope.go:117] "RemoveContainer" containerID="d7312a7e90c2435ccb2b4e88863771b3482d91262682652ca0980c3b30137aea" Jan 22 06:06:02 crc kubenswrapper[4982]: I0122 06:06:02.460001 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-95f5f6995-4qxqk" Jan 22 06:06:02 crc kubenswrapper[4982]: I0122 06:06:02.464467 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7623a66b-412d-4202-bd05-58fba1c6a3d3","Type":"ContainerStarted","Data":"a183af96beefaed029587f49bdd19bf06abd4b4c476f917c768fe13137f46727"} Jan 22 06:06:02 crc kubenswrapper[4982]: I0122 06:06:02.492057 4982 scope.go:117] "RemoveContainer" containerID="0b4a539450b8b5e8f3229963ac5df1b7bf5ec04aa8a6758b7ce804db1cf04709" Jan 22 06:06:02 crc kubenswrapper[4982]: I0122 06:06:02.503194 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=43.574130761 podStartE2EDuration="53.503159834s" podCreationTimestamp="2026-01-22 06:05:09 +0000 UTC" firstStartedPulling="2026-01-22 06:05:17.743301892 +0000 UTC m=+1178.581939925" lastFinishedPulling="2026-01-22 06:05:27.672330995 +0000 UTC m=+1188.510968998" observedRunningTime="2026-01-22 06:06:02.49266824 +0000 UTC m=+1223.331306243" watchObservedRunningTime="2026-01-22 06:06:02.503159834 +0000 UTC m=+1223.341797847" Jan 22 06:06:02 crc kubenswrapper[4982]: I0122 06:06:02.540910 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.086662571 podStartE2EDuration="53.540880577s" podCreationTimestamp="2026-01-22 06:05:09 +0000 UTC" firstStartedPulling="2026-01-22 06:05:11.203416466 +0000 UTC m=+1172.042054459" lastFinishedPulling="2026-01-22 06:05:27.657634462 +0000 UTC m=+1188.496272465" observedRunningTime="2026-01-22 06:06:02.524755216 +0000 UTC m=+1223.363393249" watchObservedRunningTime="2026-01-22 06:06:02.540880577 +0000 UTC m=+1223.379518600" Jan 22 06:06:02 crc kubenswrapper[4982]: I0122 06:06:02.566432 4982 scope.go:117] "RemoveContainer" containerID="d7312a7e90c2435ccb2b4e88863771b3482d91262682652ca0980c3b30137aea" Jan 22 06:06:02 crc kubenswrapper[4982]: E0122 06:06:02.567317 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7312a7e90c2435ccb2b4e88863771b3482d91262682652ca0980c3b30137aea\": container with ID starting with d7312a7e90c2435ccb2b4e88863771b3482d91262682652ca0980c3b30137aea not found: ID does not exist" containerID="d7312a7e90c2435ccb2b4e88863771b3482d91262682652ca0980c3b30137aea" Jan 22 06:06:02 crc kubenswrapper[4982]: I0122 06:06:02.567356 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7312a7e90c2435ccb2b4e88863771b3482d91262682652ca0980c3b30137aea"} err="failed to get container status \"d7312a7e90c2435ccb2b4e88863771b3482d91262682652ca0980c3b30137aea\": rpc error: code = NotFound desc = could not find container \"d7312a7e90c2435ccb2b4e88863771b3482d91262682652ca0980c3b30137aea\": container with ID starting with d7312a7e90c2435ccb2b4e88863771b3482d91262682652ca0980c3b30137aea not found: ID does not exist" Jan 22 06:06:02 crc kubenswrapper[4982]: I0122 06:06:02.567379 4982 scope.go:117] "RemoveContainer" containerID="0b4a539450b8b5e8f3229963ac5df1b7bf5ec04aa8a6758b7ce804db1cf04709" Jan 22 06:06:02 crc kubenswrapper[4982]: E0122 06:06:02.567677 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b4a539450b8b5e8f3229963ac5df1b7bf5ec04aa8a6758b7ce804db1cf04709\": container with ID starting with 
0b4a539450b8b5e8f3229963ac5df1b7bf5ec04aa8a6758b7ce804db1cf04709 not found: ID does not exist" containerID="0b4a539450b8b5e8f3229963ac5df1b7bf5ec04aa8a6758b7ce804db1cf04709" Jan 22 06:06:02 crc kubenswrapper[4982]: I0122 06:06:02.567702 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b4a539450b8b5e8f3229963ac5df1b7bf5ec04aa8a6758b7ce804db1cf04709"} err="failed to get container status \"0b4a539450b8b5e8f3229963ac5df1b7bf5ec04aa8a6758b7ce804db1cf04709\": rpc error: code = NotFound desc = could not find container \"0b4a539450b8b5e8f3229963ac5df1b7bf5ec04aa8a6758b7ce804db1cf04709\": container with ID starting with 0b4a539450b8b5e8f3229963ac5df1b7bf5ec04aa8a6758b7ce804db1cf04709 not found: ID does not exist" Jan 22 06:06:02 crc kubenswrapper[4982]: I0122 06:06:02.591533 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-4qxqk"] Jan 22 06:06:02 crc kubenswrapper[4982]: I0122 06:06:02.598109 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-4qxqk"] Jan 22 06:06:02 crc kubenswrapper[4982]: I0122 06:06:02.918423 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0e66de65-9639-4397-bc39-dfcf0c325dff-etc-swift\") pod \"swift-storage-0\" (UID: \"0e66de65-9639-4397-bc39-dfcf0c325dff\") " pod="openstack/swift-storage-0" Jan 22 06:06:02 crc kubenswrapper[4982]: E0122 06:06:02.918583 4982 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 22 06:06:02 crc kubenswrapper[4982]: E0122 06:06:02.918596 4982 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 22 06:06:02 crc kubenswrapper[4982]: E0122 06:06:02.918638 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/0e66de65-9639-4397-bc39-dfcf0c325dff-etc-swift podName:0e66de65-9639-4397-bc39-dfcf0c325dff nodeName:}" failed. No retries permitted until 2026-01-22 06:06:18.918621613 +0000 UTC m=+1239.757259616 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/0e66de65-9639-4397-bc39-dfcf0c325dff-etc-swift") pod "swift-storage-0" (UID: "0e66de65-9639-4397-bc39-dfcf0c325dff") : configmap "swift-ring-files" not found Jan 22 06:06:02 crc kubenswrapper[4982]: I0122 06:06:02.987575 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-4641-account-create-update-dtcq4" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.065064 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-lkcwv" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.121099 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b217180b-2309-401f-a98c-96ecd04ce551-operator-scripts\") pod \"b217180b-2309-401f-a98c-96ecd04ce551\" (UID: \"b217180b-2309-401f-a98c-96ecd04ce551\") " Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.121211 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gbhvz\" (UniqueName: \"kubernetes.io/projected/b217180b-2309-401f-a98c-96ecd04ce551-kube-api-access-gbhvz\") pod \"b217180b-2309-401f-a98c-96ecd04ce551\" (UID: \"b217180b-2309-401f-a98c-96ecd04ce551\") " Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.121231 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fnw67\" (UniqueName: \"kubernetes.io/projected/5263bf07-e4f4-41e6-8304-748716faf10c-kube-api-access-fnw67\") pod \"5263bf07-e4f4-41e6-8304-748716faf10c\" (UID: \"5263bf07-e4f4-41e6-8304-748716faf10c\") " Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.121269 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5263bf07-e4f4-41e6-8304-748716faf10c-operator-scripts\") pod \"5263bf07-e4f4-41e6-8304-748716faf10c\" (UID: \"5263bf07-e4f4-41e6-8304-748716faf10c\") " Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.122115 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5263bf07-e4f4-41e6-8304-748716faf10c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5263bf07-e4f4-41e6-8304-748716faf10c" (UID: "5263bf07-e4f4-41e6-8304-748716faf10c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.122345 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b217180b-2309-401f-a98c-96ecd04ce551-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b217180b-2309-401f-a98c-96ecd04ce551" (UID: "b217180b-2309-401f-a98c-96ecd04ce551"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.127023 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b217180b-2309-401f-a98c-96ecd04ce551-kube-api-access-gbhvz" (OuterVolumeSpecName: "kube-api-access-gbhvz") pod "b217180b-2309-401f-a98c-96ecd04ce551" (UID: "b217180b-2309-401f-a98c-96ecd04ce551"). InnerVolumeSpecName "kube-api-access-gbhvz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.127309 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5263bf07-e4f4-41e6-8304-748716faf10c-kube-api-access-fnw67" (OuterVolumeSpecName: "kube-api-access-fnw67") pod "5263bf07-e4f4-41e6-8304-748716faf10c" (UID: "5263bf07-e4f4-41e6-8304-748716faf10c"). InnerVolumeSpecName "kube-api-access-fnw67". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.223222 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b217180b-2309-401f-a98c-96ecd04ce551-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.223270 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gbhvz\" (UniqueName: \"kubernetes.io/projected/b217180b-2309-401f-a98c-96ecd04ce551-kube-api-access-gbhvz\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.223285 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fnw67\" (UniqueName: \"kubernetes.io/projected/5263bf07-e4f4-41e6-8304-748716faf10c-kube-api-access-fnw67\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.223299 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5263bf07-e4f4-41e6-8304-748716faf10c-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.475898 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-lkcwv" event={"ID":"b217180b-2309-401f-a98c-96ecd04ce551","Type":"ContainerDied","Data":"56ab01ff65a958e599489e4644e010acdeff4017f27712a5fe6e93b094caae04"} Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.475957 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="56ab01ff65a958e599489e4644e010acdeff4017f27712a5fe6e93b094caae04" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.476050 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-lkcwv" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.493432 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-4641-account-create-update-dtcq4" event={"ID":"5263bf07-e4f4-41e6-8304-748716faf10c","Type":"ContainerDied","Data":"0e6f8387a8d8f47a104011475b0b91e54541ffbb40fbe7da75c188467336c257"} Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.493692 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0e6f8387a8d8f47a104011475b0b91e54541ffbb40fbe7da75c188467336c257" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.493616 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-4641-account-create-update-dtcq4" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.543701 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-b8kgd"] Jan 22 06:06:03 crc kubenswrapper[4982]: E0122 06:06:03.544132 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d62a0c01-8c58-4e69-8d63-ef332d9171fe" containerName="init" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.544155 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d62a0c01-8c58-4e69-8d63-ef332d9171fe" containerName="init" Jan 22 06:06:03 crc kubenswrapper[4982]: E0122 06:06:03.544196 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b217180b-2309-401f-a98c-96ecd04ce551" containerName="mariadb-database-create" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.544209 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b217180b-2309-401f-a98c-96ecd04ce551" containerName="mariadb-database-create" Jan 22 06:06:03 crc kubenswrapper[4982]: E0122 06:06:03.544235 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5263bf07-e4f4-41e6-8304-748716faf10c" containerName="mariadb-account-create-update" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.544245 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="5263bf07-e4f4-41e6-8304-748716faf10c" containerName="mariadb-account-create-update" Jan 22 06:06:03 crc kubenswrapper[4982]: E0122 06:06:03.544260 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d62a0c01-8c58-4e69-8d63-ef332d9171fe" containerName="dnsmasq-dns" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.544270 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d62a0c01-8c58-4e69-8d63-ef332d9171fe" containerName="dnsmasq-dns" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.544461 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d62a0c01-8c58-4e69-8d63-ef332d9171fe" containerName="dnsmasq-dns" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.544482 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="5263bf07-e4f4-41e6-8304-748716faf10c" containerName="mariadb-account-create-update" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.544495 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b217180b-2309-401f-a98c-96ecd04ce551" containerName="mariadb-database-create" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.545095 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-b8kgd" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.564234 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-b8kgd"] Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.630791 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kws5h\" (UniqueName: \"kubernetes.io/projected/d24e3505-d3d7-49fa-80fd-081138916d20-kube-api-access-kws5h\") pod \"keystone-db-create-b8kgd\" (UID: \"d24e3505-d3d7-49fa-80fd-081138916d20\") " pod="openstack/keystone-db-create-b8kgd" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.631019 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d24e3505-d3d7-49fa-80fd-081138916d20-operator-scripts\") pod \"keystone-db-create-b8kgd\" (UID: \"d24e3505-d3d7-49fa-80fd-081138916d20\") " pod="openstack/keystone-db-create-b8kgd" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.652046 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-61bb-account-create-update-5ntzd"] Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.653438 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-61bb-account-create-update-5ntzd" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.656141 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.661320 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-61bb-account-create-update-5ntzd"] Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.728881 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d62a0c01-8c58-4e69-8d63-ef332d9171fe" path="/var/lib/kubelet/pods/d62a0c01-8c58-4e69-8d63-ef332d9171fe/volumes" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.733674 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r28vt\" (UniqueName: \"kubernetes.io/projected/26e1b065-9ba1-4c27-90a3-11b78de079b7-kube-api-access-r28vt\") pod \"keystone-61bb-account-create-update-5ntzd\" (UID: \"26e1b065-9ba1-4c27-90a3-11b78de079b7\") " pod="openstack/keystone-61bb-account-create-update-5ntzd" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.733727 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kws5h\" (UniqueName: \"kubernetes.io/projected/d24e3505-d3d7-49fa-80fd-081138916d20-kube-api-access-kws5h\") pod \"keystone-db-create-b8kgd\" (UID: \"d24e3505-d3d7-49fa-80fd-081138916d20\") " pod="openstack/keystone-db-create-b8kgd" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.733773 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26e1b065-9ba1-4c27-90a3-11b78de079b7-operator-scripts\") pod \"keystone-61bb-account-create-update-5ntzd\" (UID: \"26e1b065-9ba1-4c27-90a3-11b78de079b7\") " pod="openstack/keystone-61bb-account-create-update-5ntzd" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.733809 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d24e3505-d3d7-49fa-80fd-081138916d20-operator-scripts\") pod 
\"keystone-db-create-b8kgd\" (UID: \"d24e3505-d3d7-49fa-80fd-081138916d20\") " pod="openstack/keystone-db-create-b8kgd" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.735050 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d24e3505-d3d7-49fa-80fd-081138916d20-operator-scripts\") pod \"keystone-db-create-b8kgd\" (UID: \"d24e3505-d3d7-49fa-80fd-081138916d20\") " pod="openstack/keystone-db-create-b8kgd" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.751812 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kws5h\" (UniqueName: \"kubernetes.io/projected/d24e3505-d3d7-49fa-80fd-081138916d20-kube-api-access-kws5h\") pod \"keystone-db-create-b8kgd\" (UID: \"d24e3505-d3d7-49fa-80fd-081138916d20\") " pod="openstack/keystone-db-create-b8kgd" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.835334 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r28vt\" (UniqueName: \"kubernetes.io/projected/26e1b065-9ba1-4c27-90a3-11b78de079b7-kube-api-access-r28vt\") pod \"keystone-61bb-account-create-update-5ntzd\" (UID: \"26e1b065-9ba1-4c27-90a3-11b78de079b7\") " pod="openstack/keystone-61bb-account-create-update-5ntzd" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.835420 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26e1b065-9ba1-4c27-90a3-11b78de079b7-operator-scripts\") pod \"keystone-61bb-account-create-update-5ntzd\" (UID: \"26e1b065-9ba1-4c27-90a3-11b78de079b7\") " pod="openstack/keystone-61bb-account-create-update-5ntzd" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.836649 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26e1b065-9ba1-4c27-90a3-11b78de079b7-operator-scripts\") pod \"keystone-61bb-account-create-update-5ntzd\" (UID: \"26e1b065-9ba1-4c27-90a3-11b78de079b7\") " pod="openstack/keystone-61bb-account-create-update-5ntzd" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.836832 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-7vm6q"] Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.839400 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-7vm6q" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.849883 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-7vm6q"] Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.862491 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r28vt\" (UniqueName: \"kubernetes.io/projected/26e1b065-9ba1-4c27-90a3-11b78de079b7-kube-api-access-r28vt\") pod \"keystone-61bb-account-create-update-5ntzd\" (UID: \"26e1b065-9ba1-4c27-90a3-11b78de079b7\") " pod="openstack/keystone-61bb-account-create-update-5ntzd" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.866214 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-b8kgd" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.936796 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96ba8b13-a38d-49d1-9f3b-a24ec4dabbee-operator-scripts\") pod \"placement-db-create-7vm6q\" (UID: \"96ba8b13-a38d-49d1-9f3b-a24ec4dabbee\") " pod="openstack/placement-db-create-7vm6q" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.937035 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmstc\" (UniqueName: \"kubernetes.io/projected/96ba8b13-a38d-49d1-9f3b-a24ec4dabbee-kube-api-access-bmstc\") pod \"placement-db-create-7vm6q\" (UID: \"96ba8b13-a38d-49d1-9f3b-a24ec4dabbee\") " pod="openstack/placement-db-create-7vm6q" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.953350 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-xd8hj" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.974527 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-49e2-account-create-update-jrl9m"] Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.974815 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-61bb-account-create-update-5ntzd" Jan 22 06:06:03 crc kubenswrapper[4982]: E0122 06:06:03.975147 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a754d10d-813c-4766-9818-1beca1eca54f" containerName="mariadb-account-create-update" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.975163 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="a754d10d-813c-4766-9818-1beca1eca54f" containerName="mariadb-account-create-update" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.975319 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="a754d10d-813c-4766-9818-1beca1eca54f" containerName="mariadb-account-create-update" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.975794 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-49e2-account-create-update-jrl9m" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.980156 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Jan 22 06:06:03 crc kubenswrapper[4982]: I0122 06:06:03.991927 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-49e2-account-create-update-jrl9m"] Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.038040 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f774t\" (UniqueName: \"kubernetes.io/projected/a754d10d-813c-4766-9818-1beca1eca54f-kube-api-access-f774t\") pod \"a754d10d-813c-4766-9818-1beca1eca54f\" (UID: \"a754d10d-813c-4766-9818-1beca1eca54f\") " Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.038126 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a754d10d-813c-4766-9818-1beca1eca54f-operator-scripts\") pod \"a754d10d-813c-4766-9818-1beca1eca54f\" (UID: \"a754d10d-813c-4766-9818-1beca1eca54f\") " Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.038537 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb64b830-eb39-4a4c-8413-04876d86e890-operator-scripts\") pod \"placement-49e2-account-create-update-jrl9m\" (UID: \"eb64b830-eb39-4a4c-8413-04876d86e890\") " pod="openstack/placement-49e2-account-create-update-jrl9m" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.038610 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmstc\" (UniqueName: \"kubernetes.io/projected/96ba8b13-a38d-49d1-9f3b-a24ec4dabbee-kube-api-access-bmstc\") pod \"placement-db-create-7vm6q\" (UID: \"96ba8b13-a38d-49d1-9f3b-a24ec4dabbee\") " pod="openstack/placement-db-create-7vm6q" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.038685 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96ba8b13-a38d-49d1-9f3b-a24ec4dabbee-operator-scripts\") pod \"placement-db-create-7vm6q\" (UID: \"96ba8b13-a38d-49d1-9f3b-a24ec4dabbee\") " pod="openstack/placement-db-create-7vm6q" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.038758 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fv9lj\" (UniqueName: \"kubernetes.io/projected/eb64b830-eb39-4a4c-8413-04876d86e890-kube-api-access-fv9lj\") pod \"placement-49e2-account-create-update-jrl9m\" (UID: \"eb64b830-eb39-4a4c-8413-04876d86e890\") " pod="openstack/placement-49e2-account-create-update-jrl9m" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.039715 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96ba8b13-a38d-49d1-9f3b-a24ec4dabbee-operator-scripts\") pod \"placement-db-create-7vm6q\" (UID: \"96ba8b13-a38d-49d1-9f3b-a24ec4dabbee\") " pod="openstack/placement-db-create-7vm6q" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.039811 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a754d10d-813c-4766-9818-1beca1eca54f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a754d10d-813c-4766-9818-1beca1eca54f" (UID: 
"a754d10d-813c-4766-9818-1beca1eca54f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.042923 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a754d10d-813c-4766-9818-1beca1eca54f-kube-api-access-f774t" (OuterVolumeSpecName: "kube-api-access-f774t") pod "a754d10d-813c-4766-9818-1beca1eca54f" (UID: "a754d10d-813c-4766-9818-1beca1eca54f"). InnerVolumeSpecName "kube-api-access-f774t". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.063459 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmstc\" (UniqueName: \"kubernetes.io/projected/96ba8b13-a38d-49d1-9f3b-a24ec4dabbee-kube-api-access-bmstc\") pod \"placement-db-create-7vm6q\" (UID: \"96ba8b13-a38d-49d1-9f3b-a24ec4dabbee\") " pod="openstack/placement-db-create-7vm6q" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.140494 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb64b830-eb39-4a4c-8413-04876d86e890-operator-scripts\") pod \"placement-49e2-account-create-update-jrl9m\" (UID: \"eb64b830-eb39-4a4c-8413-04876d86e890\") " pod="openstack/placement-49e2-account-create-update-jrl9m" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.140589 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fv9lj\" (UniqueName: \"kubernetes.io/projected/eb64b830-eb39-4a4c-8413-04876d86e890-kube-api-access-fv9lj\") pod \"placement-49e2-account-create-update-jrl9m\" (UID: \"eb64b830-eb39-4a4c-8413-04876d86e890\") " pod="openstack/placement-49e2-account-create-update-jrl9m" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.140661 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f774t\" (UniqueName: \"kubernetes.io/projected/a754d10d-813c-4766-9818-1beca1eca54f-kube-api-access-f774t\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.140674 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a754d10d-813c-4766-9818-1beca1eca54f-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.144004 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb64b830-eb39-4a4c-8413-04876d86e890-operator-scripts\") pod \"placement-49e2-account-create-update-jrl9m\" (UID: \"eb64b830-eb39-4a4c-8413-04876d86e890\") " pod="openstack/placement-49e2-account-create-update-jrl9m" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.156102 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-7vm6q" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.158344 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fv9lj\" (UniqueName: \"kubernetes.io/projected/eb64b830-eb39-4a4c-8413-04876d86e890-kube-api-access-fv9lj\") pod \"placement-49e2-account-create-update-jrl9m\" (UID: \"eb64b830-eb39-4a4c-8413-04876d86e890\") " pod="openstack/placement-49e2-account-create-update-jrl9m" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.290468 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-49e2-account-create-update-jrl9m" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.363809 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-b8kgd"] Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.476712 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-z7b8c"] Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.478383 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-z7b8c" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.483149 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-jk7tf" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.483380 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.524123 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-z7b8c"] Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.546986 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c49756b-5f06-470b-9bc5-281b5bfbb198-combined-ca-bundle\") pod \"glance-db-sync-z7b8c\" (UID: \"4c49756b-5f06-470b-9bc5-281b5bfbb198\") " pod="openstack/glance-db-sync-z7b8c" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.547065 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfjfm\" (UniqueName: \"kubernetes.io/projected/4c49756b-5f06-470b-9bc5-281b5bfbb198-kube-api-access-zfjfm\") pod \"glance-db-sync-z7b8c\" (UID: \"4c49756b-5f06-470b-9bc5-281b5bfbb198\") " pod="openstack/glance-db-sync-z7b8c" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.547096 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c49756b-5f06-470b-9bc5-281b5bfbb198-config-data\") pod \"glance-db-sync-z7b8c\" (UID: \"4c49756b-5f06-470b-9bc5-281b5bfbb198\") " pod="openstack/glance-db-sync-z7b8c" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.547128 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4c49756b-5f06-470b-9bc5-281b5bfbb198-db-sync-config-data\") pod \"glance-db-sync-z7b8c\" (UID: \"4c49756b-5f06-470b-9bc5-281b5bfbb198\") " pod="openstack/glance-db-sync-z7b8c" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.550331 4982 generic.go:334] "Generic (PLEG): container finished" podID="b9682832-1d53-4341-9dbc-10f057d1f6ad" containerID="13f7a9e83037686cbbc1266994ac9f274abbaae84d3e10a553ddd1ed81c5e0c8" exitCode=0 Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.550361 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-r66tz" event={"ID":"b9682832-1d53-4341-9dbc-10f057d1f6ad","Type":"ContainerDied","Data":"13f7a9e83037686cbbc1266994ac9f274abbaae84d3e10a553ddd1ed81c5e0c8"} Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.552654 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-xd8hj" event={"ID":"a754d10d-813c-4766-9818-1beca1eca54f","Type":"ContainerDied","Data":"daba0deafca21b369e2f6bdb4aeaeafa8ff08cee2a71ab855c51cf416206300a"} Jan 22 06:06:04 crc 
kubenswrapper[4982]: I0122 06:06:04.552684 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="daba0deafca21b369e2f6bdb4aeaeafa8ff08cee2a71ab855c51cf416206300a" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.552697 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-xd8hj" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.568180 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-b8kgd" event={"ID":"d24e3505-d3d7-49fa-80fd-081138916d20","Type":"ContainerStarted","Data":"b1b3ee18cb74c1b5a590b87ea720ca7531b3f3ee964800b8909aba2387235bb0"} Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.576446 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-61bb-account-create-update-5ntzd"] Jan 22 06:06:04 crc kubenswrapper[4982]: W0122 06:06:04.583458 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod26e1b065_9ba1_4c27_90a3_11b78de079b7.slice/crio-59e2b34d547223124cc72bd8613ccd96b92240c5f81f1cd379193e2f9d8762a3 WatchSource:0}: Error finding container 59e2b34d547223124cc72bd8613ccd96b92240c5f81f1cd379193e2f9d8762a3: Status 404 returned error can't find the container with id 59e2b34d547223124cc72bd8613ccd96b92240c5f81f1cd379193e2f9d8762a3 Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.673402 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c49756b-5f06-470b-9bc5-281b5bfbb198-combined-ca-bundle\") pod \"glance-db-sync-z7b8c\" (UID: \"4c49756b-5f06-470b-9bc5-281b5bfbb198\") " pod="openstack/glance-db-sync-z7b8c" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.673473 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfjfm\" (UniqueName: \"kubernetes.io/projected/4c49756b-5f06-470b-9bc5-281b5bfbb198-kube-api-access-zfjfm\") pod \"glance-db-sync-z7b8c\" (UID: \"4c49756b-5f06-470b-9bc5-281b5bfbb198\") " pod="openstack/glance-db-sync-z7b8c" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.673500 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c49756b-5f06-470b-9bc5-281b5bfbb198-config-data\") pod \"glance-db-sync-z7b8c\" (UID: \"4c49756b-5f06-470b-9bc5-281b5bfbb198\") " pod="openstack/glance-db-sync-z7b8c" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.673532 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4c49756b-5f06-470b-9bc5-281b5bfbb198-db-sync-config-data\") pod \"glance-db-sync-z7b8c\" (UID: \"4c49756b-5f06-470b-9bc5-281b5bfbb198\") " pod="openstack/glance-db-sync-z7b8c" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.679369 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-7vm6q"] Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.689053 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4c49756b-5f06-470b-9bc5-281b5bfbb198-db-sync-config-data\") pod \"glance-db-sync-z7b8c\" (UID: \"4c49756b-5f06-470b-9bc5-281b5bfbb198\") " pod="openstack/glance-db-sync-z7b8c" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.689590 4982 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c49756b-5f06-470b-9bc5-281b5bfbb198-config-data\") pod \"glance-db-sync-z7b8c\" (UID: \"4c49756b-5f06-470b-9bc5-281b5bfbb198\") " pod="openstack/glance-db-sync-z7b8c" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.696554 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c49756b-5f06-470b-9bc5-281b5bfbb198-combined-ca-bundle\") pod \"glance-db-sync-z7b8c\" (UID: \"4c49756b-5f06-470b-9bc5-281b5bfbb198\") " pod="openstack/glance-db-sync-z7b8c" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.697019 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfjfm\" (UniqueName: \"kubernetes.io/projected/4c49756b-5f06-470b-9bc5-281b5bfbb198-kube-api-access-zfjfm\") pod \"glance-db-sync-z7b8c\" (UID: \"4c49756b-5f06-470b-9bc5-281b5bfbb198\") " pod="openstack/glance-db-sync-z7b8c" Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.823417 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-49e2-account-create-update-jrl9m"] Jan 22 06:06:04 crc kubenswrapper[4982]: W0122 06:06:04.825217 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeb64b830_eb39_4a4c_8413_04876d86e890.slice/crio-22948996fad807b9db60dcd3db7ce4d1795e6c7e5e7136365a82c17f4a11d697 WatchSource:0}: Error finding container 22948996fad807b9db60dcd3db7ce4d1795e6c7e5e7136365a82c17f4a11d697: Status 404 returned error can't find the container with id 22948996fad807b9db60dcd3db7ce4d1795e6c7e5e7136365a82c17f4a11d697 Jan 22 06:06:04 crc kubenswrapper[4982]: I0122 06:06:04.877014 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-z7b8c" Jan 22 06:06:05 crc kubenswrapper[4982]: I0122 06:06:05.399134 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-z7b8c"] Jan 22 06:06:05 crc kubenswrapper[4982]: W0122 06:06:05.408275 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4c49756b_5f06_470b_9bc5_281b5bfbb198.slice/crio-6a3b7bc3fc11e7a387f059fed586d444313270785dac7ec63bef01f808835e39 WatchSource:0}: Error finding container 6a3b7bc3fc11e7a387f059fed586d444313270785dac7ec63bef01f808835e39: Status 404 returned error can't find the container with id 6a3b7bc3fc11e7a387f059fed586d444313270785dac7ec63bef01f808835e39 Jan 22 06:06:05 crc kubenswrapper[4982]: I0122 06:06:05.575585 4982 generic.go:334] "Generic (PLEG): container finished" podID="26e1b065-9ba1-4c27-90a3-11b78de079b7" containerID="96daef9a1308445e129ae30eb202f53f16ca0d76f654c7453781f263ed77a164" exitCode=0 Jan 22 06:06:05 crc kubenswrapper[4982]: I0122 06:06:05.575674 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-61bb-account-create-update-5ntzd" event={"ID":"26e1b065-9ba1-4c27-90a3-11b78de079b7","Type":"ContainerDied","Data":"96daef9a1308445e129ae30eb202f53f16ca0d76f654c7453781f263ed77a164"} Jan 22 06:06:05 crc kubenswrapper[4982]: I0122 06:06:05.575890 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-61bb-account-create-update-5ntzd" event={"ID":"26e1b065-9ba1-4c27-90a3-11b78de079b7","Type":"ContainerStarted","Data":"59e2b34d547223124cc72bd8613ccd96b92240c5f81f1cd379193e2f9d8762a3"} Jan 22 06:06:05 crc kubenswrapper[4982]: I0122 06:06:05.577212 4982 generic.go:334] "Generic (PLEG): container finished" podID="d24e3505-d3d7-49fa-80fd-081138916d20" containerID="96443aee780992f73d8bf465dd81fc5713de4eb03bf77c564cee2ba08450265c" exitCode=0 Jan 22 06:06:05 crc kubenswrapper[4982]: I0122 06:06:05.577279 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-b8kgd" event={"ID":"d24e3505-d3d7-49fa-80fd-081138916d20","Type":"ContainerDied","Data":"96443aee780992f73d8bf465dd81fc5713de4eb03bf77c564cee2ba08450265c"} Jan 22 06:06:05 crc kubenswrapper[4982]: I0122 06:06:05.578767 4982 generic.go:334] "Generic (PLEG): container finished" podID="96ba8b13-a38d-49d1-9f3b-a24ec4dabbee" containerID="d942076c8167ac6a314e546366e8b1ceb42275cdc5d5080afef9aefeffe2759d" exitCode=0 Jan 22 06:06:05 crc kubenswrapper[4982]: I0122 06:06:05.578796 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-7vm6q" event={"ID":"96ba8b13-a38d-49d1-9f3b-a24ec4dabbee","Type":"ContainerDied","Data":"d942076c8167ac6a314e546366e8b1ceb42275cdc5d5080afef9aefeffe2759d"} Jan 22 06:06:05 crc kubenswrapper[4982]: I0122 06:06:05.578827 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-7vm6q" event={"ID":"96ba8b13-a38d-49d1-9f3b-a24ec4dabbee","Type":"ContainerStarted","Data":"0cb10a606573ea2556505605c0daf553876f6881958d7379d7bebf5cb5a626f5"} Jan 22 06:06:05 crc kubenswrapper[4982]: I0122 06:06:05.580275 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-z7b8c" event={"ID":"4c49756b-5f06-470b-9bc5-281b5bfbb198","Type":"ContainerStarted","Data":"6a3b7bc3fc11e7a387f059fed586d444313270785dac7ec63bef01f808835e39"} Jan 22 06:06:05 crc kubenswrapper[4982]: I0122 06:06:05.581418 4982 generic.go:334] "Generic (PLEG): container finished" 
podID="eb64b830-eb39-4a4c-8413-04876d86e890" containerID="91dad18bfff7d230114853f2b002e5d721b5ae8faf28fe8993f5274ca7c94eec" exitCode=0 Jan 22 06:06:05 crc kubenswrapper[4982]: I0122 06:06:05.581482 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-49e2-account-create-update-jrl9m" event={"ID":"eb64b830-eb39-4a4c-8413-04876d86e890","Type":"ContainerDied","Data":"91dad18bfff7d230114853f2b002e5d721b5ae8faf28fe8993f5274ca7c94eec"} Jan 22 06:06:05 crc kubenswrapper[4982]: I0122 06:06:05.581525 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-49e2-account-create-update-jrl9m" event={"ID":"eb64b830-eb39-4a4c-8413-04876d86e890","Type":"ContainerStarted","Data":"22948996fad807b9db60dcd3db7ce4d1795e6c7e5e7136365a82c17f4a11d697"} Jan 22 06:06:05 crc kubenswrapper[4982]: I0122 06:06:05.839013 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Jan 22 06:06:05 crc kubenswrapper[4982]: I0122 06:06:05.993671 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-r66tz" Jan 22 06:06:06 crc kubenswrapper[4982]: I0122 06:06:06.094133 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b9682832-1d53-4341-9dbc-10f057d1f6ad-swiftconf\") pod \"b9682832-1d53-4341-9dbc-10f057d1f6ad\" (UID: \"b9682832-1d53-4341-9dbc-10f057d1f6ad\") " Jan 22 06:06:06 crc kubenswrapper[4982]: I0122 06:06:06.094215 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9682832-1d53-4341-9dbc-10f057d1f6ad-combined-ca-bundle\") pod \"b9682832-1d53-4341-9dbc-10f057d1f6ad\" (UID: \"b9682832-1d53-4341-9dbc-10f057d1f6ad\") " Jan 22 06:06:06 crc kubenswrapper[4982]: I0122 06:06:06.094292 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b9682832-1d53-4341-9dbc-10f057d1f6ad-etc-swift\") pod \"b9682832-1d53-4341-9dbc-10f057d1f6ad\" (UID: \"b9682832-1d53-4341-9dbc-10f057d1f6ad\") " Jan 22 06:06:06 crc kubenswrapper[4982]: I0122 06:06:06.094322 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b9682832-1d53-4341-9dbc-10f057d1f6ad-dispersionconf\") pod \"b9682832-1d53-4341-9dbc-10f057d1f6ad\" (UID: \"b9682832-1d53-4341-9dbc-10f057d1f6ad\") " Jan 22 06:06:06 crc kubenswrapper[4982]: I0122 06:06:06.094368 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b9682832-1d53-4341-9dbc-10f057d1f6ad-ring-data-devices\") pod \"b9682832-1d53-4341-9dbc-10f057d1f6ad\" (UID: \"b9682832-1d53-4341-9dbc-10f057d1f6ad\") " Jan 22 06:06:06 crc kubenswrapper[4982]: I0122 06:06:06.094462 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gsb5s\" (UniqueName: \"kubernetes.io/projected/b9682832-1d53-4341-9dbc-10f057d1f6ad-kube-api-access-gsb5s\") pod \"b9682832-1d53-4341-9dbc-10f057d1f6ad\" (UID: \"b9682832-1d53-4341-9dbc-10f057d1f6ad\") " Jan 22 06:06:06 crc kubenswrapper[4982]: I0122 06:06:06.094479 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b9682832-1d53-4341-9dbc-10f057d1f6ad-scripts\") pod 
\"b9682832-1d53-4341-9dbc-10f057d1f6ad\" (UID: \"b9682832-1d53-4341-9dbc-10f057d1f6ad\") " Jan 22 06:06:06 crc kubenswrapper[4982]: I0122 06:06:06.094967 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9682832-1d53-4341-9dbc-10f057d1f6ad-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "b9682832-1d53-4341-9dbc-10f057d1f6ad" (UID: "b9682832-1d53-4341-9dbc-10f057d1f6ad"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:06 crc kubenswrapper[4982]: I0122 06:06:06.096365 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9682832-1d53-4341-9dbc-10f057d1f6ad-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "b9682832-1d53-4341-9dbc-10f057d1f6ad" (UID: "b9682832-1d53-4341-9dbc-10f057d1f6ad"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:06:06 crc kubenswrapper[4982]: I0122 06:06:06.102196 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9682832-1d53-4341-9dbc-10f057d1f6ad-kube-api-access-gsb5s" (OuterVolumeSpecName: "kube-api-access-gsb5s") pod "b9682832-1d53-4341-9dbc-10f057d1f6ad" (UID: "b9682832-1d53-4341-9dbc-10f057d1f6ad"). InnerVolumeSpecName "kube-api-access-gsb5s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:06:06 crc kubenswrapper[4982]: I0122 06:06:06.117769 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9682832-1d53-4341-9dbc-10f057d1f6ad-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "b9682832-1d53-4341-9dbc-10f057d1f6ad" (UID: "b9682832-1d53-4341-9dbc-10f057d1f6ad"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:06:06 crc kubenswrapper[4982]: I0122 06:06:06.121304 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9682832-1d53-4341-9dbc-10f057d1f6ad-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "b9682832-1d53-4341-9dbc-10f057d1f6ad" (UID: "b9682832-1d53-4341-9dbc-10f057d1f6ad"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:06:06 crc kubenswrapper[4982]: I0122 06:06:06.133201 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9682832-1d53-4341-9dbc-10f057d1f6ad-scripts" (OuterVolumeSpecName: "scripts") pod "b9682832-1d53-4341-9dbc-10f057d1f6ad" (UID: "b9682832-1d53-4341-9dbc-10f057d1f6ad"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:06 crc kubenswrapper[4982]: I0122 06:06:06.147350 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9682832-1d53-4341-9dbc-10f057d1f6ad-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b9682832-1d53-4341-9dbc-10f057d1f6ad" (UID: "b9682832-1d53-4341-9dbc-10f057d1f6ad"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:06:06 crc kubenswrapper[4982]: I0122 06:06:06.196477 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9682832-1d53-4341-9dbc-10f057d1f6ad-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:06 crc kubenswrapper[4982]: I0122 06:06:06.196524 4982 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b9682832-1d53-4341-9dbc-10f057d1f6ad-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:06 crc kubenswrapper[4982]: I0122 06:06:06.196536 4982 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b9682832-1d53-4341-9dbc-10f057d1f6ad-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:06 crc kubenswrapper[4982]: I0122 06:06:06.196548 4982 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b9682832-1d53-4341-9dbc-10f057d1f6ad-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:06 crc kubenswrapper[4982]: I0122 06:06:06.196560 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gsb5s\" (UniqueName: \"kubernetes.io/projected/b9682832-1d53-4341-9dbc-10f057d1f6ad-kube-api-access-gsb5s\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:06 crc kubenswrapper[4982]: I0122 06:06:06.196572 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b9682832-1d53-4341-9dbc-10f057d1f6ad-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:06 crc kubenswrapper[4982]: I0122 06:06:06.196584 4982 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b9682832-1d53-4341-9dbc-10f057d1f6ad-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:06 crc kubenswrapper[4982]: I0122 06:06:06.600721 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-r66tz" Jan 22 06:06:06 crc kubenswrapper[4982]: I0122 06:06:06.600792 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-r66tz" event={"ID":"b9682832-1d53-4341-9dbc-10f057d1f6ad","Type":"ContainerDied","Data":"88bbbc0a30a0cf8ae4bf2efa29bac91b48733dfb03cbc2827aa6f8af06d2208e"} Jan 22 06:06:06 crc kubenswrapper[4982]: I0122 06:06:06.600826 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="88bbbc0a30a0cf8ae4bf2efa29bac91b48733dfb03cbc2827aa6f8af06d2208e" Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.042508 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-49e2-account-create-update-jrl9m" Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.113809 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fv9lj\" (UniqueName: \"kubernetes.io/projected/eb64b830-eb39-4a4c-8413-04876d86e890-kube-api-access-fv9lj\") pod \"eb64b830-eb39-4a4c-8413-04876d86e890\" (UID: \"eb64b830-eb39-4a4c-8413-04876d86e890\") " Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.113903 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb64b830-eb39-4a4c-8413-04876d86e890-operator-scripts\") pod \"eb64b830-eb39-4a4c-8413-04876d86e890\" (UID: \"eb64b830-eb39-4a4c-8413-04876d86e890\") " Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.114787 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb64b830-eb39-4a4c-8413-04876d86e890-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "eb64b830-eb39-4a4c-8413-04876d86e890" (UID: "eb64b830-eb39-4a4c-8413-04876d86e890"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.119965 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb64b830-eb39-4a4c-8413-04876d86e890-kube-api-access-fv9lj" (OuterVolumeSpecName: "kube-api-access-fv9lj") pod "eb64b830-eb39-4a4c-8413-04876d86e890" (UID: "eb64b830-eb39-4a4c-8413-04876d86e890"). InnerVolumeSpecName "kube-api-access-fv9lj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.201336 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-xd8hj"] Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.203427 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-7vm6q" Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.208496 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-b8kgd" Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.211397 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-xd8hj"] Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.214693 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-61bb-account-create-update-5ntzd" Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.215331 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fv9lj\" (UniqueName: \"kubernetes.io/projected/eb64b830-eb39-4a4c-8413-04876d86e890-kube-api-access-fv9lj\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.215360 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb64b830-eb39-4a4c-8413-04876d86e890-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.316429 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kws5h\" (UniqueName: \"kubernetes.io/projected/d24e3505-d3d7-49fa-80fd-081138916d20-kube-api-access-kws5h\") pod \"d24e3505-d3d7-49fa-80fd-081138916d20\" (UID: \"d24e3505-d3d7-49fa-80fd-081138916d20\") " Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.316525 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96ba8b13-a38d-49d1-9f3b-a24ec4dabbee-operator-scripts\") pod \"96ba8b13-a38d-49d1-9f3b-a24ec4dabbee\" (UID: \"96ba8b13-a38d-49d1-9f3b-a24ec4dabbee\") " Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.316645 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26e1b065-9ba1-4c27-90a3-11b78de079b7-operator-scripts\") pod \"26e1b065-9ba1-4c27-90a3-11b78de079b7\" (UID: \"26e1b065-9ba1-4c27-90a3-11b78de079b7\") " Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.316689 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d24e3505-d3d7-49fa-80fd-081138916d20-operator-scripts\") pod \"d24e3505-d3d7-49fa-80fd-081138916d20\" (UID: \"d24e3505-d3d7-49fa-80fd-081138916d20\") " Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.316734 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r28vt\" (UniqueName: \"kubernetes.io/projected/26e1b065-9ba1-4c27-90a3-11b78de079b7-kube-api-access-r28vt\") pod \"26e1b065-9ba1-4c27-90a3-11b78de079b7\" (UID: \"26e1b065-9ba1-4c27-90a3-11b78de079b7\") " Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.316781 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bmstc\" (UniqueName: \"kubernetes.io/projected/96ba8b13-a38d-49d1-9f3b-a24ec4dabbee-kube-api-access-bmstc\") pod \"96ba8b13-a38d-49d1-9f3b-a24ec4dabbee\" (UID: \"96ba8b13-a38d-49d1-9f3b-a24ec4dabbee\") " Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.317546 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d24e3505-d3d7-49fa-80fd-081138916d20-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d24e3505-d3d7-49fa-80fd-081138916d20" (UID: "d24e3505-d3d7-49fa-80fd-081138916d20"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.317544 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26e1b065-9ba1-4c27-90a3-11b78de079b7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "26e1b065-9ba1-4c27-90a3-11b78de079b7" (UID: "26e1b065-9ba1-4c27-90a3-11b78de079b7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.317702 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96ba8b13-a38d-49d1-9f3b-a24ec4dabbee-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "96ba8b13-a38d-49d1-9f3b-a24ec4dabbee" (UID: "96ba8b13-a38d-49d1-9f3b-a24ec4dabbee"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.321701 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96ba8b13-a38d-49d1-9f3b-a24ec4dabbee-kube-api-access-bmstc" (OuterVolumeSpecName: "kube-api-access-bmstc") pod "96ba8b13-a38d-49d1-9f3b-a24ec4dabbee" (UID: "96ba8b13-a38d-49d1-9f3b-a24ec4dabbee"). InnerVolumeSpecName "kube-api-access-bmstc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.323954 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26e1b065-9ba1-4c27-90a3-11b78de079b7-kube-api-access-r28vt" (OuterVolumeSpecName: "kube-api-access-r28vt") pod "26e1b065-9ba1-4c27-90a3-11b78de079b7" (UID: "26e1b065-9ba1-4c27-90a3-11b78de079b7"). InnerVolumeSpecName "kube-api-access-r28vt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.331138 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d24e3505-d3d7-49fa-80fd-081138916d20-kube-api-access-kws5h" (OuterVolumeSpecName: "kube-api-access-kws5h") pod "d24e3505-d3d7-49fa-80fd-081138916d20" (UID: "d24e3505-d3d7-49fa-80fd-081138916d20"). InnerVolumeSpecName "kube-api-access-kws5h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.418451 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kws5h\" (UniqueName: \"kubernetes.io/projected/d24e3505-d3d7-49fa-80fd-081138916d20-kube-api-access-kws5h\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.418486 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96ba8b13-a38d-49d1-9f3b-a24ec4dabbee-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.418495 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26e1b065-9ba1-4c27-90a3-11b78de079b7-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.418504 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d24e3505-d3d7-49fa-80fd-081138916d20-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.418514 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r28vt\" (UniqueName: \"kubernetes.io/projected/26e1b065-9ba1-4c27-90a3-11b78de079b7-kube-api-access-r28vt\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.418522 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bmstc\" (UniqueName: \"kubernetes.io/projected/96ba8b13-a38d-49d1-9f3b-a24ec4dabbee-kube-api-access-bmstc\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.608652 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-7vm6q" Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.608649 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-7vm6q" event={"ID":"96ba8b13-a38d-49d1-9f3b-a24ec4dabbee","Type":"ContainerDied","Data":"0cb10a606573ea2556505605c0daf553876f6881958d7379d7bebf5cb5a626f5"} Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.608794 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0cb10a606573ea2556505605c0daf553876f6881958d7379d7bebf5cb5a626f5" Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.610187 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-49e2-account-create-update-jrl9m" event={"ID":"eb64b830-eb39-4a4c-8413-04876d86e890","Type":"ContainerDied","Data":"22948996fad807b9db60dcd3db7ce4d1795e6c7e5e7136365a82c17f4a11d697"} Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.610221 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-49e2-account-create-update-jrl9m" Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.610235 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="22948996fad807b9db60dcd3db7ce4d1795e6c7e5e7136365a82c17f4a11d697" Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.613686 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-61bb-account-create-update-5ntzd" event={"ID":"26e1b065-9ba1-4c27-90a3-11b78de079b7","Type":"ContainerDied","Data":"59e2b34d547223124cc72bd8613ccd96b92240c5f81f1cd379193e2f9d8762a3"} Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.613703 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-61bb-account-create-update-5ntzd" Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.613717 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="59e2b34d547223124cc72bd8613ccd96b92240c5f81f1cd379193e2f9d8762a3" Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.615274 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-b8kgd" event={"ID":"d24e3505-d3d7-49fa-80fd-081138916d20","Type":"ContainerDied","Data":"b1b3ee18cb74c1b5a590b87ea720ca7531b3f3ee964800b8909aba2387235bb0"} Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.615297 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-b8kgd" Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.615313 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b1b3ee18cb74c1b5a590b87ea720ca7531b3f3ee964800b8909aba2387235bb0" Jan 22 06:06:07 crc kubenswrapper[4982]: I0122 06:06:07.729172 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a754d10d-813c-4766-9818-1beca1eca54f" path="/var/lib/kubelet/pods/a754d10d-813c-4766-9818-1beca1eca54f/volumes" Jan 22 06:06:10 crc kubenswrapper[4982]: I0122 06:06:10.644146 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.039203 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-4z48g" podUID="29f503d7-a98b-4227-b9d9-865db16a2552" containerName="ovn-controller" probeResult="failure" output=< Jan 22 06:06:11 crc kubenswrapper[4982]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Jan 22 06:06:11 crc kubenswrapper[4982]: > Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.049072 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.156426 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-74zx8" Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.173599 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-74zx8" Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.419261 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-4z48g-config-fmr98"] Jan 22 06:06:11 crc kubenswrapper[4982]: E0122 06:06:11.419631 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb64b830-eb39-4a4c-8413-04876d86e890" containerName="mariadb-account-create-update" Jan 22 06:06:11 crc 
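The readiness failure above arrives as a multi-line output=< ... > block, and the identical ovn-controller failure recurs at 06:06:16 and 06:06:21 later in this log. A minimal sketch for extracting every "Probe failed" entry from the decompressed log, assuming only the klog key=value formatting visible in the surrounding lines; the file name probefail.go and reading from stdin are illustrative choices, not part of this artifact:

package main

// probefail.go: print probe type and pod for each "Probe failed" entry in a
// kubelet log read from stdin. The regex is an assumption derived from the
// klog lines above, not a stable kubelet interface.

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

func main() {
	re := regexp.MustCompile(`"Probe failed" probeType="([^"]+)" pod="([^"]+)"`)
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1<<20), 1<<20) // wrapped entries can be very long
	for sc.Scan() {
		if m := re.FindStringSubmatch(sc.Text()); m != nil {
			fmt.Printf("%s probe failed for %s\n", m[1], m[2])
		}
	}
}

Run it as, for example, zcat kubelet.log.gz | go run probefail.go.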
Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.419261 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-4z48g-config-fmr98"]
Jan 22 06:06:11 crc kubenswrapper[4982]: E0122 06:06:11.419631 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb64b830-eb39-4a4c-8413-04876d86e890" containerName="mariadb-account-create-update"
Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.419654 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb64b830-eb39-4a4c-8413-04876d86e890" containerName="mariadb-account-create-update"
Jan 22 06:06:11 crc kubenswrapper[4982]: E0122 06:06:11.419668 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d24e3505-d3d7-49fa-80fd-081138916d20" containerName="mariadb-database-create"
Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.419677 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d24e3505-d3d7-49fa-80fd-081138916d20" containerName="mariadb-database-create"
Jan 22 06:06:11 crc kubenswrapper[4982]: E0122 06:06:11.419695 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26e1b065-9ba1-4c27-90a3-11b78de079b7" containerName="mariadb-account-create-update"
Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.419703 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="26e1b065-9ba1-4c27-90a3-11b78de079b7" containerName="mariadb-account-create-update"
Jan 22 06:06:11 crc kubenswrapper[4982]: E0122 06:06:11.419720 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96ba8b13-a38d-49d1-9f3b-a24ec4dabbee" containerName="mariadb-database-create"
Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.419728 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="96ba8b13-a38d-49d1-9f3b-a24ec4dabbee" containerName="mariadb-database-create"
Jan 22 06:06:11 crc kubenswrapper[4982]: E0122 06:06:11.419751 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9682832-1d53-4341-9dbc-10f057d1f6ad" containerName="swift-ring-rebalance"
Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.419760 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9682832-1d53-4341-9dbc-10f057d1f6ad" containerName="swift-ring-rebalance"
Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.420005 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9682832-1d53-4341-9dbc-10f057d1f6ad" containerName="swift-ring-rebalance"
Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.420024 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d24e3505-d3d7-49fa-80fd-081138916d20" containerName="mariadb-database-create"
Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.420039 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="96ba8b13-a38d-49d1-9f3b-a24ec4dabbee" containerName="mariadb-database-create"
Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.420056 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="26e1b065-9ba1-4c27-90a3-11b78de079b7" containerName="mariadb-account-create-update"
Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.420074 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb64b830-eb39-4a4c-8413-04876d86e890" containerName="mariadb-account-create-update"
Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.420680 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-4z48g-config-fmr98" Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.425198 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.473436 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4z48g-config-fmr98"] Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.487091 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ba33954f-ac40-4d09-8e9b-500d59a92729-var-run-ovn\") pod \"ovn-controller-4z48g-config-fmr98\" (UID: \"ba33954f-ac40-4d09-8e9b-500d59a92729\") " pod="openstack/ovn-controller-4z48g-config-fmr98" Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.487150 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ba33954f-ac40-4d09-8e9b-500d59a92729-additional-scripts\") pod \"ovn-controller-4z48g-config-fmr98\" (UID: \"ba33954f-ac40-4d09-8e9b-500d59a92729\") " pod="openstack/ovn-controller-4z48g-config-fmr98" Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.487177 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ba33954f-ac40-4d09-8e9b-500d59a92729-var-run\") pod \"ovn-controller-4z48g-config-fmr98\" (UID: \"ba33954f-ac40-4d09-8e9b-500d59a92729\") " pod="openstack/ovn-controller-4z48g-config-fmr98" Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.487232 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ba33954f-ac40-4d09-8e9b-500d59a92729-var-log-ovn\") pod \"ovn-controller-4z48g-config-fmr98\" (UID: \"ba33954f-ac40-4d09-8e9b-500d59a92729\") " pod="openstack/ovn-controller-4z48g-config-fmr98" Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.487299 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8785f\" (UniqueName: \"kubernetes.io/projected/ba33954f-ac40-4d09-8e9b-500d59a92729-kube-api-access-8785f\") pod \"ovn-controller-4z48g-config-fmr98\" (UID: \"ba33954f-ac40-4d09-8e9b-500d59a92729\") " pod="openstack/ovn-controller-4z48g-config-fmr98" Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.487354 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ba33954f-ac40-4d09-8e9b-500d59a92729-scripts\") pod \"ovn-controller-4z48g-config-fmr98\" (UID: \"ba33954f-ac40-4d09-8e9b-500d59a92729\") " pod="openstack/ovn-controller-4z48g-config-fmr98" Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.588383 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8785f\" (UniqueName: \"kubernetes.io/projected/ba33954f-ac40-4d09-8e9b-500d59a92729-kube-api-access-8785f\") pod \"ovn-controller-4z48g-config-fmr98\" (UID: \"ba33954f-ac40-4d09-8e9b-500d59a92729\") " pod="openstack/ovn-controller-4z48g-config-fmr98" Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.588467 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/ba33954f-ac40-4d09-8e9b-500d59a92729-scripts\") pod \"ovn-controller-4z48g-config-fmr98\" (UID: \"ba33954f-ac40-4d09-8e9b-500d59a92729\") " pod="openstack/ovn-controller-4z48g-config-fmr98" Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.588551 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ba33954f-ac40-4d09-8e9b-500d59a92729-var-run-ovn\") pod \"ovn-controller-4z48g-config-fmr98\" (UID: \"ba33954f-ac40-4d09-8e9b-500d59a92729\") " pod="openstack/ovn-controller-4z48g-config-fmr98" Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.588583 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ba33954f-ac40-4d09-8e9b-500d59a92729-additional-scripts\") pod \"ovn-controller-4z48g-config-fmr98\" (UID: \"ba33954f-ac40-4d09-8e9b-500d59a92729\") " pod="openstack/ovn-controller-4z48g-config-fmr98" Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.588608 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ba33954f-ac40-4d09-8e9b-500d59a92729-var-run\") pod \"ovn-controller-4z48g-config-fmr98\" (UID: \"ba33954f-ac40-4d09-8e9b-500d59a92729\") " pod="openstack/ovn-controller-4z48g-config-fmr98" Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.588637 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ba33954f-ac40-4d09-8e9b-500d59a92729-var-log-ovn\") pod \"ovn-controller-4z48g-config-fmr98\" (UID: \"ba33954f-ac40-4d09-8e9b-500d59a92729\") " pod="openstack/ovn-controller-4z48g-config-fmr98" Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.589104 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ba33954f-ac40-4d09-8e9b-500d59a92729-var-run-ovn\") pod \"ovn-controller-4z48g-config-fmr98\" (UID: \"ba33954f-ac40-4d09-8e9b-500d59a92729\") " pod="openstack/ovn-controller-4z48g-config-fmr98" Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.589118 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ba33954f-ac40-4d09-8e9b-500d59a92729-var-run\") pod \"ovn-controller-4z48g-config-fmr98\" (UID: \"ba33954f-ac40-4d09-8e9b-500d59a92729\") " pod="openstack/ovn-controller-4z48g-config-fmr98" Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.589118 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ba33954f-ac40-4d09-8e9b-500d59a92729-var-log-ovn\") pod \"ovn-controller-4z48g-config-fmr98\" (UID: \"ba33954f-ac40-4d09-8e9b-500d59a92729\") " pod="openstack/ovn-controller-4z48g-config-fmr98" Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.589729 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ba33954f-ac40-4d09-8e9b-500d59a92729-additional-scripts\") pod \"ovn-controller-4z48g-config-fmr98\" (UID: \"ba33954f-ac40-4d09-8e9b-500d59a92729\") " pod="openstack/ovn-controller-4z48g-config-fmr98" Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.590935 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/ba33954f-ac40-4d09-8e9b-500d59a92729-scripts\") pod \"ovn-controller-4z48g-config-fmr98\" (UID: \"ba33954f-ac40-4d09-8e9b-500d59a92729\") " pod="openstack/ovn-controller-4z48g-config-fmr98" Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.612353 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8785f\" (UniqueName: \"kubernetes.io/projected/ba33954f-ac40-4d09-8e9b-500d59a92729-kube-api-access-8785f\") pod \"ovn-controller-4z48g-config-fmr98\" (UID: \"ba33954f-ac40-4d09-8e9b-500d59a92729\") " pod="openstack/ovn-controller-4z48g-config-fmr98" Jan 22 06:06:11 crc kubenswrapper[4982]: I0122 06:06:11.739821 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4z48g-config-fmr98" Jan 22 06:06:12 crc kubenswrapper[4982]: I0122 06:06:12.205572 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-4mft6"] Jan 22 06:06:12 crc kubenswrapper[4982]: I0122 06:06:12.206546 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-4mft6" Jan 22 06:06:12 crc kubenswrapper[4982]: I0122 06:06:12.213999 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-mariadb-root-db-secret" Jan 22 06:06:12 crc kubenswrapper[4982]: I0122 06:06:12.216794 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-4mft6"] Jan 22 06:06:12 crc kubenswrapper[4982]: I0122 06:06:12.301599 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vxhr\" (UniqueName: \"kubernetes.io/projected/d4ab262f-f627-4891-9fd7-efc5e0e8a2eb-kube-api-access-9vxhr\") pod \"root-account-create-update-4mft6\" (UID: \"d4ab262f-f627-4891-9fd7-efc5e0e8a2eb\") " pod="openstack/root-account-create-update-4mft6" Jan 22 06:06:12 crc kubenswrapper[4982]: I0122 06:06:12.301866 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4ab262f-f627-4891-9fd7-efc5e0e8a2eb-operator-scripts\") pod \"root-account-create-update-4mft6\" (UID: \"d4ab262f-f627-4891-9fd7-efc5e0e8a2eb\") " pod="openstack/root-account-create-update-4mft6" Jan 22 06:06:12 crc kubenswrapper[4982]: I0122 06:06:12.403798 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vxhr\" (UniqueName: \"kubernetes.io/projected/d4ab262f-f627-4891-9fd7-efc5e0e8a2eb-kube-api-access-9vxhr\") pod \"root-account-create-update-4mft6\" (UID: \"d4ab262f-f627-4891-9fd7-efc5e0e8a2eb\") " pod="openstack/root-account-create-update-4mft6" Jan 22 06:06:12 crc kubenswrapper[4982]: I0122 06:06:12.403950 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4ab262f-f627-4891-9fd7-efc5e0e8a2eb-operator-scripts\") pod \"root-account-create-update-4mft6\" (UID: \"d4ab262f-f627-4891-9fd7-efc5e0e8a2eb\") " pod="openstack/root-account-create-update-4mft6" Jan 22 06:06:12 crc kubenswrapper[4982]: I0122 06:06:12.404844 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4ab262f-f627-4891-9fd7-efc5e0e8a2eb-operator-scripts\") pod \"root-account-create-update-4mft6\" (UID: \"d4ab262f-f627-4891-9fd7-efc5e0e8a2eb\") " 
pod="openstack/root-account-create-update-4mft6" Jan 22 06:06:12 crc kubenswrapper[4982]: I0122 06:06:12.423821 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vxhr\" (UniqueName: \"kubernetes.io/projected/d4ab262f-f627-4891-9fd7-efc5e0e8a2eb-kube-api-access-9vxhr\") pod \"root-account-create-update-4mft6\" (UID: \"d4ab262f-f627-4891-9fd7-efc5e0e8a2eb\") " pod="openstack/root-account-create-update-4mft6" Jan 22 06:06:12 crc kubenswrapper[4982]: I0122 06:06:12.560713 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-4mft6" Jan 22 06:06:16 crc kubenswrapper[4982]: I0122 06:06:16.038455 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-4z48g" podUID="29f503d7-a98b-4227-b9d9-865db16a2552" containerName="ovn-controller" probeResult="failure" output=< Jan 22 06:06:16 crc kubenswrapper[4982]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Jan 22 06:06:16 crc kubenswrapper[4982]: > Jan 22 06:06:18 crc kubenswrapper[4982]: I0122 06:06:18.939442 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0e66de65-9639-4397-bc39-dfcf0c325dff-etc-swift\") pod \"swift-storage-0\" (UID: \"0e66de65-9639-4397-bc39-dfcf0c325dff\") " pod="openstack/swift-storage-0" Jan 22 06:06:18 crc kubenswrapper[4982]: I0122 06:06:18.951442 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0e66de65-9639-4397-bc39-dfcf0c325dff-etc-swift\") pod \"swift-storage-0\" (UID: \"0e66de65-9639-4397-bc39-dfcf0c325dff\") " pod="openstack/swift-storage-0" Jan 22 06:06:18 crc kubenswrapper[4982]: I0122 06:06:18.974112 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 06:06:18 crc kubenswrapper[4982]: I0122 06:06:18.974181 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 06:06:19 crc kubenswrapper[4982]: I0122 06:06:19.086844 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Jan 22 06:06:20 crc kubenswrapper[4982]: I0122 06:06:20.647167 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Jan 22 06:06:20 crc kubenswrapper[4982]: I0122 06:06:20.953874 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-4zhph"] Jan 22 06:06:20 crc kubenswrapper[4982]: I0122 06:06:20.955109 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-4zhph" Jan 22 06:06:20 crc kubenswrapper[4982]: I0122 06:06:20.964350 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-4zhph"] Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.035324 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-nfq8n"] Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.036328 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-nfq8n" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.060913 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-nfq8n"] Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.069640 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-8a3c-account-create-update-q6nqg"] Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.070566 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-8a3c-account-create-update-q6nqg" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.075149 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.075396 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-4z48g" podUID="29f503d7-a98b-4227-b9d9-865db16a2552" containerName="ovn-controller" probeResult="failure" output=< Jan 22 06:06:21 crc kubenswrapper[4982]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Jan 22 06:06:21 crc kubenswrapper[4982]: > Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.092995 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-8a3c-account-create-update-q6nqg"] Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.103932 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cb36afed-98e0-460c-ac13-f09334f4334e-operator-scripts\") pod \"cinder-db-create-4zhph\" (UID: \"cb36afed-98e0-460c-ac13-f09334f4334e\") " pod="openstack/cinder-db-create-4zhph" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.105130 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shz29\" (UniqueName: \"kubernetes.io/projected/cb36afed-98e0-460c-ac13-f09334f4334e-kube-api-access-shz29\") pod \"cinder-db-create-4zhph\" (UID: \"cb36afed-98e0-460c-ac13-f09334f4334e\") " pod="openstack/cinder-db-create-4zhph" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.105456 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pf6cr\" (UniqueName: \"kubernetes.io/projected/63574be5-6a79-44af-b36a-2d32bd676edd-kube-api-access-pf6cr\") pod \"barbican-db-create-nfq8n\" (UID: \"63574be5-6a79-44af-b36a-2d32bd676edd\") " pod="openstack/barbican-db-create-nfq8n" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.105557 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63574be5-6a79-44af-b36a-2d32bd676edd-operator-scripts\") pod \"barbican-db-create-nfq8n\" (UID: \"63574be5-6a79-44af-b36a-2d32bd676edd\") " pod="openstack/barbican-db-create-nfq8n" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.160925 
4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-a8f0-account-create-update-2nvzk"] Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.163078 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-a8f0-account-create-update-2nvzk" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.170564 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.187540 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-a8f0-account-create-update-2nvzk"] Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.206698 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1-operator-scripts\") pod \"cinder-8a3c-account-create-update-q6nqg\" (UID: \"4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1\") " pod="openstack/cinder-8a3c-account-create-update-q6nqg" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.206835 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pf6cr\" (UniqueName: \"kubernetes.io/projected/63574be5-6a79-44af-b36a-2d32bd676edd-kube-api-access-pf6cr\") pod \"barbican-db-create-nfq8n\" (UID: \"63574be5-6a79-44af-b36a-2d32bd676edd\") " pod="openstack/barbican-db-create-nfq8n" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.206996 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63574be5-6a79-44af-b36a-2d32bd676edd-operator-scripts\") pod \"barbican-db-create-nfq8n\" (UID: \"63574be5-6a79-44af-b36a-2d32bd676edd\") " pod="openstack/barbican-db-create-nfq8n" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.207030 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kldhj\" (UniqueName: \"kubernetes.io/projected/4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1-kube-api-access-kldhj\") pod \"cinder-8a3c-account-create-update-q6nqg\" (UID: \"4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1\") " pod="openstack/cinder-8a3c-account-create-update-q6nqg" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.207146 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cb36afed-98e0-460c-ac13-f09334f4334e-operator-scripts\") pod \"cinder-db-create-4zhph\" (UID: \"cb36afed-98e0-460c-ac13-f09334f4334e\") " pod="openstack/cinder-db-create-4zhph" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.207187 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shz29\" (UniqueName: \"kubernetes.io/projected/cb36afed-98e0-460c-ac13-f09334f4334e-kube-api-access-shz29\") pod \"cinder-db-create-4zhph\" (UID: \"cb36afed-98e0-460c-ac13-f09334f4334e\") " pod="openstack/cinder-db-create-4zhph" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.208230 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63574be5-6a79-44af-b36a-2d32bd676edd-operator-scripts\") pod \"barbican-db-create-nfq8n\" (UID: \"63574be5-6a79-44af-b36a-2d32bd676edd\") " pod="openstack/barbican-db-create-nfq8n" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.246685 4982 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cb36afed-98e0-460c-ac13-f09334f4334e-operator-scripts\") pod \"cinder-db-create-4zhph\" (UID: \"cb36afed-98e0-460c-ac13-f09334f4334e\") " pod="openstack/cinder-db-create-4zhph" Jan 22 06:06:21 crc kubenswrapper[4982]: E0122 06:06:21.270618 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api@sha256:e4aa4ebbb1e581a12040e9ad2ae2709ac31b5d965bb64fc4252d1028b05c565f" Jan 22 06:06:21 crc kubenswrapper[4982]: E0122 06:06:21.270988 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api@sha256:e4aa4ebbb1e581a12040e9ad2ae2709ac31b5d965bb64fc4252d1028b05c565f,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zfjfm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-z7b8c_openstack(4c49756b-5f06-470b-9bc5-281b5bfbb198): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 06:06:21 crc kubenswrapper[4982]: E0122 06:06:21.272184 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-z7b8c" podUID="4c49756b-5f06-470b-9bc5-281b5bfbb198" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.278653 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shz29\" (UniqueName: 
\"kubernetes.io/projected/cb36afed-98e0-460c-ac13-f09334f4334e-kube-api-access-shz29\") pod \"cinder-db-create-4zhph\" (UID: \"cb36afed-98e0-460c-ac13-f09334f4334e\") " pod="openstack/cinder-db-create-4zhph" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.278671 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pf6cr\" (UniqueName: \"kubernetes.io/projected/63574be5-6a79-44af-b36a-2d32bd676edd-kube-api-access-pf6cr\") pod \"barbican-db-create-nfq8n\" (UID: \"63574be5-6a79-44af-b36a-2d32bd676edd\") " pod="openstack/barbican-db-create-nfq8n" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.282532 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-4zhph" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.310841 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kldhj\" (UniqueName: \"kubernetes.io/projected/4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1-kube-api-access-kldhj\") pod \"cinder-8a3c-account-create-update-q6nqg\" (UID: \"4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1\") " pod="openstack/cinder-8a3c-account-create-update-q6nqg" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.310934 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2r7h\" (UniqueName: \"kubernetes.io/projected/4cae5a46-3cc2-4b42-bf1b-4c429c489f81-kube-api-access-g2r7h\") pod \"barbican-a8f0-account-create-update-2nvzk\" (UID: \"4cae5a46-3cc2-4b42-bf1b-4c429c489f81\") " pod="openstack/barbican-a8f0-account-create-update-2nvzk" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.310978 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4cae5a46-3cc2-4b42-bf1b-4c429c489f81-operator-scripts\") pod \"barbican-a8f0-account-create-update-2nvzk\" (UID: \"4cae5a46-3cc2-4b42-bf1b-4c429c489f81\") " pod="openstack/barbican-a8f0-account-create-update-2nvzk" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.311030 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1-operator-scripts\") pod \"cinder-8a3c-account-create-update-q6nqg\" (UID: \"4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1\") " pod="openstack/cinder-8a3c-account-create-update-q6nqg" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.311677 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1-operator-scripts\") pod \"cinder-8a3c-account-create-update-q6nqg\" (UID: \"4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1\") " pod="openstack/cinder-8a3c-account-create-update-q6nqg" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.353635 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kldhj\" (UniqueName: \"kubernetes.io/projected/4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1-kube-api-access-kldhj\") pod \"cinder-8a3c-account-create-update-q6nqg\" (UID: \"4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1\") " pod="openstack/cinder-8a3c-account-create-update-q6nqg" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.363381 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-nfq8n" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.386120 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-8a3c-account-create-update-q6nqg" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.409256 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-vdfzh"] Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.410211 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-vdfzh" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.414062 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2r7h\" (UniqueName: \"kubernetes.io/projected/4cae5a46-3cc2-4b42-bf1b-4c429c489f81-kube-api-access-g2r7h\") pod \"barbican-a8f0-account-create-update-2nvzk\" (UID: \"4cae5a46-3cc2-4b42-bf1b-4c429c489f81\") " pod="openstack/barbican-a8f0-account-create-update-2nvzk" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.414130 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4cae5a46-3cc2-4b42-bf1b-4c429c489f81-operator-scripts\") pod \"barbican-a8f0-account-create-update-2nvzk\" (UID: \"4cae5a46-3cc2-4b42-bf1b-4c429c489f81\") " pod="openstack/barbican-a8f0-account-create-update-2nvzk" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.414745 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4cae5a46-3cc2-4b42-bf1b-4c429c489f81-operator-scripts\") pod \"barbican-a8f0-account-create-update-2nvzk\" (UID: \"4cae5a46-3cc2-4b42-bf1b-4c429c489f81\") " pod="openstack/barbican-a8f0-account-create-update-2nvzk" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.422219 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.422494 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-jckfq" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.422589 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.422627 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.431369 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-vdfzh"] Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.468745 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2r7h\" (UniqueName: \"kubernetes.io/projected/4cae5a46-3cc2-4b42-bf1b-4c429c489f81-kube-api-access-g2r7h\") pod \"barbican-a8f0-account-create-update-2nvzk\" (UID: \"4cae5a46-3cc2-4b42-bf1b-4c429c489f81\") " pod="openstack/barbican-a8f0-account-create-update-2nvzk" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.487964 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-x2w8g"] Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.489009 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-x2w8g" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.505795 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-a8f0-account-create-update-2nvzk" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.522012 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c42a1abb-5389-40ec-b9b3-c7e7c3ea6926-operator-scripts\") pod \"neutron-db-create-x2w8g\" (UID: \"c42a1abb-5389-40ec-b9b3-c7e7c3ea6926\") " pod="openstack/neutron-db-create-x2w8g" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.522086 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e75f34c-23aa-46c5-a382-7ff49efa327e-config-data\") pod \"keystone-db-sync-vdfzh\" (UID: \"6e75f34c-23aa-46c5-a382-7ff49efa327e\") " pod="openstack/keystone-db-sync-vdfzh" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.522119 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e75f34c-23aa-46c5-a382-7ff49efa327e-combined-ca-bundle\") pod \"keystone-db-sync-vdfzh\" (UID: \"6e75f34c-23aa-46c5-a382-7ff49efa327e\") " pod="openstack/keystone-db-sync-vdfzh" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.522175 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5chtr\" (UniqueName: \"kubernetes.io/projected/6e75f34c-23aa-46c5-a382-7ff49efa327e-kube-api-access-5chtr\") pod \"keystone-db-sync-vdfzh\" (UID: \"6e75f34c-23aa-46c5-a382-7ff49efa327e\") " pod="openstack/keystone-db-sync-vdfzh" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.522207 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svbpb\" (UniqueName: \"kubernetes.io/projected/c42a1abb-5389-40ec-b9b3-c7e7c3ea6926-kube-api-access-svbpb\") pod \"neutron-db-create-x2w8g\" (UID: \"c42a1abb-5389-40ec-b9b3-c7e7c3ea6926\") " pod="openstack/neutron-db-create-x2w8g" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.540409 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-a9e6-account-create-update-nmnpj"] Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.541439 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-a9e6-account-create-update-nmnpj" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.550147 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.550714 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-x2w8g"] Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.561925 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-a9e6-account-create-update-nmnpj"] Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.623929 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c42a1abb-5389-40ec-b9b3-c7e7c3ea6926-operator-scripts\") pod \"neutron-db-create-x2w8g\" (UID: \"c42a1abb-5389-40ec-b9b3-c7e7c3ea6926\") " pod="openstack/neutron-db-create-x2w8g" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.623998 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbt7q\" (UniqueName: \"kubernetes.io/projected/6279d28e-2c9c-4e83-9db9-db105f664da4-kube-api-access-bbt7q\") pod \"neutron-a9e6-account-create-update-nmnpj\" (UID: \"6279d28e-2c9c-4e83-9db9-db105f664da4\") " pod="openstack/neutron-a9e6-account-create-update-nmnpj" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.624023 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e75f34c-23aa-46c5-a382-7ff49efa327e-config-data\") pod \"keystone-db-sync-vdfzh\" (UID: \"6e75f34c-23aa-46c5-a382-7ff49efa327e\") " pod="openstack/keystone-db-sync-vdfzh" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.624060 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e75f34c-23aa-46c5-a382-7ff49efa327e-combined-ca-bundle\") pod \"keystone-db-sync-vdfzh\" (UID: \"6e75f34c-23aa-46c5-a382-7ff49efa327e\") " pod="openstack/keystone-db-sync-vdfzh" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.624084 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6279d28e-2c9c-4e83-9db9-db105f664da4-operator-scripts\") pod \"neutron-a9e6-account-create-update-nmnpj\" (UID: \"6279d28e-2c9c-4e83-9db9-db105f664da4\") " pod="openstack/neutron-a9e6-account-create-update-nmnpj" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.624128 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5chtr\" (UniqueName: \"kubernetes.io/projected/6e75f34c-23aa-46c5-a382-7ff49efa327e-kube-api-access-5chtr\") pod \"keystone-db-sync-vdfzh\" (UID: \"6e75f34c-23aa-46c5-a382-7ff49efa327e\") " pod="openstack/keystone-db-sync-vdfzh" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.624163 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svbpb\" (UniqueName: \"kubernetes.io/projected/c42a1abb-5389-40ec-b9b3-c7e7c3ea6926-kube-api-access-svbpb\") pod \"neutron-db-create-x2w8g\" (UID: \"c42a1abb-5389-40ec-b9b3-c7e7c3ea6926\") " pod="openstack/neutron-db-create-x2w8g" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.625306 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/c42a1abb-5389-40ec-b9b3-c7e7c3ea6926-operator-scripts\") pod \"neutron-db-create-x2w8g\" (UID: \"c42a1abb-5389-40ec-b9b3-c7e7c3ea6926\") " pod="openstack/neutron-db-create-x2w8g" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.631254 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e75f34c-23aa-46c5-a382-7ff49efa327e-config-data\") pod \"keystone-db-sync-vdfzh\" (UID: \"6e75f34c-23aa-46c5-a382-7ff49efa327e\") " pod="openstack/keystone-db-sync-vdfzh" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.631759 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e75f34c-23aa-46c5-a382-7ff49efa327e-combined-ca-bundle\") pod \"keystone-db-sync-vdfzh\" (UID: \"6e75f34c-23aa-46c5-a382-7ff49efa327e\") " pod="openstack/keystone-db-sync-vdfzh" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.648572 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svbpb\" (UniqueName: \"kubernetes.io/projected/c42a1abb-5389-40ec-b9b3-c7e7c3ea6926-kube-api-access-svbpb\") pod \"neutron-db-create-x2w8g\" (UID: \"c42a1abb-5389-40ec-b9b3-c7e7c3ea6926\") " pod="openstack/neutron-db-create-x2w8g" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.658303 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5chtr\" (UniqueName: \"kubernetes.io/projected/6e75f34c-23aa-46c5-a382-7ff49efa327e-kube-api-access-5chtr\") pod \"keystone-db-sync-vdfzh\" (UID: \"6e75f34c-23aa-46c5-a382-7ff49efa327e\") " pod="openstack/keystone-db-sync-vdfzh" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.731036 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbt7q\" (UniqueName: \"kubernetes.io/projected/6279d28e-2c9c-4e83-9db9-db105f664da4-kube-api-access-bbt7q\") pod \"neutron-a9e6-account-create-update-nmnpj\" (UID: \"6279d28e-2c9c-4e83-9db9-db105f664da4\") " pod="openstack/neutron-a9e6-account-create-update-nmnpj" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.731105 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6279d28e-2c9c-4e83-9db9-db105f664da4-operator-scripts\") pod \"neutron-a9e6-account-create-update-nmnpj\" (UID: \"6279d28e-2c9c-4e83-9db9-db105f664da4\") " pod="openstack/neutron-a9e6-account-create-update-nmnpj" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.731779 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6279d28e-2c9c-4e83-9db9-db105f664da4-operator-scripts\") pod \"neutron-a9e6-account-create-update-nmnpj\" (UID: \"6279d28e-2c9c-4e83-9db9-db105f664da4\") " pod="openstack/neutron-a9e6-account-create-update-nmnpj" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.750510 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-vdfzh" Jan 22 06:06:21 crc kubenswrapper[4982]: E0122 06:06:21.754201 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api@sha256:e4aa4ebbb1e581a12040e9ad2ae2709ac31b5d965bb64fc4252d1028b05c565f\\\"\"" pod="openstack/glance-db-sync-z7b8c" podUID="4c49756b-5f06-470b-9bc5-281b5bfbb198" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.775438 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbt7q\" (UniqueName: \"kubernetes.io/projected/6279d28e-2c9c-4e83-9db9-db105f664da4-kube-api-access-bbt7q\") pod \"neutron-a9e6-account-create-update-nmnpj\" (UID: \"6279d28e-2c9c-4e83-9db9-db105f664da4\") " pod="openstack/neutron-a9e6-account-create-update-nmnpj" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.846089 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-x2w8g" Jan 22 06:06:21 crc kubenswrapper[4982]: I0122 06:06:21.873287 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-a9e6-account-create-update-nmnpj" Jan 22 06:06:22 crc kubenswrapper[4982]: I0122 06:06:22.133917 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-4mft6"] Jan 22 06:06:22 crc kubenswrapper[4982]: W0122 06:06:22.142278 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd4ab262f_f627_4891_9fd7_efc5e0e8a2eb.slice/crio-209c9219b962d916aa47847c0525a61a8a20d3794a21c063c00787678c2c6c54 WatchSource:0}: Error finding container 209c9219b962d916aa47847c0525a61a8a20d3794a21c063c00787678c2c6c54: Status 404 returned error can't find the container with id 209c9219b962d916aa47847c0525a61a8a20d3794a21c063c00787678c2c6c54 Jan 22 06:06:22 crc kubenswrapper[4982]: I0122 06:06:22.180232 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-4zhph"] Jan 22 06:06:22 crc kubenswrapper[4982]: I0122 06:06:22.295646 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-a8f0-account-create-update-2nvzk"] Jan 22 06:06:22 crc kubenswrapper[4982]: W0122 06:06:22.302440 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4cae5a46_3cc2_4b42_bf1b_4c429c489f81.slice/crio-8d33bfc3a78ed4574f59d8c974c3a9022f99845a74acc90814f64822f6400c37 WatchSource:0}: Error finding container 8d33bfc3a78ed4574f59d8c974c3a9022f99845a74acc90814f64822f6400c37: Status 404 returned error can't find the container with id 8d33bfc3a78ed4574f59d8c974c3a9022f99845a74acc90814f64822f6400c37 Jan 22 06:06:22 crc kubenswrapper[4982]: I0122 06:06:22.332893 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4z48g-config-fmr98"] Jan 22 06:06:22 crc kubenswrapper[4982]: I0122 06:06:22.338921 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-nfq8n"] Jan 22 06:06:22 crc kubenswrapper[4982]: I0122 06:06:22.386920 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Jan 22 06:06:22 crc kubenswrapper[4982]: I0122 06:06:22.466999 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-vdfzh"] Jan 22 06:06:22 crc 
kubenswrapper[4982]: I0122 06:06:22.480846 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-8a3c-account-create-update-q6nqg"] Jan 22 06:06:22 crc kubenswrapper[4982]: W0122 06:06:22.483105 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6e75f34c_23aa_46c5_a382_7ff49efa327e.slice/crio-4066521c4d58b2a17e3c4c40e430ff79d582edf28519cf258553171d0cd09623 WatchSource:0}: Error finding container 4066521c4d58b2a17e3c4c40e430ff79d582edf28519cf258553171d0cd09623: Status 404 returned error can't find the container with id 4066521c4d58b2a17e3c4c40e430ff79d582edf28519cf258553171d0cd09623 Jan 22 06:06:22 crc kubenswrapper[4982]: I0122 06:06:22.606148 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-x2w8g"] Jan 22 06:06:22 crc kubenswrapper[4982]: I0122 06:06:22.613710 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-a9e6-account-create-update-nmnpj"] Jan 22 06:06:22 crc kubenswrapper[4982]: W0122 06:06:22.636132 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6279d28e_2c9c_4e83_9db9_db105f664da4.slice/crio-bb020ab6f2af1416adaaba071c0f6ae75c17cc6250d520e0a8acdacde6b6b59d WatchSource:0}: Error finding container bb020ab6f2af1416adaaba071c0f6ae75c17cc6250d520e0a8acdacde6b6b59d: Status 404 returned error can't find the container with id bb020ab6f2af1416adaaba071c0f6ae75c17cc6250d520e0a8acdacde6b6b59d Jan 22 06:06:22 crc kubenswrapper[4982]: I0122 06:06:22.778041 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-nfq8n" event={"ID":"63574be5-6a79-44af-b36a-2d32bd676edd","Type":"ContainerStarted","Data":"e4bcbdccd4c9eb15eecdd175dc951dcc4cded5d82f01be1657956b30eb3f4bbb"} Jan 22 06:06:22 crc kubenswrapper[4982]: I0122 06:06:22.779480 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-nfq8n" event={"ID":"63574be5-6a79-44af-b36a-2d32bd676edd","Type":"ContainerStarted","Data":"19bb62bac6623b870651ce0f3d7a5189380c6376a8590b7f6a93e9cf18f06546"} Jan 22 06:06:22 crc kubenswrapper[4982]: I0122 06:06:22.783256 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-a9e6-account-create-update-nmnpj" event={"ID":"6279d28e-2c9c-4e83-9db9-db105f664da4","Type":"ContainerStarted","Data":"bb020ab6f2af1416adaaba071c0f6ae75c17cc6250d520e0a8acdacde6b6b59d"} Jan 22 06:06:22 crc kubenswrapper[4982]: I0122 06:06:22.785978 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-8a3c-account-create-update-q6nqg" event={"ID":"4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1","Type":"ContainerStarted","Data":"041fb90cabe67b0ff6a1f729262835dc3719ade67e8c157c92d95706cf6a990b"} Jan 22 06:06:22 crc kubenswrapper[4982]: I0122 06:06:22.786042 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-8a3c-account-create-update-q6nqg" event={"ID":"4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1","Type":"ContainerStarted","Data":"f517eba84e83fa33eb46ceee1ae1c4881e11bfe3ec9bb017c2c60554dd3986ed"} Jan 22 06:06:22 crc kubenswrapper[4982]: I0122 06:06:22.797751 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-4mft6" event={"ID":"d4ab262f-f627-4891-9fd7-efc5e0e8a2eb","Type":"ContainerStarted","Data":"6d529e2f9c7a8d7c7b0443548df383571f79d3ebf05dff9c71e77849bbfde977"} Jan 22 06:06:22 crc kubenswrapper[4982]: I0122 06:06:22.798077 
4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-4mft6" event={"ID":"d4ab262f-f627-4891-9fd7-efc5e0e8a2eb","Type":"ContainerStarted","Data":"209c9219b962d916aa47847c0525a61a8a20d3794a21c063c00787678c2c6c54"} Jan 22 06:06:22 crc kubenswrapper[4982]: I0122 06:06:22.799768 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-create-nfq8n" podStartSLOduration=1.79974626 podStartE2EDuration="1.79974626s" podCreationTimestamp="2026-01-22 06:06:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:06:22.79704206 +0000 UTC m=+1243.635680063" watchObservedRunningTime="2026-01-22 06:06:22.79974626 +0000 UTC m=+1243.638384263" Jan 22 06:06:22 crc kubenswrapper[4982]: I0122 06:06:22.805606 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-4zhph" event={"ID":"cb36afed-98e0-460c-ac13-f09334f4334e","Type":"ContainerStarted","Data":"3cc5e4d2d0314f0d79f3bc022c76c4814327e535cb229ac9ea69b99793b639e1"} Jan 22 06:06:22 crc kubenswrapper[4982]: I0122 06:06:22.805648 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-4zhph" event={"ID":"cb36afed-98e0-460c-ac13-f09334f4334e","Type":"ContainerStarted","Data":"a74952a742fa6a3b12fbde3bb7a0b9b0664e79ffc6e52f90d5abcbef736988e0"} Jan 22 06:06:22 crc kubenswrapper[4982]: I0122 06:06:22.845410 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-a8f0-account-create-update-2nvzk" event={"ID":"4cae5a46-3cc2-4b42-bf1b-4c429c489f81","Type":"ContainerStarted","Data":"32877ec2e3d36e3e0a5db604267c0aa2ff16bc6c3b616ac86b4b260c0191e454"} Jan 22 06:06:22 crc kubenswrapper[4982]: I0122 06:06:22.845464 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-a8f0-account-create-update-2nvzk" event={"ID":"4cae5a46-3cc2-4b42-bf1b-4c429c489f81","Type":"ContainerStarted","Data":"8d33bfc3a78ed4574f59d8c974c3a9022f99845a74acc90814f64822f6400c37"} Jan 22 06:06:22 crc kubenswrapper[4982]: I0122 06:06:22.853982 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4z48g-config-fmr98" event={"ID":"ba33954f-ac40-4d09-8e9b-500d59a92729","Type":"ContainerStarted","Data":"cf2bb070f887de8ffa9f6fa71a4230e53ea11650e53bc5ad809d7ec836fff0a2"} Jan 22 06:06:22 crc kubenswrapper[4982]: I0122 06:06:22.854024 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4z48g-config-fmr98" event={"ID":"ba33954f-ac40-4d09-8e9b-500d59a92729","Type":"ContainerStarted","Data":"b8a02058e38ca97279728c3fbcc050a12b457e43c989fa44ce7019119fb31b3c"} Jan 22 06:06:22 crc kubenswrapper[4982]: I0122 06:06:22.855852 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0e66de65-9639-4397-bc39-dfcf0c325dff","Type":"ContainerStarted","Data":"69159f31516b9b97593444a8f33bb79c004b1603ecffb30aa1115efbc9c78432"} Jan 22 06:06:22 crc kubenswrapper[4982]: I0122 06:06:22.866409 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-x2w8g" event={"ID":"c42a1abb-5389-40ec-b9b3-c7e7c3ea6926","Type":"ContainerStarted","Data":"e491a1a73de0ffbf1d70001f8368865965449b8e7022b2e50fe113ea6e4ae427"} Jan 22 06:06:22 crc kubenswrapper[4982]: I0122 06:06:22.870034 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-vdfzh" 
event={"ID":"6e75f34c-23aa-46c5-a382-7ff49efa327e","Type":"ContainerStarted","Data":"4066521c4d58b2a17e3c4c40e430ff79d582edf28519cf258553171d0cd09623"} Jan 22 06:06:22 crc kubenswrapper[4982]: I0122 06:06:22.874226 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-8a3c-account-create-update-q6nqg" podStartSLOduration=1.8742018630000001 podStartE2EDuration="1.874201863s" podCreationTimestamp="2026-01-22 06:06:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:06:22.835815511 +0000 UTC m=+1243.674453524" watchObservedRunningTime="2026-01-22 06:06:22.874201863 +0000 UTC m=+1243.712839866" Jan 22 06:06:22 crc kubenswrapper[4982]: I0122 06:06:22.914682 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-create-4zhph" podStartSLOduration=2.914665988 podStartE2EDuration="2.914665988s" podCreationTimestamp="2026-01-22 06:06:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:06:22.860087945 +0000 UTC m=+1243.698725968" watchObservedRunningTime="2026-01-22 06:06:22.914665988 +0000 UTC m=+1243.753303991" Jan 22 06:06:22 crc kubenswrapper[4982]: I0122 06:06:22.933826 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/root-account-create-update-4mft6" podStartSLOduration=10.933809768 podStartE2EDuration="10.933809768s" podCreationTimestamp="2026-01-22 06:06:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:06:22.911099826 +0000 UTC m=+1243.749737829" watchObservedRunningTime="2026-01-22 06:06:22.933809768 +0000 UTC m=+1243.772447771" Jan 22 06:06:22 crc kubenswrapper[4982]: I0122 06:06:22.942398 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-4z48g-config-fmr98" podStartSLOduration=11.942384122 podStartE2EDuration="11.942384122s" podCreationTimestamp="2026-01-22 06:06:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:06:22.932721779 +0000 UTC m=+1243.771359782" watchObservedRunningTime="2026-01-22 06:06:22.942384122 +0000 UTC m=+1243.781022115" Jan 22 06:06:22 crc kubenswrapper[4982]: I0122 06:06:22.953628 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-a8f0-account-create-update-2nvzk" podStartSLOduration=1.953613055 podStartE2EDuration="1.953613055s" podCreationTimestamp="2026-01-22 06:06:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:06:22.94922077 +0000 UTC m=+1243.787858773" watchObservedRunningTime="2026-01-22 06:06:22.953613055 +0000 UTC m=+1243.792251058" Jan 22 06:06:23 crc kubenswrapper[4982]: I0122 06:06:23.911875 4982 generic.go:334] "Generic (PLEG): container finished" podID="cb36afed-98e0-460c-ac13-f09334f4334e" containerID="3cc5e4d2d0314f0d79f3bc022c76c4814327e535cb229ac9ea69b99793b639e1" exitCode=0 Jan 22 06:06:23 crc kubenswrapper[4982]: I0122 06:06:23.912330 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-4zhph" 
event={"ID":"cb36afed-98e0-460c-ac13-f09334f4334e","Type":"ContainerDied","Data":"3cc5e4d2d0314f0d79f3bc022c76c4814327e535cb229ac9ea69b99793b639e1"} Jan 22 06:06:23 crc kubenswrapper[4982]: I0122 06:06:23.913900 4982 generic.go:334] "Generic (PLEG): container finished" podID="4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1" containerID="041fb90cabe67b0ff6a1f729262835dc3719ade67e8c157c92d95706cf6a990b" exitCode=0 Jan 22 06:06:23 crc kubenswrapper[4982]: I0122 06:06:23.914001 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-8a3c-account-create-update-q6nqg" event={"ID":"4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1","Type":"ContainerDied","Data":"041fb90cabe67b0ff6a1f729262835dc3719ade67e8c157c92d95706cf6a990b"} Jan 22 06:06:23 crc kubenswrapper[4982]: I0122 06:06:23.917816 4982 generic.go:334] "Generic (PLEG): container finished" podID="ba33954f-ac40-4d09-8e9b-500d59a92729" containerID="cf2bb070f887de8ffa9f6fa71a4230e53ea11650e53bc5ad809d7ec836fff0a2" exitCode=0 Jan 22 06:06:23 crc kubenswrapper[4982]: I0122 06:06:23.917973 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4z48g-config-fmr98" event={"ID":"ba33954f-ac40-4d09-8e9b-500d59a92729","Type":"ContainerDied","Data":"cf2bb070f887de8ffa9f6fa71a4230e53ea11650e53bc5ad809d7ec836fff0a2"} Jan 22 06:06:23 crc kubenswrapper[4982]: I0122 06:06:23.919757 4982 generic.go:334] "Generic (PLEG): container finished" podID="c42a1abb-5389-40ec-b9b3-c7e7c3ea6926" containerID="2dac11986a165e87adcb29b9332da29904dd3b38da6e010dced58f56bcf4ad7a" exitCode=0 Jan 22 06:06:23 crc kubenswrapper[4982]: I0122 06:06:23.919817 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-x2w8g" event={"ID":"c42a1abb-5389-40ec-b9b3-c7e7c3ea6926","Type":"ContainerDied","Data":"2dac11986a165e87adcb29b9332da29904dd3b38da6e010dced58f56bcf4ad7a"} Jan 22 06:06:23 crc kubenswrapper[4982]: I0122 06:06:23.921599 4982 generic.go:334] "Generic (PLEG): container finished" podID="d4ab262f-f627-4891-9fd7-efc5e0e8a2eb" containerID="6d529e2f9c7a8d7c7b0443548df383571f79d3ebf05dff9c71e77849bbfde977" exitCode=0 Jan 22 06:06:23 crc kubenswrapper[4982]: I0122 06:06:23.921647 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-4mft6" event={"ID":"d4ab262f-f627-4891-9fd7-efc5e0e8a2eb","Type":"ContainerDied","Data":"6d529e2f9c7a8d7c7b0443548df383571f79d3ebf05dff9c71e77849bbfde977"} Jan 22 06:06:23 crc kubenswrapper[4982]: I0122 06:06:23.923013 4982 generic.go:334] "Generic (PLEG): container finished" podID="63574be5-6a79-44af-b36a-2d32bd676edd" containerID="e4bcbdccd4c9eb15eecdd175dc951dcc4cded5d82f01be1657956b30eb3f4bbb" exitCode=0 Jan 22 06:06:23 crc kubenswrapper[4982]: I0122 06:06:23.923060 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-nfq8n" event={"ID":"63574be5-6a79-44af-b36a-2d32bd676edd","Type":"ContainerDied","Data":"e4bcbdccd4c9eb15eecdd175dc951dcc4cded5d82f01be1657956b30eb3f4bbb"} Jan 22 06:06:23 crc kubenswrapper[4982]: I0122 06:06:23.925419 4982 generic.go:334] "Generic (PLEG): container finished" podID="6279d28e-2c9c-4e83-9db9-db105f664da4" containerID="715c04a2da3985828f5bdcf5384f56119ba51d448ff26fc87cbe6892e62a56ab" exitCode=0 Jan 22 06:06:23 crc kubenswrapper[4982]: I0122 06:06:23.925456 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-a9e6-account-create-update-nmnpj" 
event={"ID":"6279d28e-2c9c-4e83-9db9-db105f664da4","Type":"ContainerDied","Data":"715c04a2da3985828f5bdcf5384f56119ba51d448ff26fc87cbe6892e62a56ab"} Jan 22 06:06:23 crc kubenswrapper[4982]: I0122 06:06:23.926657 4982 generic.go:334] "Generic (PLEG): container finished" podID="4cae5a46-3cc2-4b42-bf1b-4c429c489f81" containerID="32877ec2e3d36e3e0a5db604267c0aa2ff16bc6c3b616ac86b4b260c0191e454" exitCode=0 Jan 22 06:06:23 crc kubenswrapper[4982]: I0122 06:06:23.926684 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-a8f0-account-create-update-2nvzk" event={"ID":"4cae5a46-3cc2-4b42-bf1b-4c429c489f81","Type":"ContainerDied","Data":"32877ec2e3d36e3e0a5db604267c0aa2ff16bc6c3b616ac86b4b260c0191e454"} Jan 22 06:06:24 crc kubenswrapper[4982]: I0122 06:06:24.944383 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0e66de65-9639-4397-bc39-dfcf0c325dff","Type":"ContainerStarted","Data":"0c83088039377322228a75312f73aef354b452dc32b72f3e7422b93e15270c5a"} Jan 22 06:06:24 crc kubenswrapper[4982]: I0122 06:06:24.944728 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0e66de65-9639-4397-bc39-dfcf0c325dff","Type":"ContainerStarted","Data":"95bd78bf0085807f031d31a6e3f8ecd58ff306054033420b516a89b6c828e39b"} Jan 22 06:06:24 crc kubenswrapper[4982]: I0122 06:06:24.944741 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0e66de65-9639-4397-bc39-dfcf0c325dff","Type":"ContainerStarted","Data":"4a16824db8797178b4f0e9143a8c7fd2a4c2b43cd6bdf358987351b835a9dfa3"} Jan 22 06:06:24 crc kubenswrapper[4982]: I0122 06:06:24.944753 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0e66de65-9639-4397-bc39-dfcf0c325dff","Type":"ContainerStarted","Data":"78cb51cdcbb32c7c566dd440fe3eddf657c1d8fa7d725c7170c6f86381854c75"} Jan 22 06:06:26 crc kubenswrapper[4982]: I0122 06:06:26.033617 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-4z48g" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.252945 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-x2w8g" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.320486 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-a9e6-account-create-update-nmnpj" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.326031 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-4zhph" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.336612 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-svbpb\" (UniqueName: \"kubernetes.io/projected/c42a1abb-5389-40ec-b9b3-c7e7c3ea6926-kube-api-access-svbpb\") pod \"c42a1abb-5389-40ec-b9b3-c7e7c3ea6926\" (UID: \"c42a1abb-5389-40ec-b9b3-c7e7c3ea6926\") " Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.336892 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c42a1abb-5389-40ec-b9b3-c7e7c3ea6926-operator-scripts\") pod \"c42a1abb-5389-40ec-b9b3-c7e7c3ea6926\" (UID: \"c42a1abb-5389-40ec-b9b3-c7e7c3ea6926\") " Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.337823 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c42a1abb-5389-40ec-b9b3-c7e7c3ea6926-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c42a1abb-5389-40ec-b9b3-c7e7c3ea6926" (UID: "c42a1abb-5389-40ec-b9b3-c7e7c3ea6926"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.337846 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-8a3c-account-create-update-q6nqg" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.374630 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c42a1abb-5389-40ec-b9b3-c7e7c3ea6926-kube-api-access-svbpb" (OuterVolumeSpecName: "kube-api-access-svbpb") pod "c42a1abb-5389-40ec-b9b3-c7e7c3ea6926" (UID: "c42a1abb-5389-40ec-b9b3-c7e7c3ea6926"). InnerVolumeSpecName "kube-api-access-svbpb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.420068 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-4mft6" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.427428 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4z48g-config-fmr98" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.435825 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-nfq8n" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.438006 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cb36afed-98e0-460c-ac13-f09334f4334e-operator-scripts\") pod \"cb36afed-98e0-460c-ac13-f09334f4334e\" (UID: \"cb36afed-98e0-460c-ac13-f09334f4334e\") " Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.438430 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb36afed-98e0-460c-ac13-f09334f4334e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "cb36afed-98e0-460c-ac13-f09334f4334e" (UID: "cb36afed-98e0-460c-ac13-f09334f4334e"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.438493 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6279d28e-2c9c-4e83-9db9-db105f664da4-operator-scripts\") pod \"6279d28e-2c9c-4e83-9db9-db105f664da4\" (UID: \"6279d28e-2c9c-4e83-9db9-db105f664da4\") " Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.438931 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6279d28e-2c9c-4e83-9db9-db105f664da4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6279d28e-2c9c-4e83-9db9-db105f664da4" (UID: "6279d28e-2c9c-4e83-9db9-db105f664da4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.438997 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1-operator-scripts\") pod \"4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1\" (UID: \"4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1\") " Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.439553 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1" (UID: "4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.440101 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-shz29\" (UniqueName: \"kubernetes.io/projected/cb36afed-98e0-460c-ac13-f09334f4334e-kube-api-access-shz29\") pod \"cb36afed-98e0-460c-ac13-f09334f4334e\" (UID: \"cb36afed-98e0-460c-ac13-f09334f4334e\") " Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.440625 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kldhj\" (UniqueName: \"kubernetes.io/projected/4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1-kube-api-access-kldhj\") pod \"4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1\" (UID: \"4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1\") " Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.440754 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bbt7q\" (UniqueName: \"kubernetes.io/projected/6279d28e-2c9c-4e83-9db9-db105f664da4-kube-api-access-bbt7q\") pod \"6279d28e-2c9c-4e83-9db9-db105f664da4\" (UID: \"6279d28e-2c9c-4e83-9db9-db105f664da4\") " Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.441422 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c42a1abb-5389-40ec-b9b3-c7e7c3ea6926-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.441749 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-svbpb\" (UniqueName: \"kubernetes.io/projected/c42a1abb-5389-40ec-b9b3-c7e7c3ea6926-kube-api-access-svbpb\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.441763 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/cb36afed-98e0-460c-ac13-f09334f4334e-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.441775 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6279d28e-2c9c-4e83-9db9-db105f664da4-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.441788 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.444292 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb36afed-98e0-460c-ac13-f09334f4334e-kube-api-access-shz29" (OuterVolumeSpecName: "kube-api-access-shz29") pod "cb36afed-98e0-460c-ac13-f09334f4334e" (UID: "cb36afed-98e0-460c-ac13-f09334f4334e"). InnerVolumeSpecName "kube-api-access-shz29". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.445586 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1-kube-api-access-kldhj" (OuterVolumeSpecName: "kube-api-access-kldhj") pod "4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1" (UID: "4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1"). InnerVolumeSpecName "kube-api-access-kldhj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.446064 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6279d28e-2c9c-4e83-9db9-db105f664da4-kube-api-access-bbt7q" (OuterVolumeSpecName: "kube-api-access-bbt7q") pod "6279d28e-2c9c-4e83-9db9-db105f664da4" (UID: "6279d28e-2c9c-4e83-9db9-db105f664da4"). InnerVolumeSpecName "kube-api-access-bbt7q". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.447367 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-a8f0-account-create-update-2nvzk" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.543155 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ba33954f-ac40-4d09-8e9b-500d59a92729-var-run-ovn\") pod \"ba33954f-ac40-4d09-8e9b-500d59a92729\" (UID: \"ba33954f-ac40-4d09-8e9b-500d59a92729\") " Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.543208 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9vxhr\" (UniqueName: \"kubernetes.io/projected/d4ab262f-f627-4891-9fd7-efc5e0e8a2eb-kube-api-access-9vxhr\") pod \"d4ab262f-f627-4891-9fd7-efc5e0e8a2eb\" (UID: \"d4ab262f-f627-4891-9fd7-efc5e0e8a2eb\") " Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.543259 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ba33954f-ac40-4d09-8e9b-500d59a92729-var-run\") pod \"ba33954f-ac40-4d09-8e9b-500d59a92729\" (UID: \"ba33954f-ac40-4d09-8e9b-500d59a92729\") " Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.543263 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ba33954f-ac40-4d09-8e9b-500d59a92729-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "ba33954f-ac40-4d09-8e9b-500d59a92729" (UID: "ba33954f-ac40-4d09-8e9b-500d59a92729"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.543293 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8785f\" (UniqueName: \"kubernetes.io/projected/ba33954f-ac40-4d09-8e9b-500d59a92729-kube-api-access-8785f\") pod \"ba33954f-ac40-4d09-8e9b-500d59a92729\" (UID: \"ba33954f-ac40-4d09-8e9b-500d59a92729\") " Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.543321 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4ab262f-f627-4891-9fd7-efc5e0e8a2eb-operator-scripts\") pod \"d4ab262f-f627-4891-9fd7-efc5e0e8a2eb\" (UID: \"d4ab262f-f627-4891-9fd7-efc5e0e8a2eb\") " Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.543337 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ba33954f-ac40-4d09-8e9b-500d59a92729-var-run" (OuterVolumeSpecName: "var-run") pod "ba33954f-ac40-4d09-8e9b-500d59a92729" (UID: "ba33954f-ac40-4d09-8e9b-500d59a92729"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.543369 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ba33954f-ac40-4d09-8e9b-500d59a92729-additional-scripts\") pod \"ba33954f-ac40-4d09-8e9b-500d59a92729\" (UID: \"ba33954f-ac40-4d09-8e9b-500d59a92729\") " Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.543384 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ba33954f-ac40-4d09-8e9b-500d59a92729-scripts\") pod \"ba33954f-ac40-4d09-8e9b-500d59a92729\" (UID: \"ba33954f-ac40-4d09-8e9b-500d59a92729\") " Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.543414 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63574be5-6a79-44af-b36a-2d32bd676edd-operator-scripts\") pod \"63574be5-6a79-44af-b36a-2d32bd676edd\" (UID: \"63574be5-6a79-44af-b36a-2d32bd676edd\") " Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.543431 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pf6cr\" (UniqueName: \"kubernetes.io/projected/63574be5-6a79-44af-b36a-2d32bd676edd-kube-api-access-pf6cr\") pod \"63574be5-6a79-44af-b36a-2d32bd676edd\" (UID: \"63574be5-6a79-44af-b36a-2d32bd676edd\") " Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.543473 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g2r7h\" (UniqueName: \"kubernetes.io/projected/4cae5a46-3cc2-4b42-bf1b-4c429c489f81-kube-api-access-g2r7h\") pod \"4cae5a46-3cc2-4b42-bf1b-4c429c489f81\" (UID: \"4cae5a46-3cc2-4b42-bf1b-4c429c489f81\") " Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.543506 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ba33954f-ac40-4d09-8e9b-500d59a92729-var-log-ovn\") pod \"ba33954f-ac40-4d09-8e9b-500d59a92729\" (UID: \"ba33954f-ac40-4d09-8e9b-500d59a92729\") " Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.543527 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4cae5a46-3cc2-4b42-bf1b-4c429c489f81-operator-scripts\") pod \"4cae5a46-3cc2-4b42-bf1b-4c429c489f81\" (UID: \"4cae5a46-3cc2-4b42-bf1b-4c429c489f81\") " Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.543825 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-shz29\" (UniqueName: \"kubernetes.io/projected/cb36afed-98e0-460c-ac13-f09334f4334e-kube-api-access-shz29\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.543837 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kldhj\" (UniqueName: \"kubernetes.io/projected/4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1-kube-api-access-kldhj\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.543851 4982 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ba33954f-ac40-4d09-8e9b-500d59a92729-var-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.543878 4982 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: 
\"kubernetes.io/host-path/ba33954f-ac40-4d09-8e9b-500d59a92729-var-run\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.543888 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bbt7q\" (UniqueName: \"kubernetes.io/projected/6279d28e-2c9c-4e83-9db9-db105f664da4-kube-api-access-bbt7q\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.543982 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63574be5-6a79-44af-b36a-2d32bd676edd-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "63574be5-6a79-44af-b36a-2d32bd676edd" (UID: "63574be5-6a79-44af-b36a-2d32bd676edd"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.544247 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4cae5a46-3cc2-4b42-bf1b-4c429c489f81-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4cae5a46-3cc2-4b42-bf1b-4c429c489f81" (UID: "4cae5a46-3cc2-4b42-bf1b-4c429c489f81"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.544329 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ba33954f-ac40-4d09-8e9b-500d59a92729-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "ba33954f-ac40-4d09-8e9b-500d59a92729" (UID: "ba33954f-ac40-4d09-8e9b-500d59a92729"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.544758 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4ab262f-f627-4891-9fd7-efc5e0e8a2eb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d4ab262f-f627-4891-9fd7-efc5e0e8a2eb" (UID: "d4ab262f-f627-4891-9fd7-efc5e0e8a2eb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.544904 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba33954f-ac40-4d09-8e9b-500d59a92729-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "ba33954f-ac40-4d09-8e9b-500d59a92729" (UID: "ba33954f-ac40-4d09-8e9b-500d59a92729"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.545203 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba33954f-ac40-4d09-8e9b-500d59a92729-scripts" (OuterVolumeSpecName: "scripts") pod "ba33954f-ac40-4d09-8e9b-500d59a92729" (UID: "ba33954f-ac40-4d09-8e9b-500d59a92729"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.546462 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4ab262f-f627-4891-9fd7-efc5e0e8a2eb-kube-api-access-9vxhr" (OuterVolumeSpecName: "kube-api-access-9vxhr") pod "d4ab262f-f627-4891-9fd7-efc5e0e8a2eb" (UID: "d4ab262f-f627-4891-9fd7-efc5e0e8a2eb"). InnerVolumeSpecName "kube-api-access-9vxhr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.547303 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63574be5-6a79-44af-b36a-2d32bd676edd-kube-api-access-pf6cr" (OuterVolumeSpecName: "kube-api-access-pf6cr") pod "63574be5-6a79-44af-b36a-2d32bd676edd" (UID: "63574be5-6a79-44af-b36a-2d32bd676edd"). InnerVolumeSpecName "kube-api-access-pf6cr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.547466 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba33954f-ac40-4d09-8e9b-500d59a92729-kube-api-access-8785f" (OuterVolumeSpecName: "kube-api-access-8785f") pod "ba33954f-ac40-4d09-8e9b-500d59a92729" (UID: "ba33954f-ac40-4d09-8e9b-500d59a92729"). InnerVolumeSpecName "kube-api-access-8785f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.547670 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4cae5a46-3cc2-4b42-bf1b-4c429c489f81-kube-api-access-g2r7h" (OuterVolumeSpecName: "kube-api-access-g2r7h") pod "4cae5a46-3cc2-4b42-bf1b-4c429c489f81" (UID: "4cae5a46-3cc2-4b42-bf1b-4c429c489f81"). InnerVolumeSpecName "kube-api-access-g2r7h". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.645801 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63574be5-6a79-44af-b36a-2d32bd676edd-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.645838 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pf6cr\" (UniqueName: \"kubernetes.io/projected/63574be5-6a79-44af-b36a-2d32bd676edd-kube-api-access-pf6cr\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.645862 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g2r7h\" (UniqueName: \"kubernetes.io/projected/4cae5a46-3cc2-4b42-bf1b-4c429c489f81-kube-api-access-g2r7h\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.645874 4982 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ba33954f-ac40-4d09-8e9b-500d59a92729-var-log-ovn\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.645883 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4cae5a46-3cc2-4b42-bf1b-4c429c489f81-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.645893 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9vxhr\" (UniqueName: \"kubernetes.io/projected/d4ab262f-f627-4891-9fd7-efc5e0e8a2eb-kube-api-access-9vxhr\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.645901 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8785f\" (UniqueName: \"kubernetes.io/projected/ba33954f-ac40-4d09-8e9b-500d59a92729-kube-api-access-8785f\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.645910 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/d4ab262f-f627-4891-9fd7-efc5e0e8a2eb-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.645920 4982 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ba33954f-ac40-4d09-8e9b-500d59a92729-additional-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.645930 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ba33954f-ac40-4d09-8e9b-500d59a92729-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.983170 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-a8f0-account-create-update-2nvzk" event={"ID":"4cae5a46-3cc2-4b42-bf1b-4c429c489f81","Type":"ContainerDied","Data":"8d33bfc3a78ed4574f59d8c974c3a9022f99845a74acc90814f64822f6400c37"} Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.983429 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8d33bfc3a78ed4574f59d8c974c3a9022f99845a74acc90814f64822f6400c37" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.983191 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-a8f0-account-create-update-2nvzk" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.985124 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-4zhph" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.985154 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-4zhph" event={"ID":"cb36afed-98e0-460c-ac13-f09334f4334e","Type":"ContainerDied","Data":"a74952a742fa6a3b12fbde3bb7a0b9b0664e79ffc6e52f90d5abcbef736988e0"} Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.985201 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a74952a742fa6a3b12fbde3bb7a0b9b0664e79ffc6e52f90d5abcbef736988e0" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.991653 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-8a3c-account-create-update-q6nqg" event={"ID":"4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1","Type":"ContainerDied","Data":"f517eba84e83fa33eb46ceee1ae1c4881e11bfe3ec9bb017c2c60554dd3986ed"} Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.991694 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f517eba84e83fa33eb46ceee1ae1c4881e11bfe3ec9bb017c2c60554dd3986ed" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.991775 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-8a3c-account-create-update-q6nqg" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.996752 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-x2w8g" Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.996795 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-x2w8g" event={"ID":"c42a1abb-5389-40ec-b9b3-c7e7c3ea6926","Type":"ContainerDied","Data":"e491a1a73de0ffbf1d70001f8368865965449b8e7022b2e50fe113ea6e4ae427"} Jan 22 06:06:28 crc kubenswrapper[4982]: I0122 06:06:28.997130 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e491a1a73de0ffbf1d70001f8368865965449b8e7022b2e50fe113ea6e4ae427" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.004079 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-vdfzh" event={"ID":"6e75f34c-23aa-46c5-a382-7ff49efa327e","Type":"ContainerStarted","Data":"7513f113fd8c4c67cbc53fb0a733846da65c7fce1193f50b23fc013ac9b0faa4"} Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.005837 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-4mft6" event={"ID":"d4ab262f-f627-4891-9fd7-efc5e0e8a2eb","Type":"ContainerDied","Data":"209c9219b962d916aa47847c0525a61a8a20d3794a21c063c00787678c2c6c54"} Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.005890 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="209c9219b962d916aa47847c0525a61a8a20d3794a21c063c00787678c2c6c54" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.005893 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-4mft6" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.015268 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-a9e6-account-create-update-nmnpj" event={"ID":"6279d28e-2c9c-4e83-9db9-db105f664da4","Type":"ContainerDied","Data":"bb020ab6f2af1416adaaba071c0f6ae75c17cc6250d520e0a8acdacde6b6b59d"} Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.015314 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bb020ab6f2af1416adaaba071c0f6ae75c17cc6250d520e0a8acdacde6b6b59d" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.015293 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-a9e6-account-create-update-nmnpj" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.018182 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-4z48g-config-fmr98" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.018190 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4z48g-config-fmr98" event={"ID":"ba33954f-ac40-4d09-8e9b-500d59a92729","Type":"ContainerDied","Data":"b8a02058e38ca97279728c3fbcc050a12b457e43c989fa44ce7019119fb31b3c"} Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.018224 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b8a02058e38ca97279728c3fbcc050a12b457e43c989fa44ce7019119fb31b3c" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.019881 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-nfq8n" event={"ID":"63574be5-6a79-44af-b36a-2d32bd676edd","Type":"ContainerDied","Data":"19bb62bac6623b870651ce0f3d7a5189380c6376a8590b7f6a93e9cf18f06546"} Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.019915 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="19bb62bac6623b870651ce0f3d7a5189380c6376a8590b7f6a93e9cf18f06546" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.019976 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-nfq8n" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.037173 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-vdfzh" podStartSLOduration=2.346061524 podStartE2EDuration="8.03705035s" podCreationTimestamp="2026-01-22 06:06:21 +0000 UTC" firstStartedPulling="2026-01-22 06:06:22.485272875 +0000 UTC m=+1243.323910878" lastFinishedPulling="2026-01-22 06:06:28.176261691 +0000 UTC m=+1249.014899704" observedRunningTime="2026-01-22 06:06:29.032139903 +0000 UTC m=+1249.870777906" watchObservedRunningTime="2026-01-22 06:06:29.03705035 +0000 UTC m=+1249.875688353" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.554205 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-4z48g-config-fmr98"] Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.572775 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-4z48g-config-fmr98"] Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.627846 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-4z48g-config-kx2vg"] Jan 22 06:06:29 crc kubenswrapper[4982]: E0122 06:06:29.629303 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb36afed-98e0-460c-ac13-f09334f4334e" containerName="mariadb-database-create" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.629326 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb36afed-98e0-460c-ac13-f09334f4334e" containerName="mariadb-database-create" Jan 22 06:06:29 crc kubenswrapper[4982]: E0122 06:06:29.629342 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4ab262f-f627-4891-9fd7-efc5e0e8a2eb" containerName="mariadb-account-create-update" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.629351 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4ab262f-f627-4891-9fd7-efc5e0e8a2eb" containerName="mariadb-account-create-update" Jan 22 06:06:29 crc kubenswrapper[4982]: E0122 06:06:29.629362 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba33954f-ac40-4d09-8e9b-500d59a92729" containerName="ovn-config" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.629369 4982 
state_mem.go:107] "Deleted CPUSet assignment" podUID="ba33954f-ac40-4d09-8e9b-500d59a92729" containerName="ovn-config" Jan 22 06:06:29 crc kubenswrapper[4982]: E0122 06:06:29.629377 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63574be5-6a79-44af-b36a-2d32bd676edd" containerName="mariadb-database-create" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.629384 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="63574be5-6a79-44af-b36a-2d32bd676edd" containerName="mariadb-database-create" Jan 22 06:06:29 crc kubenswrapper[4982]: E0122 06:06:29.629399 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6279d28e-2c9c-4e83-9db9-db105f664da4" containerName="mariadb-account-create-update" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.629406 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="6279d28e-2c9c-4e83-9db9-db105f664da4" containerName="mariadb-account-create-update" Jan 22 06:06:29 crc kubenswrapper[4982]: E0122 06:06:29.629429 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1" containerName="mariadb-account-create-update" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.629437 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1" containerName="mariadb-account-create-update" Jan 22 06:06:29 crc kubenswrapper[4982]: E0122 06:06:29.629451 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4cae5a46-3cc2-4b42-bf1b-4c429c489f81" containerName="mariadb-account-create-update" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.629458 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4cae5a46-3cc2-4b42-bf1b-4c429c489f81" containerName="mariadb-account-create-update" Jan 22 06:06:29 crc kubenswrapper[4982]: E0122 06:06:29.629468 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c42a1abb-5389-40ec-b9b3-c7e7c3ea6926" containerName="mariadb-database-create" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.629474 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="c42a1abb-5389-40ec-b9b3-c7e7c3ea6926" containerName="mariadb-database-create" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.629646 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="c42a1abb-5389-40ec-b9b3-c7e7c3ea6926" containerName="mariadb-database-create" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.629665 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="6279d28e-2c9c-4e83-9db9-db105f664da4" containerName="mariadb-account-create-update" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.629679 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="4cae5a46-3cc2-4b42-bf1b-4c429c489f81" containerName="mariadb-account-create-update" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.629688 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb36afed-98e0-460c-ac13-f09334f4334e" containerName="mariadb-database-create" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.629698 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba33954f-ac40-4d09-8e9b-500d59a92729" containerName="ovn-config" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.629706 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4ab262f-f627-4891-9fd7-efc5e0e8a2eb" containerName="mariadb-account-create-update" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.629723 
4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1" containerName="mariadb-account-create-update" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.629732 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="63574be5-6a79-44af-b36a-2d32bd676edd" containerName="mariadb-database-create" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.630372 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4z48g-config-kx2vg" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.639765 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.648422 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4z48g-config-kx2vg"] Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.735665 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba33954f-ac40-4d09-8e9b-500d59a92729" path="/var/lib/kubelet/pods/ba33954f-ac40-4d09-8e9b-500d59a92729/volumes" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.765711 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-scripts\") pod \"ovn-controller-4z48g-config-kx2vg\" (UID: \"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93\") " pod="openstack/ovn-controller-4z48g-config-kx2vg" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.765762 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-additional-scripts\") pod \"ovn-controller-4z48g-config-kx2vg\" (UID: \"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93\") " pod="openstack/ovn-controller-4z48g-config-kx2vg" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.765813 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-var-log-ovn\") pod \"ovn-controller-4z48g-config-kx2vg\" (UID: \"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93\") " pod="openstack/ovn-controller-4z48g-config-kx2vg" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.765829 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-var-run-ovn\") pod \"ovn-controller-4z48g-config-kx2vg\" (UID: \"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93\") " pod="openstack/ovn-controller-4z48g-config-kx2vg" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.765896 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ns74c\" (UniqueName: \"kubernetes.io/projected/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-kube-api-access-ns74c\") pod \"ovn-controller-4z48g-config-kx2vg\" (UID: \"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93\") " pod="openstack/ovn-controller-4z48g-config-kx2vg" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.765915 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-var-run\") pod \"ovn-controller-4z48g-config-kx2vg\" (UID: 
\"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93\") " pod="openstack/ovn-controller-4z48g-config-kx2vg" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.867006 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-additional-scripts\") pod \"ovn-controller-4z48g-config-kx2vg\" (UID: \"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93\") " pod="openstack/ovn-controller-4z48g-config-kx2vg" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.867074 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-var-log-ovn\") pod \"ovn-controller-4z48g-config-kx2vg\" (UID: \"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93\") " pod="openstack/ovn-controller-4z48g-config-kx2vg" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.867093 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-var-run-ovn\") pod \"ovn-controller-4z48g-config-kx2vg\" (UID: \"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93\") " pod="openstack/ovn-controller-4z48g-config-kx2vg" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.867137 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ns74c\" (UniqueName: \"kubernetes.io/projected/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-kube-api-access-ns74c\") pod \"ovn-controller-4z48g-config-kx2vg\" (UID: \"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93\") " pod="openstack/ovn-controller-4z48g-config-kx2vg" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.867158 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-var-run\") pod \"ovn-controller-4z48g-config-kx2vg\" (UID: \"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93\") " pod="openstack/ovn-controller-4z48g-config-kx2vg" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.867222 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-scripts\") pod \"ovn-controller-4z48g-config-kx2vg\" (UID: \"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93\") " pod="openstack/ovn-controller-4z48g-config-kx2vg" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.867676 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-var-run-ovn\") pod \"ovn-controller-4z48g-config-kx2vg\" (UID: \"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93\") " pod="openstack/ovn-controller-4z48g-config-kx2vg" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.867797 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-var-run\") pod \"ovn-controller-4z48g-config-kx2vg\" (UID: \"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93\") " pod="openstack/ovn-controller-4z48g-config-kx2vg" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.867797 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-var-log-ovn\") pod \"ovn-controller-4z48g-config-kx2vg\" (UID: 
\"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93\") " pod="openstack/ovn-controller-4z48g-config-kx2vg" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.868533 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-additional-scripts\") pod \"ovn-controller-4z48g-config-kx2vg\" (UID: \"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93\") " pod="openstack/ovn-controller-4z48g-config-kx2vg" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.869345 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-scripts\") pod \"ovn-controller-4z48g-config-kx2vg\" (UID: \"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93\") " pod="openstack/ovn-controller-4z48g-config-kx2vg" Jan 22 06:06:29 crc kubenswrapper[4982]: I0122 06:06:29.888012 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ns74c\" (UniqueName: \"kubernetes.io/projected/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-kube-api-access-ns74c\") pod \"ovn-controller-4z48g-config-kx2vg\" (UID: \"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93\") " pod="openstack/ovn-controller-4z48g-config-kx2vg" Jan 22 06:06:30 crc kubenswrapper[4982]: I0122 06:06:30.025898 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4z48g-config-kx2vg" Jan 22 06:06:30 crc kubenswrapper[4982]: I0122 06:06:30.030567 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0e66de65-9639-4397-bc39-dfcf0c325dff","Type":"ContainerStarted","Data":"426ef7c3195021e87134941727b96e87ac5a055405603c85838becb9891299c1"} Jan 22 06:06:30 crc kubenswrapper[4982]: I0122 06:06:30.030613 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0e66de65-9639-4397-bc39-dfcf0c325dff","Type":"ContainerStarted","Data":"f4db4555825c7a98050d3fa64f127578b1f2415604cf41cb3cb729a79fc23f72"} Jan 22 06:06:30 crc kubenswrapper[4982]: I0122 06:06:30.030627 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0e66de65-9639-4397-bc39-dfcf0c325dff","Type":"ContainerStarted","Data":"be63c0d0c6adf69366e7b0baa98c3881df8d90e7905a24b623cae778f04a2eed"} Jan 22 06:06:30 crc kubenswrapper[4982]: I0122 06:06:30.030636 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0e66de65-9639-4397-bc39-dfcf0c325dff","Type":"ContainerStarted","Data":"f0e4c628f7ebdabca21ce9e2c5341af6fc155bfe4b2913c68c56bd267c6a4629"} Jan 22 06:06:30 crc kubenswrapper[4982]: I0122 06:06:30.600050 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4z48g-config-kx2vg"] Jan 22 06:06:31 crc kubenswrapper[4982]: I0122 06:06:31.045528 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4z48g-config-kx2vg" event={"ID":"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93","Type":"ContainerStarted","Data":"2b2c682f07eb6e2b0d623f4197d9d67fbb42000c4195c66a67f09d6f6426e9bb"} Jan 22 06:06:32 crc kubenswrapper[4982]: I0122 06:06:32.055141 4982 generic.go:334] "Generic (PLEG): container finished" podID="1a0cfe7c-d080-4e26-bd8f-20ce456e3b93" containerID="c8f0e29e462a5168ff8d0e06215e4707292c464ad4ee80345d8a1e7288b9793e" exitCode=0 Jan 22 06:06:32 crc kubenswrapper[4982]: I0122 06:06:32.055255 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovn-controller-4z48g-config-kx2vg" event={"ID":"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93","Type":"ContainerDied","Data":"c8f0e29e462a5168ff8d0e06215e4707292c464ad4ee80345d8a1e7288b9793e"} Jan 22 06:06:32 crc kubenswrapper[4982]: I0122 06:06:32.065728 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0e66de65-9639-4397-bc39-dfcf0c325dff","Type":"ContainerStarted","Data":"e4748ea7b210414126713179ef219e82a0abc3f56fde6ec484724799d53bea45"} Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.079736 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0e66de65-9639-4397-bc39-dfcf0c325dff","Type":"ContainerStarted","Data":"2d157d8183c5ba1a3484d5097ce18a1c0be897ffeadde7df10cfd7adde58b171"} Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.080415 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0e66de65-9639-4397-bc39-dfcf0c325dff","Type":"ContainerStarted","Data":"21acccb66fe5a7601f47f21bf74eb39ca16b5d4458bb059e53d0b376d47e4e6e"} Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.080433 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0e66de65-9639-4397-bc39-dfcf0c325dff","Type":"ContainerStarted","Data":"b878ca638cdba06e768d1e1407882909e1a9532997c7390f2d2cc87458ce86aa"} Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.080445 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0e66de65-9639-4397-bc39-dfcf0c325dff","Type":"ContainerStarted","Data":"310a52e8c276d358ad53746988709b191603600640992af9703c7c83f46a3101"} Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.080457 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0e66de65-9639-4397-bc39-dfcf0c325dff","Type":"ContainerStarted","Data":"a386b99b03239bfd525b9cda2c117bcba26367449801201fb1b1a6f2e03e42aa"} Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.080468 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0e66de65-9639-4397-bc39-dfcf0c325dff","Type":"ContainerStarted","Data":"45257d2dda5df21dd5c33ca01b27b1606dde8ba4b98ce40f6c5bf2094829e0de"} Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.081345 4982 generic.go:334] "Generic (PLEG): container finished" podID="6e75f34c-23aa-46c5-a382-7ff49efa327e" containerID="7513f113fd8c4c67cbc53fb0a733846da65c7fce1193f50b23fc013ac9b0faa4" exitCode=0 Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.081532 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-vdfzh" event={"ID":"6e75f34c-23aa-46c5-a382-7ff49efa327e","Type":"ContainerDied","Data":"7513f113fd8c4c67cbc53fb0a733846da65c7fce1193f50b23fc013ac9b0faa4"} Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.121678 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=39.002629176 podStartE2EDuration="48.121659605s" podCreationTimestamp="2026-01-22 06:05:45 +0000 UTC" firstStartedPulling="2026-01-22 06:06:22.41800519 +0000 UTC m=+1243.256643193" lastFinishedPulling="2026-01-22 06:06:31.537035619 +0000 UTC m=+1252.375673622" observedRunningTime="2026-01-22 06:06:33.11309234 +0000 UTC m=+1253.951730343" watchObservedRunningTime="2026-01-22 06:06:33.121659605 +0000 UTC m=+1253.960297598" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.311152 
4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4z48g-config-kx2vg" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.393205 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8db84466c-kh4rj"] Jan 22 06:06:33 crc kubenswrapper[4982]: E0122 06:06:33.393555 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a0cfe7c-d080-4e26-bd8f-20ce456e3b93" containerName="ovn-config" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.393570 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a0cfe7c-d080-4e26-bd8f-20ce456e3b93" containerName="ovn-config" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.393716 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a0cfe7c-d080-4e26-bd8f-20ce456e3b93" containerName="ovn-config" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.394647 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8db84466c-kh4rj" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.399162 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.412576 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8db84466c-kh4rj"] Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.430281 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-additional-scripts\") pod \"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93\" (UID: \"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93\") " Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.430402 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ns74c\" (UniqueName: \"kubernetes.io/projected/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-kube-api-access-ns74c\") pod \"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93\" (UID: \"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93\") " Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.430457 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-var-run-ovn\") pod \"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93\" (UID: \"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93\") " Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.430553 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-var-log-ovn\") pod \"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93\" (UID: \"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93\") " Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.430612 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-var-run\") pod \"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93\" (UID: \"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93\") " Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.430645 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-scripts\") pod \"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93\" (UID: \"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93\") " Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 
06:06:33.430649 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "1a0cfe7c-d080-4e26-bd8f-20ce456e3b93" (UID: "1a0cfe7c-d080-4e26-bd8f-20ce456e3b93"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.430712 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-var-run" (OuterVolumeSpecName: "var-run") pod "1a0cfe7c-d080-4e26-bd8f-20ce456e3b93" (UID: "1a0cfe7c-d080-4e26-bd8f-20ce456e3b93"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.430776 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "1a0cfe7c-d080-4e26-bd8f-20ce456e3b93" (UID: "1a0cfe7c-d080-4e26-bd8f-20ce456e3b93"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.431021 4982 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-var-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.431042 4982 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-var-log-ovn\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.431053 4982 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-var-run\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.431231 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "1a0cfe7c-d080-4e26-bd8f-20ce456e3b93" (UID: "1a0cfe7c-d080-4e26-bd8f-20ce456e3b93"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.431634 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-scripts" (OuterVolumeSpecName: "scripts") pod "1a0cfe7c-d080-4e26-bd8f-20ce456e3b93" (UID: "1a0cfe7c-d080-4e26-bd8f-20ce456e3b93"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.434548 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-kube-api-access-ns74c" (OuterVolumeSpecName: "kube-api-access-ns74c") pod "1a0cfe7c-d080-4e26-bd8f-20ce456e3b93" (UID: "1a0cfe7c-d080-4e26-bd8f-20ce456e3b93"). InnerVolumeSpecName "kube-api-access-ns74c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.532537 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/34b58f4a-a0ac-4603-b0bf-355128fa6d31-dns-svc\") pod \"dnsmasq-dns-8db84466c-kh4rj\" (UID: \"34b58f4a-a0ac-4603-b0bf-355128fa6d31\") " pod="openstack/dnsmasq-dns-8db84466c-kh4rj" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.532582 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gc4xb\" (UniqueName: \"kubernetes.io/projected/34b58f4a-a0ac-4603-b0bf-355128fa6d31-kube-api-access-gc4xb\") pod \"dnsmasq-dns-8db84466c-kh4rj\" (UID: \"34b58f4a-a0ac-4603-b0bf-355128fa6d31\") " pod="openstack/dnsmasq-dns-8db84466c-kh4rj" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.532650 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/34b58f4a-a0ac-4603-b0bf-355128fa6d31-ovsdbserver-sb\") pod \"dnsmasq-dns-8db84466c-kh4rj\" (UID: \"34b58f4a-a0ac-4603-b0bf-355128fa6d31\") " pod="openstack/dnsmasq-dns-8db84466c-kh4rj" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.532778 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/34b58f4a-a0ac-4603-b0bf-355128fa6d31-dns-swift-storage-0\") pod \"dnsmasq-dns-8db84466c-kh4rj\" (UID: \"34b58f4a-a0ac-4603-b0bf-355128fa6d31\") " pod="openstack/dnsmasq-dns-8db84466c-kh4rj" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.532812 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/34b58f4a-a0ac-4603-b0bf-355128fa6d31-ovsdbserver-nb\") pod \"dnsmasq-dns-8db84466c-kh4rj\" (UID: \"34b58f4a-a0ac-4603-b0bf-355128fa6d31\") " pod="openstack/dnsmasq-dns-8db84466c-kh4rj" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.532835 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34b58f4a-a0ac-4603-b0bf-355128fa6d31-config\") pod \"dnsmasq-dns-8db84466c-kh4rj\" (UID: \"34b58f4a-a0ac-4603-b0bf-355128fa6d31\") " pod="openstack/dnsmasq-dns-8db84466c-kh4rj" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.532905 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ns74c\" (UniqueName: \"kubernetes.io/projected/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-kube-api-access-ns74c\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.532942 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.532952 4982 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93-additional-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.634485 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/34b58f4a-a0ac-4603-b0bf-355128fa6d31-ovsdbserver-sb\") 
pod \"dnsmasq-dns-8db84466c-kh4rj\" (UID: \"34b58f4a-a0ac-4603-b0bf-355128fa6d31\") " pod="openstack/dnsmasq-dns-8db84466c-kh4rj" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.634955 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/34b58f4a-a0ac-4603-b0bf-355128fa6d31-dns-swift-storage-0\") pod \"dnsmasq-dns-8db84466c-kh4rj\" (UID: \"34b58f4a-a0ac-4603-b0bf-355128fa6d31\") " pod="openstack/dnsmasq-dns-8db84466c-kh4rj" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.634987 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/34b58f4a-a0ac-4603-b0bf-355128fa6d31-ovsdbserver-nb\") pod \"dnsmasq-dns-8db84466c-kh4rj\" (UID: \"34b58f4a-a0ac-4603-b0bf-355128fa6d31\") " pod="openstack/dnsmasq-dns-8db84466c-kh4rj" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.635013 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34b58f4a-a0ac-4603-b0bf-355128fa6d31-config\") pod \"dnsmasq-dns-8db84466c-kh4rj\" (UID: \"34b58f4a-a0ac-4603-b0bf-355128fa6d31\") " pod="openstack/dnsmasq-dns-8db84466c-kh4rj" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.635064 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/34b58f4a-a0ac-4603-b0bf-355128fa6d31-dns-svc\") pod \"dnsmasq-dns-8db84466c-kh4rj\" (UID: \"34b58f4a-a0ac-4603-b0bf-355128fa6d31\") " pod="openstack/dnsmasq-dns-8db84466c-kh4rj" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.635097 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gc4xb\" (UniqueName: \"kubernetes.io/projected/34b58f4a-a0ac-4603-b0bf-355128fa6d31-kube-api-access-gc4xb\") pod \"dnsmasq-dns-8db84466c-kh4rj\" (UID: \"34b58f4a-a0ac-4603-b0bf-355128fa6d31\") " pod="openstack/dnsmasq-dns-8db84466c-kh4rj" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.635348 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/34b58f4a-a0ac-4603-b0bf-355128fa6d31-ovsdbserver-sb\") pod \"dnsmasq-dns-8db84466c-kh4rj\" (UID: \"34b58f4a-a0ac-4603-b0bf-355128fa6d31\") " pod="openstack/dnsmasq-dns-8db84466c-kh4rj" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.635622 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/34b58f4a-a0ac-4603-b0bf-355128fa6d31-dns-swift-storage-0\") pod \"dnsmasq-dns-8db84466c-kh4rj\" (UID: \"34b58f4a-a0ac-4603-b0bf-355128fa6d31\") " pod="openstack/dnsmasq-dns-8db84466c-kh4rj" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.635892 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34b58f4a-a0ac-4603-b0bf-355128fa6d31-config\") pod \"dnsmasq-dns-8db84466c-kh4rj\" (UID: \"34b58f4a-a0ac-4603-b0bf-355128fa6d31\") " pod="openstack/dnsmasq-dns-8db84466c-kh4rj" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.636343 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/34b58f4a-a0ac-4603-b0bf-355128fa6d31-ovsdbserver-nb\") pod \"dnsmasq-dns-8db84466c-kh4rj\" (UID: \"34b58f4a-a0ac-4603-b0bf-355128fa6d31\") " 
pod="openstack/dnsmasq-dns-8db84466c-kh4rj" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.636428 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/34b58f4a-a0ac-4603-b0bf-355128fa6d31-dns-svc\") pod \"dnsmasq-dns-8db84466c-kh4rj\" (UID: \"34b58f4a-a0ac-4603-b0bf-355128fa6d31\") " pod="openstack/dnsmasq-dns-8db84466c-kh4rj" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.669798 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gc4xb\" (UniqueName: \"kubernetes.io/projected/34b58f4a-a0ac-4603-b0bf-355128fa6d31-kube-api-access-gc4xb\") pod \"dnsmasq-dns-8db84466c-kh4rj\" (UID: \"34b58f4a-a0ac-4603-b0bf-355128fa6d31\") " pod="openstack/dnsmasq-dns-8db84466c-kh4rj" Jan 22 06:06:33 crc kubenswrapper[4982]: I0122 06:06:33.709428 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8db84466c-kh4rj" Jan 22 06:06:34 crc kubenswrapper[4982]: I0122 06:06:34.111565 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4z48g-config-kx2vg" event={"ID":"1a0cfe7c-d080-4e26-bd8f-20ce456e3b93","Type":"ContainerDied","Data":"2b2c682f07eb6e2b0d623f4197d9d67fbb42000c4195c66a67f09d6f6426e9bb"} Jan 22 06:06:34 crc kubenswrapper[4982]: I0122 06:06:34.111958 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2b2c682f07eb6e2b0d623f4197d9d67fbb42000c4195c66a67f09d6f6426e9bb" Jan 22 06:06:34 crc kubenswrapper[4982]: I0122 06:06:34.111603 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4z48g-config-kx2vg" Jan 22 06:06:34 crc kubenswrapper[4982]: I0122 06:06:34.190809 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8db84466c-kh4rj"] Jan 22 06:06:34 crc kubenswrapper[4982]: I0122 06:06:34.374574 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-vdfzh" Jan 22 06:06:34 crc kubenswrapper[4982]: I0122 06:06:34.375429 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-4z48g-config-kx2vg"] Jan 22 06:06:34 crc kubenswrapper[4982]: I0122 06:06:34.383907 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-4z48g-config-kx2vg"] Jan 22 06:06:34 crc kubenswrapper[4982]: I0122 06:06:34.454262 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e75f34c-23aa-46c5-a382-7ff49efa327e-config-data\") pod \"6e75f34c-23aa-46c5-a382-7ff49efa327e\" (UID: \"6e75f34c-23aa-46c5-a382-7ff49efa327e\") " Jan 22 06:06:34 crc kubenswrapper[4982]: I0122 06:06:34.454372 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e75f34c-23aa-46c5-a382-7ff49efa327e-combined-ca-bundle\") pod \"6e75f34c-23aa-46c5-a382-7ff49efa327e\" (UID: \"6e75f34c-23aa-46c5-a382-7ff49efa327e\") " Jan 22 06:06:34 crc kubenswrapper[4982]: I0122 06:06:34.454440 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5chtr\" (UniqueName: \"kubernetes.io/projected/6e75f34c-23aa-46c5-a382-7ff49efa327e-kube-api-access-5chtr\") pod \"6e75f34c-23aa-46c5-a382-7ff49efa327e\" (UID: \"6e75f34c-23aa-46c5-a382-7ff49efa327e\") " Jan 22 06:06:34 crc kubenswrapper[4982]: I0122 06:06:34.458726 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e75f34c-23aa-46c5-a382-7ff49efa327e-kube-api-access-5chtr" (OuterVolumeSpecName: "kube-api-access-5chtr") pod "6e75f34c-23aa-46c5-a382-7ff49efa327e" (UID: "6e75f34c-23aa-46c5-a382-7ff49efa327e"). InnerVolumeSpecName "kube-api-access-5chtr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:06:34 crc kubenswrapper[4982]: I0122 06:06:34.476589 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e75f34c-23aa-46c5-a382-7ff49efa327e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6e75f34c-23aa-46c5-a382-7ff49efa327e" (UID: "6e75f34c-23aa-46c5-a382-7ff49efa327e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:06:34 crc kubenswrapper[4982]: I0122 06:06:34.496536 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e75f34c-23aa-46c5-a382-7ff49efa327e-config-data" (OuterVolumeSpecName: "config-data") pod "6e75f34c-23aa-46c5-a382-7ff49efa327e" (UID: "6e75f34c-23aa-46c5-a382-7ff49efa327e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:06:34 crc kubenswrapper[4982]: I0122 06:06:34.556799 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e75f34c-23aa-46c5-a382-7ff49efa327e-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:34 crc kubenswrapper[4982]: I0122 06:06:34.557134 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e75f34c-23aa-46c5-a382-7ff49efa327e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:34 crc kubenswrapper[4982]: I0122 06:06:34.557144 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5chtr\" (UniqueName: \"kubernetes.io/projected/6e75f34c-23aa-46c5-a382-7ff49efa327e-kube-api-access-5chtr\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.124327 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-vdfzh" event={"ID":"6e75f34c-23aa-46c5-a382-7ff49efa327e","Type":"ContainerDied","Data":"4066521c4d58b2a17e3c4c40e430ff79d582edf28519cf258553171d0cd09623"} Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.124375 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4066521c4d58b2a17e3c4c40e430ff79d582edf28519cf258553171d0cd09623" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.124399 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-vdfzh" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.126473 4982 generic.go:334] "Generic (PLEG): container finished" podID="34b58f4a-a0ac-4603-b0bf-355128fa6d31" containerID="04e0fe1cd3617ad61aac84d12e9ec9a460c433fe7ca07d34c4941c6407c654a5" exitCode=0 Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.126512 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8db84466c-kh4rj" event={"ID":"34b58f4a-a0ac-4603-b0bf-355128fa6d31","Type":"ContainerDied","Data":"04e0fe1cd3617ad61aac84d12e9ec9a460c433fe7ca07d34c4941c6407c654a5"} Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.126538 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8db84466c-kh4rj" event={"ID":"34b58f4a-a0ac-4603-b0bf-355128fa6d31","Type":"ContainerStarted","Data":"055b96cdfb4ec5661c830a82bed09caa61f3e621db41931df63fa6672a695b06"} Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.412353 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8db84466c-kh4rj"] Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.426481 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-czx99"] Jan 22 06:06:35 crc kubenswrapper[4982]: E0122 06:06:35.429701 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e75f34c-23aa-46c5-a382-7ff49efa327e" containerName="keystone-db-sync" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.429730 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e75f34c-23aa-46c5-a382-7ff49efa327e" containerName="keystone-db-sync" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.429918 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e75f34c-23aa-46c5-a382-7ff49efa327e" containerName="keystone-db-sync" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.430455 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-czx99" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.437331 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.437559 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.437681 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.437798 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.438106 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-jckfq" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.452999 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-czx99"] Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.494774 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-767d96458c-447gg"] Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.496095 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-767d96458c-447gg" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.521709 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-767d96458c-447gg"] Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.576371 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7nzl\" (UniqueName: \"kubernetes.io/projected/72e09197-e7f0-42df-ae48-fcb660b191cd-kube-api-access-w7nzl\") pod \"dnsmasq-dns-767d96458c-447gg\" (UID: \"72e09197-e7f0-42df-ae48-fcb660b191cd\") " pod="openstack/dnsmasq-dns-767d96458c-447gg" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.576425 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72e09197-e7f0-42df-ae48-fcb660b191cd-ovsdbserver-nb\") pod \"dnsmasq-dns-767d96458c-447gg\" (UID: \"72e09197-e7f0-42df-ae48-fcb660b191cd\") " pod="openstack/dnsmasq-dns-767d96458c-447gg" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.576449 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-combined-ca-bundle\") pod \"keystone-bootstrap-czx99\" (UID: \"24c6514b-0780-4a7b-8cb3-697fa2aad8e1\") " pod="openstack/keystone-bootstrap-czx99" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.576500 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72e09197-e7f0-42df-ae48-fcb660b191cd-config\") pod \"dnsmasq-dns-767d96458c-447gg\" (UID: \"72e09197-e7f0-42df-ae48-fcb660b191cd\") " pod="openstack/dnsmasq-dns-767d96458c-447gg" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.576521 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/72e09197-e7f0-42df-ae48-fcb660b191cd-dns-swift-storage-0\") pod \"dnsmasq-dns-767d96458c-447gg\" (UID: \"72e09197-e7f0-42df-ae48-fcb660b191cd\") " 
pod="openstack/dnsmasq-dns-767d96458c-447gg" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.576578 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-credential-keys\") pod \"keystone-bootstrap-czx99\" (UID: \"24c6514b-0780-4a7b-8cb3-697fa2aad8e1\") " pod="openstack/keystone-bootstrap-czx99" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.576606 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-fernet-keys\") pod \"keystone-bootstrap-czx99\" (UID: \"24c6514b-0780-4a7b-8cb3-697fa2aad8e1\") " pod="openstack/keystone-bootstrap-czx99" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.576636 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72e09197-e7f0-42df-ae48-fcb660b191cd-dns-svc\") pod \"dnsmasq-dns-767d96458c-447gg\" (UID: \"72e09197-e7f0-42df-ae48-fcb660b191cd\") " pod="openstack/dnsmasq-dns-767d96458c-447gg" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.576668 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-scripts\") pod \"keystone-bootstrap-czx99\" (UID: \"24c6514b-0780-4a7b-8cb3-697fa2aad8e1\") " pod="openstack/keystone-bootstrap-czx99" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.576822 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-config-data\") pod \"keystone-bootstrap-czx99\" (UID: \"24c6514b-0780-4a7b-8cb3-697fa2aad8e1\") " pod="openstack/keystone-bootstrap-czx99" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.576945 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjhcp\" (UniqueName: \"kubernetes.io/projected/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-kube-api-access-mjhcp\") pod \"keystone-bootstrap-czx99\" (UID: \"24c6514b-0780-4a7b-8cb3-697fa2aad8e1\") " pod="openstack/keystone-bootstrap-czx99" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.576988 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72e09197-e7f0-42df-ae48-fcb660b191cd-ovsdbserver-sb\") pod \"dnsmasq-dns-767d96458c-447gg\" (UID: \"72e09197-e7f0-42df-ae48-fcb660b191cd\") " pod="openstack/dnsmasq-dns-767d96458c-447gg" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.609259 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-vtfmh"] Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.610601 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-vtfmh" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.613386 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-sc2js" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.613641 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.617932 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.619957 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.627155 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.627345 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.627480 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.636357 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-gms7h"] Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.637457 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-gms7h" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.641169 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.641587 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.641708 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-sr6nk" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.648141 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-gms7h"] Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.678331 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.678677 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-scripts\") pod \"keystone-bootstrap-czx99\" (UID: \"24c6514b-0780-4a7b-8cb3-697fa2aad8e1\") " pod="openstack/keystone-bootstrap-czx99" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.678718 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-config-data\") pod \"keystone-bootstrap-czx99\" (UID: \"24c6514b-0780-4a7b-8cb3-697fa2aad8e1\") " pod="openstack/keystone-bootstrap-czx99" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.678754 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjhcp\" (UniqueName: \"kubernetes.io/projected/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-kube-api-access-mjhcp\") pod \"keystone-bootstrap-czx99\" (UID: \"24c6514b-0780-4a7b-8cb3-697fa2aad8e1\") " pod="openstack/keystone-bootstrap-czx99" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.678781 4982 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72e09197-e7f0-42df-ae48-fcb660b191cd-ovsdbserver-sb\") pod \"dnsmasq-dns-767d96458c-447gg\" (UID: \"72e09197-e7f0-42df-ae48-fcb660b191cd\") " pod="openstack/dnsmasq-dns-767d96458c-447gg" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.678823 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7nzl\" (UniqueName: \"kubernetes.io/projected/72e09197-e7f0-42df-ae48-fcb660b191cd-kube-api-access-w7nzl\") pod \"dnsmasq-dns-767d96458c-447gg\" (UID: \"72e09197-e7f0-42df-ae48-fcb660b191cd\") " pod="openstack/dnsmasq-dns-767d96458c-447gg" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.678849 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72e09197-e7f0-42df-ae48-fcb660b191cd-ovsdbserver-nb\") pod \"dnsmasq-dns-767d96458c-447gg\" (UID: \"72e09197-e7f0-42df-ae48-fcb660b191cd\") " pod="openstack/dnsmasq-dns-767d96458c-447gg" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.678897 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-combined-ca-bundle\") pod \"keystone-bootstrap-czx99\" (UID: \"24c6514b-0780-4a7b-8cb3-697fa2aad8e1\") " pod="openstack/keystone-bootstrap-czx99" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.678948 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72e09197-e7f0-42df-ae48-fcb660b191cd-config\") pod \"dnsmasq-dns-767d96458c-447gg\" (UID: \"72e09197-e7f0-42df-ae48-fcb660b191cd\") " pod="openstack/dnsmasq-dns-767d96458c-447gg" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.678970 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/72e09197-e7f0-42df-ae48-fcb660b191cd-dns-swift-storage-0\") pod \"dnsmasq-dns-767d96458c-447gg\" (UID: \"72e09197-e7f0-42df-ae48-fcb660b191cd\") " pod="openstack/dnsmasq-dns-767d96458c-447gg" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.679023 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-credential-keys\") pod \"keystone-bootstrap-czx99\" (UID: \"24c6514b-0780-4a7b-8cb3-697fa2aad8e1\") " pod="openstack/keystone-bootstrap-czx99" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.679046 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-fernet-keys\") pod \"keystone-bootstrap-czx99\" (UID: \"24c6514b-0780-4a7b-8cb3-697fa2aad8e1\") " pod="openstack/keystone-bootstrap-czx99" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.679061 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72e09197-e7f0-42df-ae48-fcb660b191cd-dns-svc\") pod \"dnsmasq-dns-767d96458c-447gg\" (UID: \"72e09197-e7f0-42df-ae48-fcb660b191cd\") " pod="openstack/dnsmasq-dns-767d96458c-447gg" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.679897 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" 
(UniqueName: \"kubernetes.io/configmap/72e09197-e7f0-42df-ae48-fcb660b191cd-dns-svc\") pod \"dnsmasq-dns-767d96458c-447gg\" (UID: \"72e09197-e7f0-42df-ae48-fcb660b191cd\") " pod="openstack/dnsmasq-dns-767d96458c-447gg" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.680277 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72e09197-e7f0-42df-ae48-fcb660b191cd-ovsdbserver-nb\") pod \"dnsmasq-dns-767d96458c-447gg\" (UID: \"72e09197-e7f0-42df-ae48-fcb660b191cd\") " pod="openstack/dnsmasq-dns-767d96458c-447gg" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.680755 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/72e09197-e7f0-42df-ae48-fcb660b191cd-dns-swift-storage-0\") pod \"dnsmasq-dns-767d96458c-447gg\" (UID: \"72e09197-e7f0-42df-ae48-fcb660b191cd\") " pod="openstack/dnsmasq-dns-767d96458c-447gg" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.681405 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72e09197-e7f0-42df-ae48-fcb660b191cd-ovsdbserver-sb\") pod \"dnsmasq-dns-767d96458c-447gg\" (UID: \"72e09197-e7f0-42df-ae48-fcb660b191cd\") " pod="openstack/dnsmasq-dns-767d96458c-447gg" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.682254 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72e09197-e7f0-42df-ae48-fcb660b191cd-config\") pod \"dnsmasq-dns-767d96458c-447gg\" (UID: \"72e09197-e7f0-42df-ae48-fcb660b191cd\") " pod="openstack/dnsmasq-dns-767d96458c-447gg" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.691912 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-fernet-keys\") pod \"keystone-bootstrap-czx99\" (UID: \"24c6514b-0780-4a7b-8cb3-697fa2aad8e1\") " pod="openstack/keystone-bootstrap-czx99" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.693838 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-vtfmh"] Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.699741 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-credential-keys\") pod \"keystone-bootstrap-czx99\" (UID: \"24c6514b-0780-4a7b-8cb3-697fa2aad8e1\") " pod="openstack/keystone-bootstrap-czx99" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.700474 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-scripts\") pod \"keystone-bootstrap-czx99\" (UID: \"24c6514b-0780-4a7b-8cb3-697fa2aad8e1\") " pod="openstack/keystone-bootstrap-czx99" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.713649 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-combined-ca-bundle\") pod \"keystone-bootstrap-czx99\" (UID: \"24c6514b-0780-4a7b-8cb3-697fa2aad8e1\") " pod="openstack/keystone-bootstrap-czx99" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.720596 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-config-data\") pod \"keystone-bootstrap-czx99\" (UID: \"24c6514b-0780-4a7b-8cb3-697fa2aad8e1\") " pod="openstack/keystone-bootstrap-czx99" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.787863 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbgzc\" (UniqueName: \"kubernetes.io/projected/59ce449e-073f-4c66-ade6-fa8448573827-kube-api-access-vbgzc\") pod \"neutron-db-sync-gms7h\" (UID: \"59ce449e-073f-4c66-ade6-fa8448573827\") " pod="openstack/neutron-db-sync-gms7h" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.787960 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/59ce449e-073f-4c66-ade6-fa8448573827-config\") pod \"neutron-db-sync-gms7h\" (UID: \"59ce449e-073f-4c66-ade6-fa8448573827\") " pod="openstack/neutron-db-sync-gms7h" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.788004 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1f97eee4-4c10-4f1e-8173-2f8d1c955049-etc-machine-id\") pod \"cinder-db-sync-vtfmh\" (UID: \"1f97eee4-4c10-4f1e-8173-2f8d1c955049\") " pod="openstack/cinder-db-sync-vtfmh" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.788035 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-run-httpd\") pod \"ceilometer-0\" (UID: \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\") " pod="openstack/ceilometer-0" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.788058 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1f97eee4-4c10-4f1e-8173-2f8d1c955049-db-sync-config-data\") pod \"cinder-db-sync-vtfmh\" (UID: \"1f97eee4-4c10-4f1e-8173-2f8d1c955049\") " pod="openstack/cinder-db-sync-vtfmh" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.788083 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6tbjz\" (UniqueName: \"kubernetes.io/projected/1f97eee4-4c10-4f1e-8173-2f8d1c955049-kube-api-access-6tbjz\") pod \"cinder-db-sync-vtfmh\" (UID: \"1f97eee4-4c10-4f1e-8173-2f8d1c955049\") " pod="openstack/cinder-db-sync-vtfmh" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.788149 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drm7l\" (UniqueName: \"kubernetes.io/projected/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-kube-api-access-drm7l\") pod \"ceilometer-0\" (UID: \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\") " pod="openstack/ceilometer-0" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.788192 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f97eee4-4c10-4f1e-8173-2f8d1c955049-combined-ca-bundle\") pod \"cinder-db-sync-vtfmh\" (UID: \"1f97eee4-4c10-4f1e-8173-2f8d1c955049\") " pod="openstack/cinder-db-sync-vtfmh" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.788205 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/59ce449e-073f-4c66-ade6-fa8448573827-combined-ca-bundle\") pod \"neutron-db-sync-gms7h\" (UID: \"59ce449e-073f-4c66-ade6-fa8448573827\") " pod="openstack/neutron-db-sync-gms7h" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.788347 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f97eee4-4c10-4f1e-8173-2f8d1c955049-scripts\") pod \"cinder-db-sync-vtfmh\" (UID: \"1f97eee4-4c10-4f1e-8173-2f8d1c955049\") " pod="openstack/cinder-db-sync-vtfmh" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.788384 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-log-httpd\") pod \"ceilometer-0\" (UID: \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\") " pod="openstack/ceilometer-0" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.788427 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\") " pod="openstack/ceilometer-0" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.788624 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\") " pod="openstack/ceilometer-0" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.790128 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjhcp\" (UniqueName: \"kubernetes.io/projected/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-kube-api-access-mjhcp\") pod \"keystone-bootstrap-czx99\" (UID: \"24c6514b-0780-4a7b-8cb3-697fa2aad8e1\") " pod="openstack/keystone-bootstrap-czx99" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.790502 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-scripts\") pod \"ceilometer-0\" (UID: \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\") " pod="openstack/ceilometer-0" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.797164 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f97eee4-4c10-4f1e-8173-2f8d1c955049-config-data\") pod \"cinder-db-sync-vtfmh\" (UID: \"1f97eee4-4c10-4f1e-8173-2f8d1c955049\") " pod="openstack/cinder-db-sync-vtfmh" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.797202 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-config-data\") pod \"ceilometer-0\" (UID: \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\") " pod="openstack/ceilometer-0" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.809275 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a0cfe7c-d080-4e26-bd8f-20ce456e3b93" path="/var/lib/kubelet/pods/1a0cfe7c-d080-4e26-bd8f-20ce456e3b93/volumes" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.810132 4982 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-w7nzl\" (UniqueName: \"kubernetes.io/projected/72e09197-e7f0-42df-ae48-fcb660b191cd-kube-api-access-w7nzl\") pod \"dnsmasq-dns-767d96458c-447gg\" (UID: \"72e09197-e7f0-42df-ae48-fcb660b191cd\") " pod="openstack/dnsmasq-dns-767d96458c-447gg" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.846363 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-767d96458c-447gg" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.863324 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-rsp2k"] Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.864813 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-rsp2k" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.881380 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-crgl6" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.881683 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.918959 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f97eee4-4c10-4f1e-8173-2f8d1c955049-scripts\") pod \"cinder-db-sync-vtfmh\" (UID: \"1f97eee4-4c10-4f1e-8173-2f8d1c955049\") " pod="openstack/cinder-db-sync-vtfmh" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.919000 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-log-httpd\") pod \"ceilometer-0\" (UID: \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\") " pod="openstack/ceilometer-0" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.919029 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\") " pod="openstack/ceilometer-0" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.919042 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\") " pod="openstack/ceilometer-0" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.919062 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-scripts\") pod \"ceilometer-0\" (UID: \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\") " pod="openstack/ceilometer-0" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.919082 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f97eee4-4c10-4f1e-8173-2f8d1c955049-config-data\") pod \"cinder-db-sync-vtfmh\" (UID: \"1f97eee4-4c10-4f1e-8173-2f8d1c955049\") " pod="openstack/cinder-db-sync-vtfmh" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.919097 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-config-data\") pod 
\"ceilometer-0\" (UID: \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\") " pod="openstack/ceilometer-0" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.919153 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbgzc\" (UniqueName: \"kubernetes.io/projected/59ce449e-073f-4c66-ade6-fa8448573827-kube-api-access-vbgzc\") pod \"neutron-db-sync-gms7h\" (UID: \"59ce449e-073f-4c66-ade6-fa8448573827\") " pod="openstack/neutron-db-sync-gms7h" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.919171 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/59ce449e-073f-4c66-ade6-fa8448573827-config\") pod \"neutron-db-sync-gms7h\" (UID: \"59ce449e-073f-4c66-ade6-fa8448573827\") " pod="openstack/neutron-db-sync-gms7h" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.919202 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1f97eee4-4c10-4f1e-8173-2f8d1c955049-etc-machine-id\") pod \"cinder-db-sync-vtfmh\" (UID: \"1f97eee4-4c10-4f1e-8173-2f8d1c955049\") " pod="openstack/cinder-db-sync-vtfmh" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.919225 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-run-httpd\") pod \"ceilometer-0\" (UID: \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\") " pod="openstack/ceilometer-0" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.919240 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1f97eee4-4c10-4f1e-8173-2f8d1c955049-db-sync-config-data\") pod \"cinder-db-sync-vtfmh\" (UID: \"1f97eee4-4c10-4f1e-8173-2f8d1c955049\") " pod="openstack/cinder-db-sync-vtfmh" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.919266 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6tbjz\" (UniqueName: \"kubernetes.io/projected/1f97eee4-4c10-4f1e-8173-2f8d1c955049-kube-api-access-6tbjz\") pod \"cinder-db-sync-vtfmh\" (UID: \"1f97eee4-4c10-4f1e-8173-2f8d1c955049\") " pod="openstack/cinder-db-sync-vtfmh" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.919298 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drm7l\" (UniqueName: \"kubernetes.io/projected/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-kube-api-access-drm7l\") pod \"ceilometer-0\" (UID: \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\") " pod="openstack/ceilometer-0" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.919317 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f97eee4-4c10-4f1e-8173-2f8d1c955049-combined-ca-bundle\") pod \"cinder-db-sync-vtfmh\" (UID: \"1f97eee4-4c10-4f1e-8173-2f8d1c955049\") " pod="openstack/cinder-db-sync-vtfmh" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.919330 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59ce449e-073f-4c66-ade6-fa8448573827-combined-ca-bundle\") pod \"neutron-db-sync-gms7h\" (UID: \"59ce449e-073f-4c66-ade6-fa8448573827\") " pod="openstack/neutron-db-sync-gms7h" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.926336 4982 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1f97eee4-4c10-4f1e-8173-2f8d1c955049-etc-machine-id\") pod \"cinder-db-sync-vtfmh\" (UID: \"1f97eee4-4c10-4f1e-8173-2f8d1c955049\") " pod="openstack/cinder-db-sync-vtfmh" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.946723 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-log-httpd\") pod \"ceilometer-0\" (UID: \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\") " pod="openstack/ceilometer-0" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.950432 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-run-httpd\") pod \"ceilometer-0\" (UID: \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\") " pod="openstack/ceilometer-0" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.963999 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drm7l\" (UniqueName: \"kubernetes.io/projected/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-kube-api-access-drm7l\") pod \"ceilometer-0\" (UID: \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\") " pod="openstack/ceilometer-0" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.965912 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/59ce449e-073f-4c66-ade6-fa8448573827-config\") pod \"neutron-db-sync-gms7h\" (UID: \"59ce449e-073f-4c66-ade6-fa8448573827\") " pod="openstack/neutron-db-sync-gms7h" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.966384 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1f97eee4-4c10-4f1e-8173-2f8d1c955049-db-sync-config-data\") pod \"cinder-db-sync-vtfmh\" (UID: \"1f97eee4-4c10-4f1e-8173-2f8d1c955049\") " pod="openstack/cinder-db-sync-vtfmh" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.966787 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-scripts\") pod \"ceilometer-0\" (UID: \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\") " pod="openstack/ceilometer-0" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.967057 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-config-data\") pod \"ceilometer-0\" (UID: \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\") " pod="openstack/ceilometer-0" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.967272 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f97eee4-4c10-4f1e-8173-2f8d1c955049-config-data\") pod \"cinder-db-sync-vtfmh\" (UID: \"1f97eee4-4c10-4f1e-8173-2f8d1c955049\") " pod="openstack/cinder-db-sync-vtfmh" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.967385 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\") " pod="openstack/ceilometer-0" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.967871 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/1f97eee4-4c10-4f1e-8173-2f8d1c955049-scripts\") pod \"cinder-db-sync-vtfmh\" (UID: \"1f97eee4-4c10-4f1e-8173-2f8d1c955049\") " pod="openstack/cinder-db-sync-vtfmh" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.975515 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\") " pod="openstack/ceilometer-0" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.975577 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-rsp2k"] Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.976043 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vbgzc\" (UniqueName: \"kubernetes.io/projected/59ce449e-073f-4c66-ade6-fa8448573827-kube-api-access-vbgzc\") pod \"neutron-db-sync-gms7h\" (UID: \"59ce449e-073f-4c66-ade6-fa8448573827\") " pod="openstack/neutron-db-sync-gms7h" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.976447 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6tbjz\" (UniqueName: \"kubernetes.io/projected/1f97eee4-4c10-4f1e-8173-2f8d1c955049-kube-api-access-6tbjz\") pod \"cinder-db-sync-vtfmh\" (UID: \"1f97eee4-4c10-4f1e-8173-2f8d1c955049\") " pod="openstack/cinder-db-sync-vtfmh" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.977657 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59ce449e-073f-4c66-ade6-fa8448573827-combined-ca-bundle\") pod \"neutron-db-sync-gms7h\" (UID: \"59ce449e-073f-4c66-ade6-fa8448573827\") " pod="openstack/neutron-db-sync-gms7h" Jan 22 06:06:35 crc kubenswrapper[4982]: I0122 06:06:35.997254 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-j4fnn"] Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:35.998660 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-j4fnn" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.001941 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-lghrg" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.002426 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f97eee4-4c10-4f1e-8173-2f8d1c955049-combined-ca-bundle\") pod \"cinder-db-sync-vtfmh\" (UID: \"1f97eee4-4c10-4f1e-8173-2f8d1c955049\") " pod="openstack/cinder-db-sync-vtfmh" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.010444 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.010924 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-767d96458c-447gg"] Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.011623 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.019182 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-j4fnn"] Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.044223 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb"] Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.045563 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.046447 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g88d9\" (UniqueName: \"kubernetes.io/projected/60dd52f0-a379-40e6-b2e3-dcd5c296bc59-kube-api-access-g88d9\") pod \"barbican-db-sync-rsp2k\" (UID: \"60dd52f0-a379-40e6-b2e3-dcd5c296bc59\") " pod="openstack/barbican-db-sync-rsp2k" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.046517 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60dd52f0-a379-40e6-b2e3-dcd5c296bc59-combined-ca-bundle\") pod \"barbican-db-sync-rsp2k\" (UID: \"60dd52f0-a379-40e6-b2e3-dcd5c296bc59\") " pod="openstack/barbican-db-sync-rsp2k" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.046905 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/60dd52f0-a379-40e6-b2e3-dcd5c296bc59-db-sync-config-data\") pod \"barbican-db-sync-rsp2k\" (UID: \"60dd52f0-a379-40e6-b2e3-dcd5c296bc59\") " pod="openstack/barbican-db-sync-rsp2k" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.059362 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-czx99" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.094072 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb"] Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.149945 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f985360b-8866-457a-953d-2d15a1e2d20e-config-data\") pod \"placement-db-sync-j4fnn\" (UID: \"f985360b-8866-457a-953d-2d15a1e2d20e\") " pod="openstack/placement-db-sync-j4fnn" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.150001 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g88d9\" (UniqueName: \"kubernetes.io/projected/60dd52f0-a379-40e6-b2e3-dcd5c296bc59-kube-api-access-g88d9\") pod \"barbican-db-sync-rsp2k\" (UID: \"60dd52f0-a379-40e6-b2e3-dcd5c296bc59\") " pod="openstack/barbican-db-sync-rsp2k" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.150064 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fad68a0e-d29d-44bb-bf31-55b501abbdcd-ovsdbserver-nb\") pod \"dnsmasq-dns-7fc6d4ffc7-fzfmb\" (UID: \"fad68a0e-d29d-44bb-bf31-55b501abbdcd\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.150095 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8nzwm\" (UniqueName: \"kubernetes.io/projected/f985360b-8866-457a-953d-2d15a1e2d20e-kube-api-access-8nzwm\") pod \"placement-db-sync-j4fnn\" (UID: \"f985360b-8866-457a-953d-2d15a1e2d20e\") " pod="openstack/placement-db-sync-j4fnn" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.151113 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60dd52f0-a379-40e6-b2e3-dcd5c296bc59-combined-ca-bundle\") pod \"barbican-db-sync-rsp2k\" (UID: \"60dd52f0-a379-40e6-b2e3-dcd5c296bc59\") " pod="openstack/barbican-db-sync-rsp2k" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.151134 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8l9kk\" (UniqueName: \"kubernetes.io/projected/fad68a0e-d29d-44bb-bf31-55b501abbdcd-kube-api-access-8l9kk\") pod \"dnsmasq-dns-7fc6d4ffc7-fzfmb\" (UID: \"fad68a0e-d29d-44bb-bf31-55b501abbdcd\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.151211 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fad68a0e-d29d-44bb-bf31-55b501abbdcd-dns-swift-storage-0\") pod \"dnsmasq-dns-7fc6d4ffc7-fzfmb\" (UID: \"fad68a0e-d29d-44bb-bf31-55b501abbdcd\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.151244 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f985360b-8866-457a-953d-2d15a1e2d20e-scripts\") pod \"placement-db-sync-j4fnn\" (UID: \"f985360b-8866-457a-953d-2d15a1e2d20e\") " pod="openstack/placement-db-sync-j4fnn" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.151273 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fad68a0e-d29d-44bb-bf31-55b501abbdcd-dns-svc\") pod \"dnsmasq-dns-7fc6d4ffc7-fzfmb\" (UID: \"fad68a0e-d29d-44bb-bf31-55b501abbdcd\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.151323 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fad68a0e-d29d-44bb-bf31-55b501abbdcd-config\") pod \"dnsmasq-dns-7fc6d4ffc7-fzfmb\" (UID: \"fad68a0e-d29d-44bb-bf31-55b501abbdcd\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.151350 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f985360b-8866-457a-953d-2d15a1e2d20e-combined-ca-bundle\") pod \"placement-db-sync-j4fnn\" (UID: \"f985360b-8866-457a-953d-2d15a1e2d20e\") " pod="openstack/placement-db-sync-j4fnn" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.151405 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/60dd52f0-a379-40e6-b2e3-dcd5c296bc59-db-sync-config-data\") pod \"barbican-db-sync-rsp2k\" (UID: \"60dd52f0-a379-40e6-b2e3-dcd5c296bc59\") " pod="openstack/barbican-db-sync-rsp2k" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.151463 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f985360b-8866-457a-953d-2d15a1e2d20e-logs\") pod \"placement-db-sync-j4fnn\" (UID: \"f985360b-8866-457a-953d-2d15a1e2d20e\") " pod="openstack/placement-db-sync-j4fnn" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.151499 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fad68a0e-d29d-44bb-bf31-55b501abbdcd-ovsdbserver-sb\") pod \"dnsmasq-dns-7fc6d4ffc7-fzfmb\" (UID: \"fad68a0e-d29d-44bb-bf31-55b501abbdcd\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.158136 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/60dd52f0-a379-40e6-b2e3-dcd5c296bc59-db-sync-config-data\") pod \"barbican-db-sync-rsp2k\" (UID: \"60dd52f0-a379-40e6-b2e3-dcd5c296bc59\") " pod="openstack/barbican-db-sync-rsp2k" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.158245 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60dd52f0-a379-40e6-b2e3-dcd5c296bc59-combined-ca-bundle\") pod \"barbican-db-sync-rsp2k\" (UID: \"60dd52f0-a379-40e6-b2e3-dcd5c296bc59\") " pod="openstack/barbican-db-sync-rsp2k" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.158403 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8db84466c-kh4rj" event={"ID":"34b58f4a-a0ac-4603-b0bf-355128fa6d31","Type":"ContainerStarted","Data":"af6d7cace0863ca22201afdee1a683de95a7e741ad851ef333ba774603059a0f"} Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.158541 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8db84466c-kh4rj" 
podUID="34b58f4a-a0ac-4603-b0bf-355128fa6d31" containerName="dnsmasq-dns" containerID="cri-o://af6d7cace0863ca22201afdee1a683de95a7e741ad851ef333ba774603059a0f" gracePeriod=10 Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.158777 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8db84466c-kh4rj" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.167525 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g88d9\" (UniqueName: \"kubernetes.io/projected/60dd52f0-a379-40e6-b2e3-dcd5c296bc59-kube-api-access-g88d9\") pod \"barbican-db-sync-rsp2k\" (UID: \"60dd52f0-a379-40e6-b2e3-dcd5c296bc59\") " pod="openstack/barbican-db-sync-rsp2k" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.182152 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8db84466c-kh4rj" podStartSLOduration=3.182134647 podStartE2EDuration="3.182134647s" podCreationTimestamp="2026-01-22 06:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:06:36.181206313 +0000 UTC m=+1257.019844316" watchObservedRunningTime="2026-01-22 06:06:36.182134647 +0000 UTC m=+1257.020772650" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.230325 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-vtfmh" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.239520 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.249755 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-rsp2k" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.253170 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f985360b-8866-457a-953d-2d15a1e2d20e-logs\") pod \"placement-db-sync-j4fnn\" (UID: \"f985360b-8866-457a-953d-2d15a1e2d20e\") " pod="openstack/placement-db-sync-j4fnn" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.253224 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fad68a0e-d29d-44bb-bf31-55b501abbdcd-ovsdbserver-sb\") pod \"dnsmasq-dns-7fc6d4ffc7-fzfmb\" (UID: \"fad68a0e-d29d-44bb-bf31-55b501abbdcd\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.253268 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f985360b-8866-457a-953d-2d15a1e2d20e-config-data\") pod \"placement-db-sync-j4fnn\" (UID: \"f985360b-8866-457a-953d-2d15a1e2d20e\") " pod="openstack/placement-db-sync-j4fnn" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.253311 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fad68a0e-d29d-44bb-bf31-55b501abbdcd-ovsdbserver-nb\") pod \"dnsmasq-dns-7fc6d4ffc7-fzfmb\" (UID: \"fad68a0e-d29d-44bb-bf31-55b501abbdcd\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.253339 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8nzwm\" (UniqueName: 
\"kubernetes.io/projected/f985360b-8866-457a-953d-2d15a1e2d20e-kube-api-access-8nzwm\") pod \"placement-db-sync-j4fnn\" (UID: \"f985360b-8866-457a-953d-2d15a1e2d20e\") " pod="openstack/placement-db-sync-j4fnn" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.253365 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8l9kk\" (UniqueName: \"kubernetes.io/projected/fad68a0e-d29d-44bb-bf31-55b501abbdcd-kube-api-access-8l9kk\") pod \"dnsmasq-dns-7fc6d4ffc7-fzfmb\" (UID: \"fad68a0e-d29d-44bb-bf31-55b501abbdcd\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.253399 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fad68a0e-d29d-44bb-bf31-55b501abbdcd-dns-swift-storage-0\") pod \"dnsmasq-dns-7fc6d4ffc7-fzfmb\" (UID: \"fad68a0e-d29d-44bb-bf31-55b501abbdcd\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.253424 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f985360b-8866-457a-953d-2d15a1e2d20e-scripts\") pod \"placement-db-sync-j4fnn\" (UID: \"f985360b-8866-457a-953d-2d15a1e2d20e\") " pod="openstack/placement-db-sync-j4fnn" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.253456 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fad68a0e-d29d-44bb-bf31-55b501abbdcd-dns-svc\") pod \"dnsmasq-dns-7fc6d4ffc7-fzfmb\" (UID: \"fad68a0e-d29d-44bb-bf31-55b501abbdcd\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.253500 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fad68a0e-d29d-44bb-bf31-55b501abbdcd-config\") pod \"dnsmasq-dns-7fc6d4ffc7-fzfmb\" (UID: \"fad68a0e-d29d-44bb-bf31-55b501abbdcd\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.253527 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f985360b-8866-457a-953d-2d15a1e2d20e-combined-ca-bundle\") pod \"placement-db-sync-j4fnn\" (UID: \"f985360b-8866-457a-953d-2d15a1e2d20e\") " pod="openstack/placement-db-sync-j4fnn" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.254338 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fad68a0e-d29d-44bb-bf31-55b501abbdcd-ovsdbserver-sb\") pod \"dnsmasq-dns-7fc6d4ffc7-fzfmb\" (UID: \"fad68a0e-d29d-44bb-bf31-55b501abbdcd\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.254662 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f985360b-8866-457a-953d-2d15a1e2d20e-logs\") pod \"placement-db-sync-j4fnn\" (UID: \"f985360b-8866-457a-953d-2d15a1e2d20e\") " pod="openstack/placement-db-sync-j4fnn" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.257627 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fad68a0e-d29d-44bb-bf31-55b501abbdcd-dns-svc\") pod \"dnsmasq-dns-7fc6d4ffc7-fzfmb\" (UID: 
\"fad68a0e-d29d-44bb-bf31-55b501abbdcd\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.257788 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fad68a0e-d29d-44bb-bf31-55b501abbdcd-dns-swift-storage-0\") pod \"dnsmasq-dns-7fc6d4ffc7-fzfmb\" (UID: \"fad68a0e-d29d-44bb-bf31-55b501abbdcd\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.259514 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fad68a0e-d29d-44bb-bf31-55b501abbdcd-config\") pod \"dnsmasq-dns-7fc6d4ffc7-fzfmb\" (UID: \"fad68a0e-d29d-44bb-bf31-55b501abbdcd\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.260596 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fad68a0e-d29d-44bb-bf31-55b501abbdcd-ovsdbserver-nb\") pod \"dnsmasq-dns-7fc6d4ffc7-fzfmb\" (UID: \"fad68a0e-d29d-44bb-bf31-55b501abbdcd\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.262439 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f985360b-8866-457a-953d-2d15a1e2d20e-combined-ca-bundle\") pod \"placement-db-sync-j4fnn\" (UID: \"f985360b-8866-457a-953d-2d15a1e2d20e\") " pod="openstack/placement-db-sync-j4fnn" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.265971 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-gms7h" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.266656 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f985360b-8866-457a-953d-2d15a1e2d20e-scripts\") pod \"placement-db-sync-j4fnn\" (UID: \"f985360b-8866-457a-953d-2d15a1e2d20e\") " pod="openstack/placement-db-sync-j4fnn" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.268216 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f985360b-8866-457a-953d-2d15a1e2d20e-config-data\") pod \"placement-db-sync-j4fnn\" (UID: \"f985360b-8866-457a-953d-2d15a1e2d20e\") " pod="openstack/placement-db-sync-j4fnn" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.278323 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8l9kk\" (UniqueName: \"kubernetes.io/projected/fad68a0e-d29d-44bb-bf31-55b501abbdcd-kube-api-access-8l9kk\") pod \"dnsmasq-dns-7fc6d4ffc7-fzfmb\" (UID: \"fad68a0e-d29d-44bb-bf31-55b501abbdcd\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.278605 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8nzwm\" (UniqueName: \"kubernetes.io/projected/f985360b-8866-457a-953d-2d15a1e2d20e-kube-api-access-8nzwm\") pod \"placement-db-sync-j4fnn\" (UID: \"f985360b-8866-457a-953d-2d15a1e2d20e\") " pod="openstack/placement-db-sync-j4fnn" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.326318 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-j4fnn" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.423298 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.484108 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-767d96458c-447gg"] Jan 22 06:06:36 crc kubenswrapper[4982]: W0122 06:06:36.527469 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod72e09197_e7f0_42df_ae48_fcb660b191cd.slice/crio-b7e556cce2e24ae37ae7c042632da6b85b45f9314844a2bfd29145d13dd01f5b WatchSource:0}: Error finding container b7e556cce2e24ae37ae7c042632da6b85b45f9314844a2bfd29145d13dd01f5b: Status 404 returned error can't find the container with id b7e556cce2e24ae37ae7c042632da6b85b45f9314844a2bfd29145d13dd01f5b Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.627160 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-czx99"] Jan 22 06:06:36 crc kubenswrapper[4982]: W0122 06:06:36.654742 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod24c6514b_0780_4a7b_8cb3_697fa2aad8e1.slice/crio-8cb57d5e2e13ae600788445714c7256a418b92bbbd46e94ad58c503149dc72fc WatchSource:0}: Error finding container 8cb57d5e2e13ae600788445714c7256a418b92bbbd46e94ad58c503149dc72fc: Status 404 returned error can't find the container with id 8cb57d5e2e13ae600788445714c7256a418b92bbbd46e94ad58c503149dc72fc Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.671544 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8db84466c-kh4rj" Jan 22 06:06:36 crc kubenswrapper[4982]: W0122 06:06:36.808668 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod60dd52f0_a379_40e6_b2e3_dcd5c296bc59.slice/crio-a98f4981abe38b8eb3668a1c3ff37e633461a228cd91bcb0e0387a9658e134b0 WatchSource:0}: Error finding container a98f4981abe38b8eb3668a1c3ff37e633461a228cd91bcb0e0387a9658e134b0: Status 404 returned error can't find the container with id a98f4981abe38b8eb3668a1c3ff37e633461a228cd91bcb0e0387a9658e134b0 Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.812972 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-rsp2k"] Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.865625 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/34b58f4a-a0ac-4603-b0bf-355128fa6d31-ovsdbserver-sb\") pod \"34b58f4a-a0ac-4603-b0bf-355128fa6d31\" (UID: \"34b58f4a-a0ac-4603-b0bf-355128fa6d31\") " Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.865671 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gc4xb\" (UniqueName: \"kubernetes.io/projected/34b58f4a-a0ac-4603-b0bf-355128fa6d31-kube-api-access-gc4xb\") pod \"34b58f4a-a0ac-4603-b0bf-355128fa6d31\" (UID: \"34b58f4a-a0ac-4603-b0bf-355128fa6d31\") " Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.865741 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/34b58f4a-a0ac-4603-b0bf-355128fa6d31-ovsdbserver-nb\") pod 
\"34b58f4a-a0ac-4603-b0bf-355128fa6d31\" (UID: \"34b58f4a-a0ac-4603-b0bf-355128fa6d31\") " Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.865762 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34b58f4a-a0ac-4603-b0bf-355128fa6d31-config\") pod \"34b58f4a-a0ac-4603-b0bf-355128fa6d31\" (UID: \"34b58f4a-a0ac-4603-b0bf-355128fa6d31\") " Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.865782 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/34b58f4a-a0ac-4603-b0bf-355128fa6d31-dns-swift-storage-0\") pod \"34b58f4a-a0ac-4603-b0bf-355128fa6d31\" (UID: \"34b58f4a-a0ac-4603-b0bf-355128fa6d31\") " Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.865846 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/34b58f4a-a0ac-4603-b0bf-355128fa6d31-dns-svc\") pod \"34b58f4a-a0ac-4603-b0bf-355128fa6d31\" (UID: \"34b58f4a-a0ac-4603-b0bf-355128fa6d31\") " Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.882061 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34b58f4a-a0ac-4603-b0bf-355128fa6d31-kube-api-access-gc4xb" (OuterVolumeSpecName: "kube-api-access-gc4xb") pod "34b58f4a-a0ac-4603-b0bf-355128fa6d31" (UID: "34b58f4a-a0ac-4603-b0bf-355128fa6d31"). InnerVolumeSpecName "kube-api-access-gc4xb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:06:36 crc kubenswrapper[4982]: I0122 06:06:36.971357 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gc4xb\" (UniqueName: \"kubernetes.io/projected/34b58f4a-a0ac-4603-b0bf-355128fa6d31-kube-api-access-gc4xb\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.014986 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34b58f4a-a0ac-4603-b0bf-355128fa6d31-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "34b58f4a-a0ac-4603-b0bf-355128fa6d31" (UID: "34b58f4a-a0ac-4603-b0bf-355128fa6d31"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.015573 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34b58f4a-a0ac-4603-b0bf-355128fa6d31-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "34b58f4a-a0ac-4603-b0bf-355128fa6d31" (UID: "34b58f4a-a0ac-4603-b0bf-355128fa6d31"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.030367 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-vtfmh"] Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.074467 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/34b58f4a-a0ac-4603-b0bf-355128fa6d31-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.074501 4982 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/34b58f4a-a0ac-4603-b0bf-355128fa6d31-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.084732 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34b58f4a-a0ac-4603-b0bf-355128fa6d31-config" (OuterVolumeSpecName: "config") pod "34b58f4a-a0ac-4603-b0bf-355128fa6d31" (UID: "34b58f4a-a0ac-4603-b0bf-355128fa6d31"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.087517 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34b58f4a-a0ac-4603-b0bf-355128fa6d31-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "34b58f4a-a0ac-4603-b0bf-355128fa6d31" (UID: "34b58f4a-a0ac-4603-b0bf-355128fa6d31"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.090977 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb"] Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.095581 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34b58f4a-a0ac-4603-b0bf-355128fa6d31-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "34b58f4a-a0ac-4603-b0bf-355128fa6d31" (UID: "34b58f4a-a0ac-4603-b0bf-355128fa6d31"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.102445 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-j4fnn"] Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.113891 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:06:37 crc kubenswrapper[4982]: W0122 06:06:37.115106 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod59ce449e_073f_4c66_ade6_fa8448573827.slice/crio-d819997680fa24fc3cb3db2696e568d4f7bb4f6c162d422e51d82531c5b90d65 WatchSource:0}: Error finding container d819997680fa24fc3cb3db2696e568d4f7bb4f6c162d422e51d82531c5b90d65: Status 404 returned error can't find the container with id d819997680fa24fc3cb3db2696e568d4f7bb4f6c162d422e51d82531c5b90d65 Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.121065 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-gms7h"] Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.170566 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-czx99" event={"ID":"24c6514b-0780-4a7b-8cb3-697fa2aad8e1","Type":"ContainerStarted","Data":"8cb57d5e2e13ae600788445714c7256a418b92bbbd46e94ad58c503149dc72fc"} Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.172477 4982 generic.go:334] "Generic (PLEG): container finished" podID="34b58f4a-a0ac-4603-b0bf-355128fa6d31" containerID="af6d7cace0863ca22201afdee1a683de95a7e741ad851ef333ba774603059a0f" exitCode=0 Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.172560 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8db84466c-kh4rj" event={"ID":"34b58f4a-a0ac-4603-b0bf-355128fa6d31","Type":"ContainerDied","Data":"af6d7cace0863ca22201afdee1a683de95a7e741ad851ef333ba774603059a0f"} Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.172615 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8db84466c-kh4rj" event={"ID":"34b58f4a-a0ac-4603-b0bf-355128fa6d31","Type":"ContainerDied","Data":"055b96cdfb4ec5661c830a82bed09caa61f3e621db41931df63fa6672a695b06"} Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.172633 4982 scope.go:117] "RemoveContainer" containerID="af6d7cace0863ca22201afdee1a683de95a7e741ad851ef333ba774603059a0f" Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.172577 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8db84466c-kh4rj" Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.175979 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/34b58f4a-a0ac-4603-b0bf-355128fa6d31-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.176004 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/34b58f4a-a0ac-4603-b0bf-355128fa6d31-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.176020 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34b58f4a-a0ac-4603-b0bf-355128fa6d31-config\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.176120 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-gms7h" event={"ID":"59ce449e-073f-4c66-ade6-fa8448573827","Type":"ContainerStarted","Data":"d819997680fa24fc3cb3db2696e568d4f7bb4f6c162d422e51d82531c5b90d65"} Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.177298 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-rsp2k" event={"ID":"60dd52f0-a379-40e6-b2e3-dcd5c296bc59","Type":"ContainerStarted","Data":"a98f4981abe38b8eb3668a1c3ff37e633461a228cd91bcb0e0387a9658e134b0"} Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.177997 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-767d96458c-447gg" event={"ID":"72e09197-e7f0-42df-ae48-fcb660b191cd","Type":"ContainerStarted","Data":"b7e556cce2e24ae37ae7c042632da6b85b45f9314844a2bfd29145d13dd01f5b"} Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.178965 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-vtfmh" event={"ID":"1f97eee4-4c10-4f1e-8173-2f8d1c955049","Type":"ContainerStarted","Data":"4ee2626a88284bb96051262e042985ec293cc6014c7fd667ad74ecc861d28d7d"} Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.180135 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"eba6d2d0-f7a0-4865-8b11-bdb9b302271c","Type":"ContainerStarted","Data":"06b5837124fd7018f3ef243d692555e0754bff37545f739cacc9f1c0957dc5fe"} Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.181287 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb" event={"ID":"fad68a0e-d29d-44bb-bf31-55b501abbdcd","Type":"ContainerStarted","Data":"2dfda501dc095d7821eb835de7828636c36401a85b4cd712f44641c9a41bc41f"} Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.182181 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-j4fnn" event={"ID":"f985360b-8866-457a-953d-2d15a1e2d20e","Type":"ContainerStarted","Data":"32c22500d229c6753016586cae84c58dab5fd44dcef17c62950ba010178751c6"} Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.195215 4982 scope.go:117] "RemoveContainer" containerID="04e0fe1cd3617ad61aac84d12e9ec9a460c433fe7ca07d34c4941c6407c654a5" Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.216773 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8db84466c-kh4rj"] Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.225686 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8db84466c-kh4rj"] Jan 22 06:06:37 crc kubenswrapper[4982]: 
I0122 06:06:37.254024 4982 scope.go:117] "RemoveContainer" containerID="af6d7cace0863ca22201afdee1a683de95a7e741ad851ef333ba774603059a0f" Jan 22 06:06:37 crc kubenswrapper[4982]: E0122 06:06:37.254715 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af6d7cace0863ca22201afdee1a683de95a7e741ad851ef333ba774603059a0f\": container with ID starting with af6d7cace0863ca22201afdee1a683de95a7e741ad851ef333ba774603059a0f not found: ID does not exist" containerID="af6d7cace0863ca22201afdee1a683de95a7e741ad851ef333ba774603059a0f" Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.254788 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af6d7cace0863ca22201afdee1a683de95a7e741ad851ef333ba774603059a0f"} err="failed to get container status \"af6d7cace0863ca22201afdee1a683de95a7e741ad851ef333ba774603059a0f\": rpc error: code = NotFound desc = could not find container \"af6d7cace0863ca22201afdee1a683de95a7e741ad851ef333ba774603059a0f\": container with ID starting with af6d7cace0863ca22201afdee1a683de95a7e741ad851ef333ba774603059a0f not found: ID does not exist" Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.254815 4982 scope.go:117] "RemoveContainer" containerID="04e0fe1cd3617ad61aac84d12e9ec9a460c433fe7ca07d34c4941c6407c654a5" Jan 22 06:06:37 crc kubenswrapper[4982]: E0122 06:06:37.255257 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04e0fe1cd3617ad61aac84d12e9ec9a460c433fe7ca07d34c4941c6407c654a5\": container with ID starting with 04e0fe1cd3617ad61aac84d12e9ec9a460c433fe7ca07d34c4941c6407c654a5 not found: ID does not exist" containerID="04e0fe1cd3617ad61aac84d12e9ec9a460c433fe7ca07d34c4941c6407c654a5" Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.255302 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04e0fe1cd3617ad61aac84d12e9ec9a460c433fe7ca07d34c4941c6407c654a5"} err="failed to get container status \"04e0fe1cd3617ad61aac84d12e9ec9a460c433fe7ca07d34c4941c6407c654a5\": rpc error: code = NotFound desc = could not find container \"04e0fe1cd3617ad61aac84d12e9ec9a460c433fe7ca07d34c4941c6407c654a5\": container with ID starting with 04e0fe1cd3617ad61aac84d12e9ec9a460c433fe7ca07d34c4941c6407c654a5 not found: ID does not exist" Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.755697 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34b58f4a-a0ac-4603-b0bf-355128fa6d31" path="/var/lib/kubelet/pods/34b58f4a-a0ac-4603-b0bf-355128fa6d31/volumes" Jan 22 06:06:37 crc kubenswrapper[4982]: I0122 06:06:37.988559 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:06:38 crc kubenswrapper[4982]: I0122 06:06:38.192029 4982 generic.go:334] "Generic (PLEG): container finished" podID="fad68a0e-d29d-44bb-bf31-55b501abbdcd" containerID="49379128ac518b09b1e0d175a299a67243cadd31b94e53a53cdf0bb8dd4251f9" exitCode=0 Jan 22 06:06:38 crc kubenswrapper[4982]: I0122 06:06:38.192085 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb" event={"ID":"fad68a0e-d29d-44bb-bf31-55b501abbdcd","Type":"ContainerDied","Data":"49379128ac518b09b1e0d175a299a67243cadd31b94e53a53cdf0bb8dd4251f9"} Jan 22 06:06:38 crc kubenswrapper[4982]: I0122 06:06:38.202682 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/keystone-bootstrap-czx99" event={"ID":"24c6514b-0780-4a7b-8cb3-697fa2aad8e1","Type":"ContainerStarted","Data":"fa8b66a769c752600146a2bd21e5db11db65361248dd5b3d212a9269c42ae42b"} Jan 22 06:06:38 crc kubenswrapper[4982]: I0122 06:06:38.212487 4982 generic.go:334] "Generic (PLEG): container finished" podID="72e09197-e7f0-42df-ae48-fcb660b191cd" containerID="f18120cf3dad5de2ade2d246442352c32fe9dfaf8e5b3083123e1d4665445dde" exitCode=0 Jan 22 06:06:38 crc kubenswrapper[4982]: I0122 06:06:38.214176 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-767d96458c-447gg" event={"ID":"72e09197-e7f0-42df-ae48-fcb660b191cd","Type":"ContainerDied","Data":"f18120cf3dad5de2ade2d246442352c32fe9dfaf8e5b3083123e1d4665445dde"} Jan 22 06:06:38 crc kubenswrapper[4982]: I0122 06:06:38.224841 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-z7b8c" event={"ID":"4c49756b-5f06-470b-9bc5-281b5bfbb198","Type":"ContainerStarted","Data":"e1876ee24c9fffd277bce8275d6388beac19ae4bb7662cbc6970d20f3f974e3c"} Jan 22 06:06:38 crc kubenswrapper[4982]: I0122 06:06:38.227310 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-gms7h" event={"ID":"59ce449e-073f-4c66-ade6-fa8448573827","Type":"ContainerStarted","Data":"f21f6b2f159515920e265fd73f4db3e8cdea1216effa74442ab287d2ab610e13"} Jan 22 06:06:38 crc kubenswrapper[4982]: I0122 06:06:38.302153 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-gms7h" podStartSLOduration=3.302135861 podStartE2EDuration="3.302135861s" podCreationTimestamp="2026-01-22 06:06:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:06:38.28177278 +0000 UTC m=+1259.120410783" watchObservedRunningTime="2026-01-22 06:06:38.302135861 +0000 UTC m=+1259.140773864" Jan 22 06:06:38 crc kubenswrapper[4982]: I0122 06:06:38.303664 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-czx99" podStartSLOduration=3.30365851 podStartE2EDuration="3.30365851s" podCreationTimestamp="2026-01-22 06:06:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:06:38.264467267 +0000 UTC m=+1259.103105270" watchObservedRunningTime="2026-01-22 06:06:38.30365851 +0000 UTC m=+1259.142296513" Jan 22 06:06:38 crc kubenswrapper[4982]: I0122 06:06:38.323539 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-z7b8c" podStartSLOduration=3.429642103 podStartE2EDuration="34.323470537s" podCreationTimestamp="2026-01-22 06:06:04 +0000 UTC" firstStartedPulling="2026-01-22 06:06:05.409563636 +0000 UTC m=+1226.248201639" lastFinishedPulling="2026-01-22 06:06:36.30339207 +0000 UTC m=+1257.142030073" observedRunningTime="2026-01-22 06:06:38.29522589 +0000 UTC m=+1259.133863893" watchObservedRunningTime="2026-01-22 06:06:38.323470537 +0000 UTC m=+1259.162108530" Jan 22 06:06:39 crc kubenswrapper[4982]: I0122 06:06:38.620826 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-767d96458c-447gg" Jan 22 06:06:39 crc kubenswrapper[4982]: I0122 06:06:38.808218 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72e09197-e7f0-42df-ae48-fcb660b191cd-ovsdbserver-nb\") pod \"72e09197-e7f0-42df-ae48-fcb660b191cd\" (UID: \"72e09197-e7f0-42df-ae48-fcb660b191cd\") " Jan 22 06:06:39 crc kubenswrapper[4982]: I0122 06:06:38.808255 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72e09197-e7f0-42df-ae48-fcb660b191cd-dns-svc\") pod \"72e09197-e7f0-42df-ae48-fcb660b191cd\" (UID: \"72e09197-e7f0-42df-ae48-fcb660b191cd\") " Jan 22 06:06:39 crc kubenswrapper[4982]: I0122 06:06:38.808372 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72e09197-e7f0-42df-ae48-fcb660b191cd-ovsdbserver-sb\") pod \"72e09197-e7f0-42df-ae48-fcb660b191cd\" (UID: \"72e09197-e7f0-42df-ae48-fcb660b191cd\") " Jan 22 06:06:39 crc kubenswrapper[4982]: I0122 06:06:38.808425 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72e09197-e7f0-42df-ae48-fcb660b191cd-config\") pod \"72e09197-e7f0-42df-ae48-fcb660b191cd\" (UID: \"72e09197-e7f0-42df-ae48-fcb660b191cd\") " Jan 22 06:06:39 crc kubenswrapper[4982]: I0122 06:06:38.808498 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7nzl\" (UniqueName: \"kubernetes.io/projected/72e09197-e7f0-42df-ae48-fcb660b191cd-kube-api-access-w7nzl\") pod \"72e09197-e7f0-42df-ae48-fcb660b191cd\" (UID: \"72e09197-e7f0-42df-ae48-fcb660b191cd\") " Jan 22 06:06:39 crc kubenswrapper[4982]: I0122 06:06:38.808568 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/72e09197-e7f0-42df-ae48-fcb660b191cd-dns-swift-storage-0\") pod \"72e09197-e7f0-42df-ae48-fcb660b191cd\" (UID: \"72e09197-e7f0-42df-ae48-fcb660b191cd\") " Jan 22 06:06:39 crc kubenswrapper[4982]: I0122 06:06:38.831144 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72e09197-e7f0-42df-ae48-fcb660b191cd-kube-api-access-w7nzl" (OuterVolumeSpecName: "kube-api-access-w7nzl") pod "72e09197-e7f0-42df-ae48-fcb660b191cd" (UID: "72e09197-e7f0-42df-ae48-fcb660b191cd"). InnerVolumeSpecName "kube-api-access-w7nzl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:06:39 crc kubenswrapper[4982]: I0122 06:06:38.860244 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72e09197-e7f0-42df-ae48-fcb660b191cd-config" (OuterVolumeSpecName: "config") pod "72e09197-e7f0-42df-ae48-fcb660b191cd" (UID: "72e09197-e7f0-42df-ae48-fcb660b191cd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:39 crc kubenswrapper[4982]: I0122 06:06:38.881637 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72e09197-e7f0-42df-ae48-fcb660b191cd-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "72e09197-e7f0-42df-ae48-fcb660b191cd" (UID: "72e09197-e7f0-42df-ae48-fcb660b191cd"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:39 crc kubenswrapper[4982]: I0122 06:06:38.882873 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72e09197-e7f0-42df-ae48-fcb660b191cd-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "72e09197-e7f0-42df-ae48-fcb660b191cd" (UID: "72e09197-e7f0-42df-ae48-fcb660b191cd"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:39 crc kubenswrapper[4982]: I0122 06:06:38.884160 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72e09197-e7f0-42df-ae48-fcb660b191cd-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "72e09197-e7f0-42df-ae48-fcb660b191cd" (UID: "72e09197-e7f0-42df-ae48-fcb660b191cd"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:39 crc kubenswrapper[4982]: I0122 06:06:38.895299 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72e09197-e7f0-42df-ae48-fcb660b191cd-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "72e09197-e7f0-42df-ae48-fcb660b191cd" (UID: "72e09197-e7f0-42df-ae48-fcb660b191cd"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:39 crc kubenswrapper[4982]: I0122 06:06:38.910953 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7nzl\" (UniqueName: \"kubernetes.io/projected/72e09197-e7f0-42df-ae48-fcb660b191cd-kube-api-access-w7nzl\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:39 crc kubenswrapper[4982]: I0122 06:06:38.910981 4982 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/72e09197-e7f0-42df-ae48-fcb660b191cd-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:39 crc kubenswrapper[4982]: I0122 06:06:38.910991 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72e09197-e7f0-42df-ae48-fcb660b191cd-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:39 crc kubenswrapper[4982]: I0122 06:06:38.911001 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72e09197-e7f0-42df-ae48-fcb660b191cd-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:39 crc kubenswrapper[4982]: I0122 06:06:38.911009 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72e09197-e7f0-42df-ae48-fcb660b191cd-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:39 crc kubenswrapper[4982]: I0122 06:06:38.911018 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72e09197-e7f0-42df-ae48-fcb660b191cd-config\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:39 crc kubenswrapper[4982]: I0122 06:06:39.253944 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb" event={"ID":"fad68a0e-d29d-44bb-bf31-55b501abbdcd","Type":"ContainerStarted","Data":"36e5a44fd3de3632520f9846371ddebac55c80883354a56b71ec992a94a2f862"} Jan 22 06:06:39 crc kubenswrapper[4982]: I0122 06:06:39.254015 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb" Jan 22 06:06:39 crc kubenswrapper[4982]: I0122 06:06:39.256446 4982 util.go:48] "No ready sandbox 
for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-767d96458c-447gg" Jan 22 06:06:39 crc kubenswrapper[4982]: I0122 06:06:39.256979 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-767d96458c-447gg" event={"ID":"72e09197-e7f0-42df-ae48-fcb660b191cd","Type":"ContainerDied","Data":"b7e556cce2e24ae37ae7c042632da6b85b45f9314844a2bfd29145d13dd01f5b"} Jan 22 06:06:39 crc kubenswrapper[4982]: I0122 06:06:39.257029 4982 scope.go:117] "RemoveContainer" containerID="f18120cf3dad5de2ade2d246442352c32fe9dfaf8e5b3083123e1d4665445dde" Jan 22 06:06:39 crc kubenswrapper[4982]: I0122 06:06:39.277972 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb" podStartSLOduration=4.277950651 podStartE2EDuration="4.277950651s" podCreationTimestamp="2026-01-22 06:06:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:06:39.275263981 +0000 UTC m=+1260.113902004" watchObservedRunningTime="2026-01-22 06:06:39.277950651 +0000 UTC m=+1260.116588654" Jan 22 06:06:39 crc kubenswrapper[4982]: I0122 06:06:39.371346 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-767d96458c-447gg"] Jan 22 06:06:39 crc kubenswrapper[4982]: I0122 06:06:39.381443 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-767d96458c-447gg"] Jan 22 06:06:39 crc kubenswrapper[4982]: I0122 06:06:39.730335 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72e09197-e7f0-42df-ae48-fcb660b191cd" path="/var/lib/kubelet/pods/72e09197-e7f0-42df-ae48-fcb660b191cd/volumes" Jan 22 06:06:42 crc kubenswrapper[4982]: I0122 06:06:42.311247 4982 generic.go:334] "Generic (PLEG): container finished" podID="24c6514b-0780-4a7b-8cb3-697fa2aad8e1" containerID="fa8b66a769c752600146a2bd21e5db11db65361248dd5b3d212a9269c42ae42b" exitCode=0 Jan 22 06:06:42 crc kubenswrapper[4982]: I0122 06:06:42.311764 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-czx99" event={"ID":"24c6514b-0780-4a7b-8cb3-697fa2aad8e1","Type":"ContainerDied","Data":"fa8b66a769c752600146a2bd21e5db11db65361248dd5b3d212a9269c42ae42b"} Jan 22 06:06:45 crc kubenswrapper[4982]: I0122 06:06:45.572011 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-czx99" Jan 22 06:06:45 crc kubenswrapper[4982]: I0122 06:06:45.583685 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-config-data\") pod \"24c6514b-0780-4a7b-8cb3-697fa2aad8e1\" (UID: \"24c6514b-0780-4a7b-8cb3-697fa2aad8e1\") " Jan 22 06:06:45 crc kubenswrapper[4982]: I0122 06:06:45.583828 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-fernet-keys\") pod \"24c6514b-0780-4a7b-8cb3-697fa2aad8e1\" (UID: \"24c6514b-0780-4a7b-8cb3-697fa2aad8e1\") " Jan 22 06:06:45 crc kubenswrapper[4982]: I0122 06:06:45.583897 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mjhcp\" (UniqueName: \"kubernetes.io/projected/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-kube-api-access-mjhcp\") pod \"24c6514b-0780-4a7b-8cb3-697fa2aad8e1\" (UID: \"24c6514b-0780-4a7b-8cb3-697fa2aad8e1\") " Jan 22 06:06:45 crc kubenswrapper[4982]: I0122 06:06:45.584009 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-scripts\") pod \"24c6514b-0780-4a7b-8cb3-697fa2aad8e1\" (UID: \"24c6514b-0780-4a7b-8cb3-697fa2aad8e1\") " Jan 22 06:06:45 crc kubenswrapper[4982]: I0122 06:06:45.584036 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-credential-keys\") pod \"24c6514b-0780-4a7b-8cb3-697fa2aad8e1\" (UID: \"24c6514b-0780-4a7b-8cb3-697fa2aad8e1\") " Jan 22 06:06:45 crc kubenswrapper[4982]: I0122 06:06:45.585163 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-combined-ca-bundle\") pod \"24c6514b-0780-4a7b-8cb3-697fa2aad8e1\" (UID: \"24c6514b-0780-4a7b-8cb3-697fa2aad8e1\") " Jan 22 06:06:45 crc kubenswrapper[4982]: I0122 06:06:45.593089 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "24c6514b-0780-4a7b-8cb3-697fa2aad8e1" (UID: "24c6514b-0780-4a7b-8cb3-697fa2aad8e1"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:06:45 crc kubenswrapper[4982]: I0122 06:06:45.594692 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "24c6514b-0780-4a7b-8cb3-697fa2aad8e1" (UID: "24c6514b-0780-4a7b-8cb3-697fa2aad8e1"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:06:45 crc kubenswrapper[4982]: I0122 06:06:45.595917 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-scripts" (OuterVolumeSpecName: "scripts") pod "24c6514b-0780-4a7b-8cb3-697fa2aad8e1" (UID: "24c6514b-0780-4a7b-8cb3-697fa2aad8e1"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:06:45 crc kubenswrapper[4982]: I0122 06:06:45.613418 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-kube-api-access-mjhcp" (OuterVolumeSpecName: "kube-api-access-mjhcp") pod "24c6514b-0780-4a7b-8cb3-697fa2aad8e1" (UID: "24c6514b-0780-4a7b-8cb3-697fa2aad8e1"). InnerVolumeSpecName "kube-api-access-mjhcp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:06:45 crc kubenswrapper[4982]: I0122 06:06:45.623137 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-config-data" (OuterVolumeSpecName: "config-data") pod "24c6514b-0780-4a7b-8cb3-697fa2aad8e1" (UID: "24c6514b-0780-4a7b-8cb3-697fa2aad8e1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:06:45 crc kubenswrapper[4982]: I0122 06:06:45.629903 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "24c6514b-0780-4a7b-8cb3-697fa2aad8e1" (UID: "24c6514b-0780-4a7b-8cb3-697fa2aad8e1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:06:45 crc kubenswrapper[4982]: I0122 06:06:45.687618 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:45 crc kubenswrapper[4982]: I0122 06:06:45.687652 4982 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:45 crc kubenswrapper[4982]: I0122 06:06:45.687670 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mjhcp\" (UniqueName: \"kubernetes.io/projected/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-kube-api-access-mjhcp\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:45 crc kubenswrapper[4982]: I0122 06:06:45.687679 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:45 crc kubenswrapper[4982]: I0122 06:06:45.687689 4982 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:45 crc kubenswrapper[4982]: I0122 06:06:45.687698 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24c6514b-0780-4a7b-8cb3-697fa2aad8e1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:46 crc kubenswrapper[4982]: I0122 06:06:46.353815 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-czx99" event={"ID":"24c6514b-0780-4a7b-8cb3-697fa2aad8e1","Type":"ContainerDied","Data":"8cb57d5e2e13ae600788445714c7256a418b92bbbd46e94ad58c503149dc72fc"} Jan 22 06:06:46 crc kubenswrapper[4982]: I0122 06:06:46.353882 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8cb57d5e2e13ae600788445714c7256a418b92bbbd46e94ad58c503149dc72fc" Jan 22 06:06:46 crc kubenswrapper[4982]: I0122 
06:06:46.353947 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-czx99" Jan 22 06:06:46 crc kubenswrapper[4982]: I0122 06:06:46.425096 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb" Jan 22 06:06:46 crc kubenswrapper[4982]: I0122 06:06:46.523033 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67fdf7998c-xxcdd"] Jan 22 06:06:46 crc kubenswrapper[4982]: I0122 06:06:46.523579 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-67fdf7998c-xxcdd" podUID="e5d4bc07-60ec-4fc9-9725-d80b94777e28" containerName="dnsmasq-dns" containerID="cri-o://cd46c815d9750e347bd39cdb53bc6764234032f78ba9d8b386f5799824377438" gracePeriod=10 Jan 22 06:06:46 crc kubenswrapper[4982]: I0122 06:06:46.659328 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-czx99"] Jan 22 06:06:46 crc kubenswrapper[4982]: I0122 06:06:46.665867 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-czx99"] Jan 22 06:06:46 crc kubenswrapper[4982]: I0122 06:06:46.756786 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-z27bt"] Jan 22 06:06:46 crc kubenswrapper[4982]: E0122 06:06:46.757094 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34b58f4a-a0ac-4603-b0bf-355128fa6d31" containerName="dnsmasq-dns" Jan 22 06:06:46 crc kubenswrapper[4982]: I0122 06:06:46.757111 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="34b58f4a-a0ac-4603-b0bf-355128fa6d31" containerName="dnsmasq-dns" Jan 22 06:06:46 crc kubenswrapper[4982]: E0122 06:06:46.757128 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72e09197-e7f0-42df-ae48-fcb660b191cd" containerName="init" Jan 22 06:06:46 crc kubenswrapper[4982]: I0122 06:06:46.757135 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="72e09197-e7f0-42df-ae48-fcb660b191cd" containerName="init" Jan 22 06:06:46 crc kubenswrapper[4982]: E0122 06:06:46.757154 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24c6514b-0780-4a7b-8cb3-697fa2aad8e1" containerName="keystone-bootstrap" Jan 22 06:06:46 crc kubenswrapper[4982]: I0122 06:06:46.757160 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="24c6514b-0780-4a7b-8cb3-697fa2aad8e1" containerName="keystone-bootstrap" Jan 22 06:06:46 crc kubenswrapper[4982]: E0122 06:06:46.757177 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34b58f4a-a0ac-4603-b0bf-355128fa6d31" containerName="init" Jan 22 06:06:46 crc kubenswrapper[4982]: I0122 06:06:46.757183 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="34b58f4a-a0ac-4603-b0bf-355128fa6d31" containerName="init" Jan 22 06:06:46 crc kubenswrapper[4982]: I0122 06:06:46.757319 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="24c6514b-0780-4a7b-8cb3-697fa2aad8e1" containerName="keystone-bootstrap" Jan 22 06:06:46 crc kubenswrapper[4982]: I0122 06:06:46.757336 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="34b58f4a-a0ac-4603-b0bf-355128fa6d31" containerName="dnsmasq-dns" Jan 22 06:06:46 crc kubenswrapper[4982]: I0122 06:06:46.757353 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="72e09197-e7f0-42df-ae48-fcb660b191cd" containerName="init" Jan 22 06:06:46 crc kubenswrapper[4982]: I0122 06:06:46.757834 4982 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-z27bt" Jan 22 06:06:46 crc kubenswrapper[4982]: I0122 06:06:46.760711 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 22 06:06:46 crc kubenswrapper[4982]: I0122 06:06:46.761141 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 22 06:06:46 crc kubenswrapper[4982]: I0122 06:06:46.761431 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Jan 22 06:06:46 crc kubenswrapper[4982]: I0122 06:06:46.761592 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 22 06:06:46 crc kubenswrapper[4982]: I0122 06:06:46.761737 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-jckfq" Jan 22 06:06:46 crc kubenswrapper[4982]: I0122 06:06:46.771869 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-z27bt"] Jan 22 06:06:46 crc kubenswrapper[4982]: I0122 06:06:46.908178 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-fernet-keys\") pod \"keystone-bootstrap-z27bt\" (UID: \"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1\") " pod="openstack/keystone-bootstrap-z27bt" Jan 22 06:06:46 crc kubenswrapper[4982]: I0122 06:06:46.908249 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-credential-keys\") pod \"keystone-bootstrap-z27bt\" (UID: \"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1\") " pod="openstack/keystone-bootstrap-z27bt" Jan 22 06:06:46 crc kubenswrapper[4982]: I0122 06:06:46.908304 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-scripts\") pod \"keystone-bootstrap-z27bt\" (UID: \"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1\") " pod="openstack/keystone-bootstrap-z27bt" Jan 22 06:06:46 crc kubenswrapper[4982]: I0122 06:06:46.908349 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-config-data\") pod \"keystone-bootstrap-z27bt\" (UID: \"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1\") " pod="openstack/keystone-bootstrap-z27bt" Jan 22 06:06:46 crc kubenswrapper[4982]: I0122 06:06:46.908575 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-combined-ca-bundle\") pod \"keystone-bootstrap-z27bt\" (UID: \"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1\") " pod="openstack/keystone-bootstrap-z27bt" Jan 22 06:06:46 crc kubenswrapper[4982]: I0122 06:06:46.908650 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqvk9\" (UniqueName: \"kubernetes.io/projected/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-kube-api-access-lqvk9\") pod \"keystone-bootstrap-z27bt\" (UID: \"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1\") " pod="openstack/keystone-bootstrap-z27bt" Jan 22 06:06:47 crc kubenswrapper[4982]: I0122 06:06:47.009325 4982 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-lqvk9\" (UniqueName: \"kubernetes.io/projected/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-kube-api-access-lqvk9\") pod \"keystone-bootstrap-z27bt\" (UID: \"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1\") " pod="openstack/keystone-bootstrap-z27bt" Jan 22 06:06:47 crc kubenswrapper[4982]: I0122 06:06:47.009398 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-fernet-keys\") pod \"keystone-bootstrap-z27bt\" (UID: \"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1\") " pod="openstack/keystone-bootstrap-z27bt" Jan 22 06:06:47 crc kubenswrapper[4982]: I0122 06:06:47.009431 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-credential-keys\") pod \"keystone-bootstrap-z27bt\" (UID: \"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1\") " pod="openstack/keystone-bootstrap-z27bt" Jan 22 06:06:47 crc kubenswrapper[4982]: I0122 06:06:47.009462 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-scripts\") pod \"keystone-bootstrap-z27bt\" (UID: \"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1\") " pod="openstack/keystone-bootstrap-z27bt" Jan 22 06:06:47 crc kubenswrapper[4982]: I0122 06:06:47.009488 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-config-data\") pod \"keystone-bootstrap-z27bt\" (UID: \"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1\") " pod="openstack/keystone-bootstrap-z27bt" Jan 22 06:06:47 crc kubenswrapper[4982]: I0122 06:06:47.010261 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-combined-ca-bundle\") pod \"keystone-bootstrap-z27bt\" (UID: \"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1\") " pod="openstack/keystone-bootstrap-z27bt" Jan 22 06:06:47 crc kubenswrapper[4982]: I0122 06:06:47.015217 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-credential-keys\") pod \"keystone-bootstrap-z27bt\" (UID: \"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1\") " pod="openstack/keystone-bootstrap-z27bt" Jan 22 06:06:47 crc kubenswrapper[4982]: I0122 06:06:47.015370 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-fernet-keys\") pod \"keystone-bootstrap-z27bt\" (UID: \"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1\") " pod="openstack/keystone-bootstrap-z27bt" Jan 22 06:06:47 crc kubenswrapper[4982]: I0122 06:06:47.017197 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-config-data\") pod \"keystone-bootstrap-z27bt\" (UID: \"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1\") " pod="openstack/keystone-bootstrap-z27bt" Jan 22 06:06:47 crc kubenswrapper[4982]: I0122 06:06:47.020329 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-scripts\") pod \"keystone-bootstrap-z27bt\" (UID: 
\"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1\") " pod="openstack/keystone-bootstrap-z27bt" Jan 22 06:06:47 crc kubenswrapper[4982]: I0122 06:06:47.022776 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-combined-ca-bundle\") pod \"keystone-bootstrap-z27bt\" (UID: \"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1\") " pod="openstack/keystone-bootstrap-z27bt" Jan 22 06:06:47 crc kubenswrapper[4982]: I0122 06:06:47.025322 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqvk9\" (UniqueName: \"kubernetes.io/projected/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-kube-api-access-lqvk9\") pod \"keystone-bootstrap-z27bt\" (UID: \"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1\") " pod="openstack/keystone-bootstrap-z27bt" Jan 22 06:06:47 crc kubenswrapper[4982]: I0122 06:06:47.091816 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-z27bt" Jan 22 06:06:47 crc kubenswrapper[4982]: I0122 06:06:47.365079 4982 generic.go:334] "Generic (PLEG): container finished" podID="e5d4bc07-60ec-4fc9-9725-d80b94777e28" containerID="cd46c815d9750e347bd39cdb53bc6764234032f78ba9d8b386f5799824377438" exitCode=0 Jan 22 06:06:47 crc kubenswrapper[4982]: I0122 06:06:47.365127 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67fdf7998c-xxcdd" event={"ID":"e5d4bc07-60ec-4fc9-9725-d80b94777e28","Type":"ContainerDied","Data":"cd46c815d9750e347bd39cdb53bc6764234032f78ba9d8b386f5799824377438"} Jan 22 06:06:47 crc kubenswrapper[4982]: I0122 06:06:47.740780 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24c6514b-0780-4a7b-8cb3-697fa2aad8e1" path="/var/lib/kubelet/pods/24c6514b-0780-4a7b-8cb3-697fa2aad8e1/volumes" Jan 22 06:06:48 crc kubenswrapper[4982]: I0122 06:06:48.974258 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 06:06:48 crc kubenswrapper[4982]: I0122 06:06:48.974313 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 06:06:48 crc kubenswrapper[4982]: I0122 06:06:48.974351 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 06:06:48 crc kubenswrapper[4982]: I0122 06:06:48.975038 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e9300e314dc7d0c41bd73af6871e2c53b183fc3419e142f570e6c588d56f45bc"} pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 06:06:48 crc kubenswrapper[4982]: I0122 06:06:48.975088 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" 
containerID="cri-o://e9300e314dc7d0c41bd73af6871e2c53b183fc3419e142f570e6c588d56f45bc" gracePeriod=600 Jan 22 06:06:49 crc kubenswrapper[4982]: I0122 06:06:49.383758 4982 generic.go:334] "Generic (PLEG): container finished" podID="2829369e-72ba-4637-853b-88f5cf242a0e" containerID="e9300e314dc7d0c41bd73af6871e2c53b183fc3419e142f570e6c588d56f45bc" exitCode=0 Jan 22 06:06:49 crc kubenswrapper[4982]: I0122 06:06:49.383799 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerDied","Data":"e9300e314dc7d0c41bd73af6871e2c53b183fc3419e142f570e6c588d56f45bc"} Jan 22 06:06:49 crc kubenswrapper[4982]: I0122 06:06:49.383830 4982 scope.go:117] "RemoveContainer" containerID="249cd03cbc96f310822cba8e3d1a005f3e86ad00b55fa38e84dd3ee093cec041" Jan 22 06:06:51 crc kubenswrapper[4982]: I0122 06:06:51.218108 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-67fdf7998c-xxcdd" podUID="e5d4bc07-60ec-4fc9-9725-d80b94777e28" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.116:5353: connect: connection refused" Jan 22 06:06:55 crc kubenswrapper[4982]: I0122 06:06:55.429444 4982 generic.go:334] "Generic (PLEG): container finished" podID="4c49756b-5f06-470b-9bc5-281b5bfbb198" containerID="e1876ee24c9fffd277bce8275d6388beac19ae4bb7662cbc6970d20f3f974e3c" exitCode=0 Jan 22 06:06:55 crc kubenswrapper[4982]: I0122 06:06:55.429561 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-z7b8c" event={"ID":"4c49756b-5f06-470b-9bc5-281b5bfbb198","Type":"ContainerDied","Data":"e1876ee24c9fffd277bce8275d6388beac19ae4bb7662cbc6970d20f3f974e3c"} Jan 22 06:06:56 crc kubenswrapper[4982]: E0122 06:06:56.032837 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b59b7445e581cc720038107e421371c86c5765b2967e77d884ef29b1d9fd0f49" Jan 22 06:06:56 crc kubenswrapper[4982]: E0122 06:06:56.033408 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b59b7445e581cc720038107e421371c86c5765b2967e77d884ef29b1d9fd0f49,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6tbjz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-vtfmh_openstack(1f97eee4-4c10-4f1e-8173-2f8d1c955049): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 06:06:56 crc kubenswrapper[4982]: E0122 06:06:56.034702 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-vtfmh" podUID="1f97eee4-4c10-4f1e-8173-2f8d1c955049" Jan 22 06:06:56 crc kubenswrapper[4982]: I0122 06:06:56.118346 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67fdf7998c-xxcdd" Jan 22 06:06:56 crc kubenswrapper[4982]: I0122 06:06:56.272408 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e5d4bc07-60ec-4fc9-9725-d80b94777e28-ovsdbserver-sb\") pod \"e5d4bc07-60ec-4fc9-9725-d80b94777e28\" (UID: \"e5d4bc07-60ec-4fc9-9725-d80b94777e28\") " Jan 22 06:06:56 crc kubenswrapper[4982]: I0122 06:06:56.272687 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e5d4bc07-60ec-4fc9-9725-d80b94777e28-ovsdbserver-nb\") pod \"e5d4bc07-60ec-4fc9-9725-d80b94777e28\" (UID: \"e5d4bc07-60ec-4fc9-9725-d80b94777e28\") " Jan 22 06:06:56 crc kubenswrapper[4982]: I0122 06:06:56.272739 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e5d4bc07-60ec-4fc9-9725-d80b94777e28-dns-svc\") pod \"e5d4bc07-60ec-4fc9-9725-d80b94777e28\" (UID: \"e5d4bc07-60ec-4fc9-9725-d80b94777e28\") " Jan 22 06:06:56 crc kubenswrapper[4982]: I0122 06:06:56.272807 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bm6rn\" (UniqueName: \"kubernetes.io/projected/e5d4bc07-60ec-4fc9-9725-d80b94777e28-kube-api-access-bm6rn\") pod \"e5d4bc07-60ec-4fc9-9725-d80b94777e28\" (UID: \"e5d4bc07-60ec-4fc9-9725-d80b94777e28\") " Jan 22 06:06:56 crc kubenswrapper[4982]: I0122 06:06:56.272881 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5d4bc07-60ec-4fc9-9725-d80b94777e28-config\") pod \"e5d4bc07-60ec-4fc9-9725-d80b94777e28\" (UID: \"e5d4bc07-60ec-4fc9-9725-d80b94777e28\") " Jan 22 06:06:56 crc kubenswrapper[4982]: I0122 06:06:56.276994 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5d4bc07-60ec-4fc9-9725-d80b94777e28-kube-api-access-bm6rn" (OuterVolumeSpecName: "kube-api-access-bm6rn") pod "e5d4bc07-60ec-4fc9-9725-d80b94777e28" (UID: "e5d4bc07-60ec-4fc9-9725-d80b94777e28"). InnerVolumeSpecName "kube-api-access-bm6rn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:06:56 crc kubenswrapper[4982]: I0122 06:06:56.327468 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5d4bc07-60ec-4fc9-9725-d80b94777e28-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e5d4bc07-60ec-4fc9-9725-d80b94777e28" (UID: "e5d4bc07-60ec-4fc9-9725-d80b94777e28"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:56 crc kubenswrapper[4982]: I0122 06:06:56.342646 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5d4bc07-60ec-4fc9-9725-d80b94777e28-config" (OuterVolumeSpecName: "config") pod "e5d4bc07-60ec-4fc9-9725-d80b94777e28" (UID: "e5d4bc07-60ec-4fc9-9725-d80b94777e28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:56 crc kubenswrapper[4982]: I0122 06:06:56.372083 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5d4bc07-60ec-4fc9-9725-d80b94777e28-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e5d4bc07-60ec-4fc9-9725-d80b94777e28" (UID: "e5d4bc07-60ec-4fc9-9725-d80b94777e28"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:56 crc kubenswrapper[4982]: I0122 06:06:56.375397 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bm6rn\" (UniqueName: \"kubernetes.io/projected/e5d4bc07-60ec-4fc9-9725-d80b94777e28-kube-api-access-bm6rn\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:56 crc kubenswrapper[4982]: I0122 06:06:56.375498 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5d4bc07-60ec-4fc9-9725-d80b94777e28-config\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:56 crc kubenswrapper[4982]: I0122 06:06:56.375567 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e5d4bc07-60ec-4fc9-9725-d80b94777e28-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:56 crc kubenswrapper[4982]: I0122 06:06:56.375627 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e5d4bc07-60ec-4fc9-9725-d80b94777e28-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:56 crc kubenswrapper[4982]: I0122 06:06:56.391066 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5d4bc07-60ec-4fc9-9725-d80b94777e28-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e5d4bc07-60ec-4fc9-9725-d80b94777e28" (UID: "e5d4bc07-60ec-4fc9-9725-d80b94777e28"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:06:56 crc kubenswrapper[4982]: I0122 06:06:56.440001 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-j4fnn" event={"ID":"f985360b-8866-457a-953d-2d15a1e2d20e","Type":"ContainerStarted","Data":"60c7b99cef0a8b8fb2b268e986ee0042e7076f1e35e494fec4e8f5a27613ec40"} Jan 22 06:06:56 crc kubenswrapper[4982]: I0122 06:06:56.443969 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"7453d7468d19fce4345431851e8d2b69d7c8b233d592ee6ece86707b120d7771"} Jan 22 06:06:56 crc kubenswrapper[4982]: I0122 06:06:56.446581 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67fdf7998c-xxcdd" Jan 22 06:06:56 crc kubenswrapper[4982]: I0122 06:06:56.446592 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67fdf7998c-xxcdd" event={"ID":"e5d4bc07-60ec-4fc9-9725-d80b94777e28","Type":"ContainerDied","Data":"d0c9a5aec612d324396014a61b246930d83e9078d0a9f1d020de34b563a12513"} Jan 22 06:06:56 crc kubenswrapper[4982]: I0122 06:06:56.446668 4982 scope.go:117] "RemoveContainer" containerID="cd46c815d9750e347bd39cdb53bc6764234032f78ba9d8b386f5799824377438" Jan 22 06:06:56 crc kubenswrapper[4982]: I0122 06:06:56.447925 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"eba6d2d0-f7a0-4865-8b11-bdb9b302271c","Type":"ContainerStarted","Data":"b5fd44836c00103f4aa7632ae7a10d9c1ac522714666404305ba574e07457afd"} Jan 22 06:06:56 crc kubenswrapper[4982]: I0122 06:06:56.452013 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-rsp2k" event={"ID":"60dd52f0-a379-40e6-b2e3-dcd5c296bc59","Type":"ContainerStarted","Data":"f03d5187559edee59c68b9b8f3273c1775dd301a33e96ee5797bda602ce3d7bc"} Jan 22 06:06:56 crc kubenswrapper[4982]: E0122 06:06:56.457798 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b59b7445e581cc720038107e421371c86c5765b2967e77d884ef29b1d9fd0f49\\\"\"" pod="openstack/cinder-db-sync-vtfmh" podUID="1f97eee4-4c10-4f1e-8173-2f8d1c955049" Jan 22 06:06:56 crc kubenswrapper[4982]: I0122 06:06:56.469912 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-z27bt"] Jan 22 06:06:56 crc kubenswrapper[4982]: I0122 06:06:56.474390 4982 scope.go:117] "RemoveContainer" containerID="26cb56f170d7707dc66cad45b6af1d1edc1928c274d8d16583356e98c5ce26ef" Jan 22 06:06:56 crc kubenswrapper[4982]: I0122 06:06:56.474923 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-j4fnn" podStartSLOduration=2.554651417 podStartE2EDuration="21.474902645s" podCreationTimestamp="2026-01-22 06:06:35 +0000 UTC" firstStartedPulling="2026-01-22 06:06:37.106628408 +0000 UTC m=+1257.945266411" lastFinishedPulling="2026-01-22 06:06:56.026879636 +0000 UTC m=+1276.865517639" observedRunningTime="2026-01-22 06:06:56.464539505 +0000 UTC m=+1277.303177518" watchObservedRunningTime="2026-01-22 06:06:56.474902645 +0000 UTC m=+1277.313540658" Jan 22 06:06:56 crc kubenswrapper[4982]: I0122 06:06:56.479213 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e5d4bc07-60ec-4fc9-9725-d80b94777e28-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:56 crc kubenswrapper[4982]: I0122 06:06:56.528448 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-rsp2k" podStartSLOduration=2.393240207 podStartE2EDuration="21.528428032s" podCreationTimestamp="2026-01-22 06:06:35 +0000 UTC" firstStartedPulling="2026-01-22 06:06:36.82845731 +0000 UTC m=+1257.667095313" lastFinishedPulling="2026-01-22 06:06:55.963645115 +0000 UTC m=+1276.802283138" observedRunningTime="2026-01-22 06:06:56.519900979 +0000 UTC m=+1277.358539002" watchObservedRunningTime="2026-01-22 06:06:56.528428032 +0000 UTC m=+1277.367066035" Jan 22 06:06:56 crc kubenswrapper[4982]: I0122 06:06:56.558948 4982 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67fdf7998c-xxcdd"] Jan 22 06:06:56 crc kubenswrapper[4982]: I0122 06:06:56.569581 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-67fdf7998c-xxcdd"] Jan 22 06:06:56 crc kubenswrapper[4982]: I0122 06:06:56.908976 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-z7b8c" Jan 22 06:06:57 crc kubenswrapper[4982]: I0122 06:06:57.091176 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c49756b-5f06-470b-9bc5-281b5bfbb198-combined-ca-bundle\") pod \"4c49756b-5f06-470b-9bc5-281b5bfbb198\" (UID: \"4c49756b-5f06-470b-9bc5-281b5bfbb198\") " Jan 22 06:06:57 crc kubenswrapper[4982]: I0122 06:06:57.091342 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4c49756b-5f06-470b-9bc5-281b5bfbb198-db-sync-config-data\") pod \"4c49756b-5f06-470b-9bc5-281b5bfbb198\" (UID: \"4c49756b-5f06-470b-9bc5-281b5bfbb198\") " Jan 22 06:06:57 crc kubenswrapper[4982]: I0122 06:06:57.091432 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zfjfm\" (UniqueName: \"kubernetes.io/projected/4c49756b-5f06-470b-9bc5-281b5bfbb198-kube-api-access-zfjfm\") pod \"4c49756b-5f06-470b-9bc5-281b5bfbb198\" (UID: \"4c49756b-5f06-470b-9bc5-281b5bfbb198\") " Jan 22 06:06:57 crc kubenswrapper[4982]: I0122 06:06:57.091526 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c49756b-5f06-470b-9bc5-281b5bfbb198-config-data\") pod \"4c49756b-5f06-470b-9bc5-281b5bfbb198\" (UID: \"4c49756b-5f06-470b-9bc5-281b5bfbb198\") " Jan 22 06:06:57 crc kubenswrapper[4982]: I0122 06:06:57.096551 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c49756b-5f06-470b-9bc5-281b5bfbb198-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "4c49756b-5f06-470b-9bc5-281b5bfbb198" (UID: "4c49756b-5f06-470b-9bc5-281b5bfbb198"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:06:57 crc kubenswrapper[4982]: I0122 06:06:57.109500 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c49756b-5f06-470b-9bc5-281b5bfbb198-kube-api-access-zfjfm" (OuterVolumeSpecName: "kube-api-access-zfjfm") pod "4c49756b-5f06-470b-9bc5-281b5bfbb198" (UID: "4c49756b-5f06-470b-9bc5-281b5bfbb198"). InnerVolumeSpecName "kube-api-access-zfjfm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:06:57 crc kubenswrapper[4982]: I0122 06:06:57.130554 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c49756b-5f06-470b-9bc5-281b5bfbb198-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4c49756b-5f06-470b-9bc5-281b5bfbb198" (UID: "4c49756b-5f06-470b-9bc5-281b5bfbb198"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:06:57 crc kubenswrapper[4982]: I0122 06:06:57.156599 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c49756b-5f06-470b-9bc5-281b5bfbb198-config-data" (OuterVolumeSpecName: "config-data") pod "4c49756b-5f06-470b-9bc5-281b5bfbb198" (UID: "4c49756b-5f06-470b-9bc5-281b5bfbb198"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:06:57 crc kubenswrapper[4982]: I0122 06:06:57.193749 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c49756b-5f06-470b-9bc5-281b5bfbb198-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:57 crc kubenswrapper[4982]: I0122 06:06:57.193782 4982 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4c49756b-5f06-470b-9bc5-281b5bfbb198-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:57 crc kubenswrapper[4982]: I0122 06:06:57.193792 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zfjfm\" (UniqueName: \"kubernetes.io/projected/4c49756b-5f06-470b-9bc5-281b5bfbb198-kube-api-access-zfjfm\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:57 crc kubenswrapper[4982]: I0122 06:06:57.194097 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c49756b-5f06-470b-9bc5-281b5bfbb198-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:06:57 crc kubenswrapper[4982]: I0122 06:06:57.489240 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-z7b8c" event={"ID":"4c49756b-5f06-470b-9bc5-281b5bfbb198","Type":"ContainerDied","Data":"6a3b7bc3fc11e7a387f059fed586d444313270785dac7ec63bef01f808835e39"} Jan 22 06:06:57 crc kubenswrapper[4982]: I0122 06:06:57.489279 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6a3b7bc3fc11e7a387f059fed586d444313270785dac7ec63bef01f808835e39" Jan 22 06:06:57 crc kubenswrapper[4982]: I0122 06:06:57.489342 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-z7b8c" Jan 22 06:06:57 crc kubenswrapper[4982]: I0122 06:06:57.491807 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-z27bt" event={"ID":"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1","Type":"ContainerStarted","Data":"6a0c35338e2f5b356715e798df704abdc39b26477bc7c13626e347a37d3bb802"} Jan 22 06:06:57 crc kubenswrapper[4982]: I0122 06:06:57.491876 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-z27bt" event={"ID":"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1","Type":"ContainerStarted","Data":"f33be20111b91b65aa462322f86df1bc84918b39bddf71ba500c7e1afdacb7c9"} Jan 22 06:06:57 crc kubenswrapper[4982]: I0122 06:06:57.517458 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-z27bt" podStartSLOduration=11.517439497 podStartE2EDuration="11.517439497s" podCreationTimestamp="2026-01-22 06:06:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:06:57.510450505 +0000 UTC m=+1278.349088568" watchObservedRunningTime="2026-01-22 06:06:57.517439497 +0000 UTC m=+1278.356077500" Jan 22 06:06:57 crc kubenswrapper[4982]: I0122 06:06:57.743131 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5d4bc07-60ec-4fc9-9725-d80b94777e28" path="/var/lib/kubelet/pods/e5d4bc07-60ec-4fc9-9725-d80b94777e28/volumes" Jan 22 06:06:57 crc kubenswrapper[4982]: I0122 06:06:57.835089 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6f6f8cb849-97n8z"] Jan 22 06:06:57 crc kubenswrapper[4982]: E0122 06:06:57.835480 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5d4bc07-60ec-4fc9-9725-d80b94777e28" containerName="dnsmasq-dns" Jan 22 06:06:57 crc kubenswrapper[4982]: I0122 06:06:57.835499 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5d4bc07-60ec-4fc9-9725-d80b94777e28" containerName="dnsmasq-dns" Jan 22 06:06:57 crc kubenswrapper[4982]: E0122 06:06:57.835523 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5d4bc07-60ec-4fc9-9725-d80b94777e28" containerName="init" Jan 22 06:06:57 crc kubenswrapper[4982]: I0122 06:06:57.835530 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5d4bc07-60ec-4fc9-9725-d80b94777e28" containerName="init" Jan 22 06:06:57 crc kubenswrapper[4982]: E0122 06:06:57.835551 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c49756b-5f06-470b-9bc5-281b5bfbb198" containerName="glance-db-sync" Jan 22 06:06:57 crc kubenswrapper[4982]: I0122 06:06:57.835557 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c49756b-5f06-470b-9bc5-281b5bfbb198" containerName="glance-db-sync" Jan 22 06:06:57 crc kubenswrapper[4982]: I0122 06:06:57.835701 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c49756b-5f06-470b-9bc5-281b5bfbb198" containerName="glance-db-sync" Jan 22 06:06:57 crc kubenswrapper[4982]: I0122 06:06:57.835718 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5d4bc07-60ec-4fc9-9725-d80b94777e28" containerName="dnsmasq-dns" Jan 22 06:06:57 crc kubenswrapper[4982]: I0122 06:06:57.836496 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6f6f8cb849-97n8z" Jan 22 06:06:57 crc kubenswrapper[4982]: I0122 06:06:57.861189 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6f6f8cb849-97n8z"] Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.010845 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r25s4\" (UniqueName: \"kubernetes.io/projected/2d901ec6-c01e-4859-930d-04e5ceac9e04-kube-api-access-r25s4\") pod \"dnsmasq-dns-6f6f8cb849-97n8z\" (UID: \"2d901ec6-c01e-4859-930d-04e5ceac9e04\") " pod="openstack/dnsmasq-dns-6f6f8cb849-97n8z" Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.010983 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2d901ec6-c01e-4859-930d-04e5ceac9e04-ovsdbserver-nb\") pod \"dnsmasq-dns-6f6f8cb849-97n8z\" (UID: \"2d901ec6-c01e-4859-930d-04e5ceac9e04\") " pod="openstack/dnsmasq-dns-6f6f8cb849-97n8z" Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.011217 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2d901ec6-c01e-4859-930d-04e5ceac9e04-ovsdbserver-sb\") pod \"dnsmasq-dns-6f6f8cb849-97n8z\" (UID: \"2d901ec6-c01e-4859-930d-04e5ceac9e04\") " pod="openstack/dnsmasq-dns-6f6f8cb849-97n8z" Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.011281 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d901ec6-c01e-4859-930d-04e5ceac9e04-config\") pod \"dnsmasq-dns-6f6f8cb849-97n8z\" (UID: \"2d901ec6-c01e-4859-930d-04e5ceac9e04\") " pod="openstack/dnsmasq-dns-6f6f8cb849-97n8z" Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.011450 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2d901ec6-c01e-4859-930d-04e5ceac9e04-dns-svc\") pod \"dnsmasq-dns-6f6f8cb849-97n8z\" (UID: \"2d901ec6-c01e-4859-930d-04e5ceac9e04\") " pod="openstack/dnsmasq-dns-6f6f8cb849-97n8z" Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.011533 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2d901ec6-c01e-4859-930d-04e5ceac9e04-dns-swift-storage-0\") pod \"dnsmasq-dns-6f6f8cb849-97n8z\" (UID: \"2d901ec6-c01e-4859-930d-04e5ceac9e04\") " pod="openstack/dnsmasq-dns-6f6f8cb849-97n8z" Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.113532 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2d901ec6-c01e-4859-930d-04e5ceac9e04-dns-svc\") pod \"dnsmasq-dns-6f6f8cb849-97n8z\" (UID: \"2d901ec6-c01e-4859-930d-04e5ceac9e04\") " pod="openstack/dnsmasq-dns-6f6f8cb849-97n8z" Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.113922 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2d901ec6-c01e-4859-930d-04e5ceac9e04-dns-swift-storage-0\") pod \"dnsmasq-dns-6f6f8cb849-97n8z\" (UID: \"2d901ec6-c01e-4859-930d-04e5ceac9e04\") " pod="openstack/dnsmasq-dns-6f6f8cb849-97n8z" Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.113982 4982 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-r25s4\" (UniqueName: \"kubernetes.io/projected/2d901ec6-c01e-4859-930d-04e5ceac9e04-kube-api-access-r25s4\") pod \"dnsmasq-dns-6f6f8cb849-97n8z\" (UID: \"2d901ec6-c01e-4859-930d-04e5ceac9e04\") " pod="openstack/dnsmasq-dns-6f6f8cb849-97n8z" Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.114011 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2d901ec6-c01e-4859-930d-04e5ceac9e04-ovsdbserver-nb\") pod \"dnsmasq-dns-6f6f8cb849-97n8z\" (UID: \"2d901ec6-c01e-4859-930d-04e5ceac9e04\") " pod="openstack/dnsmasq-dns-6f6f8cb849-97n8z" Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.114100 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2d901ec6-c01e-4859-930d-04e5ceac9e04-ovsdbserver-sb\") pod \"dnsmasq-dns-6f6f8cb849-97n8z\" (UID: \"2d901ec6-c01e-4859-930d-04e5ceac9e04\") " pod="openstack/dnsmasq-dns-6f6f8cb849-97n8z" Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.114137 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d901ec6-c01e-4859-930d-04e5ceac9e04-config\") pod \"dnsmasq-dns-6f6f8cb849-97n8z\" (UID: \"2d901ec6-c01e-4859-930d-04e5ceac9e04\") " pod="openstack/dnsmasq-dns-6f6f8cb849-97n8z" Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.114487 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2d901ec6-c01e-4859-930d-04e5ceac9e04-dns-svc\") pod \"dnsmasq-dns-6f6f8cb849-97n8z\" (UID: \"2d901ec6-c01e-4859-930d-04e5ceac9e04\") " pod="openstack/dnsmasq-dns-6f6f8cb849-97n8z" Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.115113 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d901ec6-c01e-4859-930d-04e5ceac9e04-config\") pod \"dnsmasq-dns-6f6f8cb849-97n8z\" (UID: \"2d901ec6-c01e-4859-930d-04e5ceac9e04\") " pod="openstack/dnsmasq-dns-6f6f8cb849-97n8z" Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.115305 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2d901ec6-c01e-4859-930d-04e5ceac9e04-dns-swift-storage-0\") pod \"dnsmasq-dns-6f6f8cb849-97n8z\" (UID: \"2d901ec6-c01e-4859-930d-04e5ceac9e04\") " pod="openstack/dnsmasq-dns-6f6f8cb849-97n8z" Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.115376 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2d901ec6-c01e-4859-930d-04e5ceac9e04-ovsdbserver-nb\") pod \"dnsmasq-dns-6f6f8cb849-97n8z\" (UID: \"2d901ec6-c01e-4859-930d-04e5ceac9e04\") " pod="openstack/dnsmasq-dns-6f6f8cb849-97n8z" Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.115455 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2d901ec6-c01e-4859-930d-04e5ceac9e04-ovsdbserver-sb\") pod \"dnsmasq-dns-6f6f8cb849-97n8z\" (UID: \"2d901ec6-c01e-4859-930d-04e5ceac9e04\") " pod="openstack/dnsmasq-dns-6f6f8cb849-97n8z" Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.132802 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r25s4\" (UniqueName: 
\"kubernetes.io/projected/2d901ec6-c01e-4859-930d-04e5ceac9e04-kube-api-access-r25s4\") pod \"dnsmasq-dns-6f6f8cb849-97n8z\" (UID: \"2d901ec6-c01e-4859-930d-04e5ceac9e04\") " pod="openstack/dnsmasq-dns-6f6f8cb849-97n8z" Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.154258 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6f6f8cb849-97n8z" Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.500742 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"eba6d2d0-f7a0-4865-8b11-bdb9b302271c","Type":"ContainerStarted","Data":"e9596b692e4b9a1bfa6cdb69447d7e189d8474dcede7412f8228fc0fba69f8be"} Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.770340 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.772938 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.774778 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-jk7tf" Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.782330 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.782506 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.787138 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.864233 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6f6f8cb849-97n8z"] Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.933827 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4295ac1c-eaee-4720-8384-c112e1f991c3-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4295ac1c-eaee-4720-8384-c112e1f991c3\") " pod="openstack/glance-default-external-api-0" Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.934173 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4295ac1c-eaee-4720-8384-c112e1f991c3-config-data\") pod \"glance-default-external-api-0\" (UID: \"4295ac1c-eaee-4720-8384-c112e1f991c3\") " pod="openstack/glance-default-external-api-0" Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.934191 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4ssfc\" (UniqueName: \"kubernetes.io/projected/4295ac1c-eaee-4720-8384-c112e1f991c3-kube-api-access-4ssfc\") pod \"glance-default-external-api-0\" (UID: \"4295ac1c-eaee-4720-8384-c112e1f991c3\") " pod="openstack/glance-default-external-api-0" Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.934233 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4295ac1c-eaee-4720-8384-c112e1f991c3-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4295ac1c-eaee-4720-8384-c112e1f991c3\") " pod="openstack/glance-default-external-api-0" Jan 22 06:06:58 crc 
kubenswrapper[4982]: I0122 06:06:58.934254 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4295ac1c-eaee-4720-8384-c112e1f991c3-logs\") pod \"glance-default-external-api-0\" (UID: \"4295ac1c-eaee-4720-8384-c112e1f991c3\") " pod="openstack/glance-default-external-api-0" Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.934274 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4295ac1c-eaee-4720-8384-c112e1f991c3-scripts\") pod \"glance-default-external-api-0\" (UID: \"4295ac1c-eaee-4720-8384-c112e1f991c3\") " pod="openstack/glance-default-external-api-0" Jan 22 06:06:58 crc kubenswrapper[4982]: I0122 06:06:58.934290 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"4295ac1c-eaee-4720-8384-c112e1f991c3\") " pod="openstack/glance-default-external-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.035691 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4295ac1c-eaee-4720-8384-c112e1f991c3-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4295ac1c-eaee-4720-8384-c112e1f991c3\") " pod="openstack/glance-default-external-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.035784 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4295ac1c-eaee-4720-8384-c112e1f991c3-config-data\") pod \"glance-default-external-api-0\" (UID: \"4295ac1c-eaee-4720-8384-c112e1f991c3\") " pod="openstack/glance-default-external-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.035807 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4ssfc\" (UniqueName: \"kubernetes.io/projected/4295ac1c-eaee-4720-8384-c112e1f991c3-kube-api-access-4ssfc\") pod \"glance-default-external-api-0\" (UID: \"4295ac1c-eaee-4720-8384-c112e1f991c3\") " pod="openstack/glance-default-external-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.035845 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4295ac1c-eaee-4720-8384-c112e1f991c3-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4295ac1c-eaee-4720-8384-c112e1f991c3\") " pod="openstack/glance-default-external-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.035879 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4295ac1c-eaee-4720-8384-c112e1f991c3-logs\") pod \"glance-default-external-api-0\" (UID: \"4295ac1c-eaee-4720-8384-c112e1f991c3\") " pod="openstack/glance-default-external-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.035904 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4295ac1c-eaee-4720-8384-c112e1f991c3-scripts\") pod \"glance-default-external-api-0\" (UID: \"4295ac1c-eaee-4720-8384-c112e1f991c3\") " pod="openstack/glance-default-external-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.035921 4982 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"4295ac1c-eaee-4720-8384-c112e1f991c3\") " pod="openstack/glance-default-external-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.036202 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4295ac1c-eaee-4720-8384-c112e1f991c3-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4295ac1c-eaee-4720-8384-c112e1f991c3\") " pod="openstack/glance-default-external-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.036293 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"4295ac1c-eaee-4720-8384-c112e1f991c3\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/glance-default-external-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.036378 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4295ac1c-eaee-4720-8384-c112e1f991c3-logs\") pod \"glance-default-external-api-0\" (UID: \"4295ac1c-eaee-4720-8384-c112e1f991c3\") " pod="openstack/glance-default-external-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.040517 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4295ac1c-eaee-4720-8384-c112e1f991c3-config-data\") pod \"glance-default-external-api-0\" (UID: \"4295ac1c-eaee-4720-8384-c112e1f991c3\") " pod="openstack/glance-default-external-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.041317 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4295ac1c-eaee-4720-8384-c112e1f991c3-scripts\") pod \"glance-default-external-api-0\" (UID: \"4295ac1c-eaee-4720-8384-c112e1f991c3\") " pod="openstack/glance-default-external-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.041687 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4295ac1c-eaee-4720-8384-c112e1f991c3-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4295ac1c-eaee-4720-8384-c112e1f991c3\") " pod="openstack/glance-default-external-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.056592 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4ssfc\" (UniqueName: \"kubernetes.io/projected/4295ac1c-eaee-4720-8384-c112e1f991c3-kube-api-access-4ssfc\") pod \"glance-default-external-api-0\" (UID: \"4295ac1c-eaee-4720-8384-c112e1f991c3\") " pod="openstack/glance-default-external-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.061039 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"4295ac1c-eaee-4720-8384-c112e1f991c3\") " pod="openstack/glance-default-external-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.065041 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.073807 4982 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.085648 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.110570 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.114060 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.137816 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9e5c13b-ae97-4be3-bb94-f33eba693490-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b9e5c13b-ae97-4be3-bb94-f33eba693490\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.138001 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9e5c13b-ae97-4be3-bb94-f33eba693490-logs\") pod \"glance-default-internal-api-0\" (UID: \"b9e5c13b-ae97-4be3-bb94-f33eba693490\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.138136 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twdcl\" (UniqueName: \"kubernetes.io/projected/b9e5c13b-ae97-4be3-bb94-f33eba693490-kube-api-access-twdcl\") pod \"glance-default-internal-api-0\" (UID: \"b9e5c13b-ae97-4be3-bb94-f33eba693490\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.138188 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"b9e5c13b-ae97-4be3-bb94-f33eba693490\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.138370 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9e5c13b-ae97-4be3-bb94-f33eba693490-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b9e5c13b-ae97-4be3-bb94-f33eba693490\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.138409 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9e5c13b-ae97-4be3-bb94-f33eba693490-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b9e5c13b-ae97-4be3-bb94-f33eba693490\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.138498 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b9e5c13b-ae97-4be3-bb94-f33eba693490-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b9e5c13b-ae97-4be3-bb94-f33eba693490\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.239906 4982 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b9e5c13b-ae97-4be3-bb94-f33eba693490-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b9e5c13b-ae97-4be3-bb94-f33eba693490\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.240280 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9e5c13b-ae97-4be3-bb94-f33eba693490-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b9e5c13b-ae97-4be3-bb94-f33eba693490\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.240327 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9e5c13b-ae97-4be3-bb94-f33eba693490-logs\") pod \"glance-default-internal-api-0\" (UID: \"b9e5c13b-ae97-4be3-bb94-f33eba693490\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.240376 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twdcl\" (UniqueName: \"kubernetes.io/projected/b9e5c13b-ae97-4be3-bb94-f33eba693490-kube-api-access-twdcl\") pod \"glance-default-internal-api-0\" (UID: \"b9e5c13b-ae97-4be3-bb94-f33eba693490\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.240403 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"b9e5c13b-ae97-4be3-bb94-f33eba693490\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.240458 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9e5c13b-ae97-4be3-bb94-f33eba693490-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b9e5c13b-ae97-4be3-bb94-f33eba693490\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.240478 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9e5c13b-ae97-4be3-bb94-f33eba693490-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b9e5c13b-ae97-4be3-bb94-f33eba693490\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.240516 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b9e5c13b-ae97-4be3-bb94-f33eba693490-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b9e5c13b-ae97-4be3-bb94-f33eba693490\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.242078 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"b9e5c13b-ae97-4be3-bb94-f33eba693490\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-internal-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.242505 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/b9e5c13b-ae97-4be3-bb94-f33eba693490-logs\") pod \"glance-default-internal-api-0\" (UID: \"b9e5c13b-ae97-4be3-bb94-f33eba693490\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.245461 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9e5c13b-ae97-4be3-bb94-f33eba693490-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b9e5c13b-ae97-4be3-bb94-f33eba693490\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.253674 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9e5c13b-ae97-4be3-bb94-f33eba693490-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b9e5c13b-ae97-4be3-bb94-f33eba693490\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.254816 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9e5c13b-ae97-4be3-bb94-f33eba693490-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b9e5c13b-ae97-4be3-bb94-f33eba693490\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.277581 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twdcl\" (UniqueName: \"kubernetes.io/projected/b9e5c13b-ae97-4be3-bb94-f33eba693490-kube-api-access-twdcl\") pod \"glance-default-internal-api-0\" (UID: \"b9e5c13b-ae97-4be3-bb94-f33eba693490\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.284309 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"b9e5c13b-ae97-4be3-bb94-f33eba693490\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.514636 4982 generic.go:334] "Generic (PLEG): container finished" podID="2d901ec6-c01e-4859-930d-04e5ceac9e04" containerID="0c7b4d7f0102c35b4afdc72cc418d4cad10989c330476cb5c780f0b64f1a12cf" exitCode=0 Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.514842 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f6f8cb849-97n8z" event={"ID":"2d901ec6-c01e-4859-930d-04e5ceac9e04","Type":"ContainerDied","Data":"0c7b4d7f0102c35b4afdc72cc418d4cad10989c330476cb5c780f0b64f1a12cf"} Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.515650 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f6f8cb849-97n8z" event={"ID":"2d901ec6-c01e-4859-930d-04e5ceac9e04","Type":"ContainerStarted","Data":"b8830ec87ed33155338470d14011973c05fa79d637b00dfb3aaa69e5d2262727"} Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.520582 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 06:06:59 crc kubenswrapper[4982]: I0122 06:06:59.685315 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 06:06:59 crc kubenswrapper[4982]: W0122 06:06:59.701816 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4295ac1c_eaee_4720_8384_c112e1f991c3.slice/crio-0c6663be1096a35b36095bf3ffa04534342cad0e34b2ce5085ee0b4a9fc534ce WatchSource:0}: Error finding container 0c6663be1096a35b36095bf3ffa04534342cad0e34b2ce5085ee0b4a9fc534ce: Status 404 returned error can't find the container with id 0c6663be1096a35b36095bf3ffa04534342cad0e34b2ce5085ee0b4a9fc534ce Jan 22 06:07:00 crc kubenswrapper[4982]: I0122 06:07:00.085873 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 06:07:00 crc kubenswrapper[4982]: W0122 06:07:00.102400 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb9e5c13b_ae97_4be3_bb94_f33eba693490.slice/crio-ea0587678d0c4bb5a3043a35cef43525888e926bfbd272e2417d116d2d019893 WatchSource:0}: Error finding container ea0587678d0c4bb5a3043a35cef43525888e926bfbd272e2417d116d2d019893: Status 404 returned error can't find the container with id ea0587678d0c4bb5a3043a35cef43525888e926bfbd272e2417d116d2d019893 Jan 22 06:07:00 crc kubenswrapper[4982]: I0122 06:07:00.532368 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f6f8cb849-97n8z" event={"ID":"2d901ec6-c01e-4859-930d-04e5ceac9e04","Type":"ContainerStarted","Data":"f72c805ca08d3bd910a315f7dab4023cf167defa0bca0344bec478ae4f6e0be2"} Jan 22 06:07:00 crc kubenswrapper[4982]: I0122 06:07:00.532605 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6f6f8cb849-97n8z" Jan 22 06:07:00 crc kubenswrapper[4982]: I0122 06:07:00.536564 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4295ac1c-eaee-4720-8384-c112e1f991c3","Type":"ContainerStarted","Data":"8e5b2372fdc2ebf629039f88a64b31e52f7a42c6b46129b88e0396142ec25b0d"} Jan 22 06:07:00 crc kubenswrapper[4982]: I0122 06:07:00.536596 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4295ac1c-eaee-4720-8384-c112e1f991c3","Type":"ContainerStarted","Data":"0c6663be1096a35b36095bf3ffa04534342cad0e34b2ce5085ee0b4a9fc534ce"} Jan 22 06:07:00 crc kubenswrapper[4982]: I0122 06:07:00.538808 4982 generic.go:334] "Generic (PLEG): container finished" podID="96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1" containerID="6a0c35338e2f5b356715e798df704abdc39b26477bc7c13626e347a37d3bb802" exitCode=0 Jan 22 06:07:00 crc kubenswrapper[4982]: I0122 06:07:00.538888 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-z27bt" event={"ID":"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1","Type":"ContainerDied","Data":"6a0c35338e2f5b356715e798df704abdc39b26477bc7c13626e347a37d3bb802"} Jan 22 06:07:00 crc kubenswrapper[4982]: I0122 06:07:00.540675 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b9e5c13b-ae97-4be3-bb94-f33eba693490","Type":"ContainerStarted","Data":"ea0587678d0c4bb5a3043a35cef43525888e926bfbd272e2417d116d2d019893"} Jan 22 06:07:00 crc kubenswrapper[4982]: I0122 06:07:00.552925 4982 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6f6f8cb849-97n8z" podStartSLOduration=3.5529070369999998 podStartE2EDuration="3.552907037s" podCreationTimestamp="2026-01-22 06:06:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:07:00.549924439 +0000 UTC m=+1281.388562442" watchObservedRunningTime="2026-01-22 06:07:00.552907037 +0000 UTC m=+1281.391545040" Jan 22 06:07:00 crc kubenswrapper[4982]: I0122 06:07:00.800890 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 06:07:00 crc kubenswrapper[4982]: I0122 06:07:00.869811 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 06:07:01 crc kubenswrapper[4982]: I0122 06:07:01.558644 4982 generic.go:334] "Generic (PLEG): container finished" podID="f985360b-8866-457a-953d-2d15a1e2d20e" containerID="60c7b99cef0a8b8fb2b268e986ee0042e7076f1e35e494fec4e8f5a27613ec40" exitCode=0 Jan 22 06:07:01 crc kubenswrapper[4982]: I0122 06:07:01.558682 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-j4fnn" event={"ID":"f985360b-8866-457a-953d-2d15a1e2d20e","Type":"ContainerDied","Data":"60c7b99cef0a8b8fb2b268e986ee0042e7076f1e35e494fec4e8f5a27613ec40"} Jan 22 06:07:01 crc kubenswrapper[4982]: I0122 06:07:01.561591 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4295ac1c-eaee-4720-8384-c112e1f991c3","Type":"ContainerStarted","Data":"66dfe5ae987047ad7f7c338700f8f170b47b869ff3d7bdebfec0d21c7dba57c6"} Jan 22 06:07:01 crc kubenswrapper[4982]: I0122 06:07:01.561667 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="4295ac1c-eaee-4720-8384-c112e1f991c3" containerName="glance-log" containerID="cri-o://8e5b2372fdc2ebf629039f88a64b31e52f7a42c6b46129b88e0396142ec25b0d" gracePeriod=30 Jan 22 06:07:01 crc kubenswrapper[4982]: I0122 06:07:01.561703 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="4295ac1c-eaee-4720-8384-c112e1f991c3" containerName="glance-httpd" containerID="cri-o://66dfe5ae987047ad7f7c338700f8f170b47b869ff3d7bdebfec0d21c7dba57c6" gracePeriod=30 Jan 22 06:07:01 crc kubenswrapper[4982]: I0122 06:07:01.595199 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="b9e5c13b-ae97-4be3-bb94-f33eba693490" containerName="glance-log" containerID="cri-o://1c5ea6287441bc97b99897a7b50c220d856b9c60344059b7433b07707241baa3" gracePeriod=30 Jan 22 06:07:01 crc kubenswrapper[4982]: I0122 06:07:01.595317 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b9e5c13b-ae97-4be3-bb94-f33eba693490","Type":"ContainerStarted","Data":"b1f6829efdd2f047dac5bc31975e51c800d162196de5241830291198d42a35ae"} Jan 22 06:07:01 crc kubenswrapper[4982]: I0122 06:07:01.595342 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b9e5c13b-ae97-4be3-bb94-f33eba693490","Type":"ContainerStarted","Data":"1c5ea6287441bc97b99897a7b50c220d856b9c60344059b7433b07707241baa3"} Jan 22 06:07:01 crc kubenswrapper[4982]: I0122 06:07:01.595504 4982 kuberuntime_container.go:808] "Killing container with a 
grace period" pod="openstack/glance-default-internal-api-0" podUID="b9e5c13b-ae97-4be3-bb94-f33eba693490" containerName="glance-httpd" containerID="cri-o://b1f6829efdd2f047dac5bc31975e51c800d162196de5241830291198d42a35ae" gracePeriod=30 Jan 22 06:07:01 crc kubenswrapper[4982]: I0122 06:07:01.611763 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.611743054 podStartE2EDuration="4.611743054s" podCreationTimestamp="2026-01-22 06:06:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:07:01.600052118 +0000 UTC m=+1282.438690121" watchObservedRunningTime="2026-01-22 06:07:01.611743054 +0000 UTC m=+1282.450381057" Jan 22 06:07:02 crc kubenswrapper[4982]: I0122 06:07:02.611759 4982 generic.go:334] "Generic (PLEG): container finished" podID="4295ac1c-eaee-4720-8384-c112e1f991c3" containerID="66dfe5ae987047ad7f7c338700f8f170b47b869ff3d7bdebfec0d21c7dba57c6" exitCode=0 Jan 22 06:07:02 crc kubenswrapper[4982]: I0122 06:07:02.612051 4982 generic.go:334] "Generic (PLEG): container finished" podID="4295ac1c-eaee-4720-8384-c112e1f991c3" containerID="8e5b2372fdc2ebf629039f88a64b31e52f7a42c6b46129b88e0396142ec25b0d" exitCode=143 Jan 22 06:07:02 crc kubenswrapper[4982]: I0122 06:07:02.611832 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4295ac1c-eaee-4720-8384-c112e1f991c3","Type":"ContainerDied","Data":"66dfe5ae987047ad7f7c338700f8f170b47b869ff3d7bdebfec0d21c7dba57c6"} Jan 22 06:07:02 crc kubenswrapper[4982]: I0122 06:07:02.612126 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4295ac1c-eaee-4720-8384-c112e1f991c3","Type":"ContainerDied","Data":"8e5b2372fdc2ebf629039f88a64b31e52f7a42c6b46129b88e0396142ec25b0d"} Jan 22 06:07:02 crc kubenswrapper[4982]: I0122 06:07:02.615550 4982 generic.go:334] "Generic (PLEG): container finished" podID="b9e5c13b-ae97-4be3-bb94-f33eba693490" containerID="b1f6829efdd2f047dac5bc31975e51c800d162196de5241830291198d42a35ae" exitCode=0 Jan 22 06:07:02 crc kubenswrapper[4982]: I0122 06:07:02.615580 4982 generic.go:334] "Generic (PLEG): container finished" podID="b9e5c13b-ae97-4be3-bb94-f33eba693490" containerID="1c5ea6287441bc97b99897a7b50c220d856b9c60344059b7433b07707241baa3" exitCode=143 Jan 22 06:07:02 crc kubenswrapper[4982]: I0122 06:07:02.615618 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b9e5c13b-ae97-4be3-bb94-f33eba693490","Type":"ContainerDied","Data":"b1f6829efdd2f047dac5bc31975e51c800d162196de5241830291198d42a35ae"} Jan 22 06:07:02 crc kubenswrapper[4982]: I0122 06:07:02.615646 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b9e5c13b-ae97-4be3-bb94-f33eba693490","Type":"ContainerDied","Data":"1c5ea6287441bc97b99897a7b50c220d856b9c60344059b7433b07707241baa3"} Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.463804 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-j4fnn" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.469659 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-z27bt" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.481531 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.481513778 podStartE2EDuration="5.481513778s" podCreationTimestamp="2026-01-22 06:06:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:07:01.632095444 +0000 UTC m=+1282.470733457" watchObservedRunningTime="2026-01-22 06:07:03.481513778 +0000 UTC m=+1284.320151781" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.588503 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-combined-ca-bundle\") pod \"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1\" (UID: \"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1\") " Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.588574 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-fernet-keys\") pod \"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1\" (UID: \"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1\") " Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.588650 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-scripts\") pod \"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1\" (UID: \"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1\") " Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.588684 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-credential-keys\") pod \"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1\" (UID: \"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1\") " Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.588733 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f985360b-8866-457a-953d-2d15a1e2d20e-logs\") pod \"f985360b-8866-457a-953d-2d15a1e2d20e\" (UID: \"f985360b-8866-457a-953d-2d15a1e2d20e\") " Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.588765 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f985360b-8866-457a-953d-2d15a1e2d20e-config-data\") pod \"f985360b-8866-457a-953d-2d15a1e2d20e\" (UID: \"f985360b-8866-457a-953d-2d15a1e2d20e\") " Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.588809 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f985360b-8866-457a-953d-2d15a1e2d20e-combined-ca-bundle\") pod \"f985360b-8866-457a-953d-2d15a1e2d20e\" (UID: \"f985360b-8866-457a-953d-2d15a1e2d20e\") " Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.588843 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8nzwm\" (UniqueName: \"kubernetes.io/projected/f985360b-8866-457a-953d-2d15a1e2d20e-kube-api-access-8nzwm\") pod \"f985360b-8866-457a-953d-2d15a1e2d20e\" (UID: \"f985360b-8866-457a-953d-2d15a1e2d20e\") " Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.588906 4982 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-lqvk9\" (UniqueName: \"kubernetes.io/projected/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-kube-api-access-lqvk9\") pod \"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1\" (UID: \"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1\") " Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.588956 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-config-data\") pod \"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1\" (UID: \"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1\") " Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.589069 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f985360b-8866-457a-953d-2d15a1e2d20e-scripts\") pod \"f985360b-8866-457a-953d-2d15a1e2d20e\" (UID: \"f985360b-8866-457a-953d-2d15a1e2d20e\") " Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.589582 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f985360b-8866-457a-953d-2d15a1e2d20e-logs" (OuterVolumeSpecName: "logs") pod "f985360b-8866-457a-953d-2d15a1e2d20e" (UID: "f985360b-8866-457a-953d-2d15a1e2d20e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.598092 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1" (UID: "96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.598193 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1" (UID: "96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.634508 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-scripts" (OuterVolumeSpecName: "scripts") pod "96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1" (UID: "96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.634534 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f985360b-8866-457a-953d-2d15a1e2d20e-kube-api-access-8nzwm" (OuterVolumeSpecName: "kube-api-access-8nzwm") pod "f985360b-8866-457a-953d-2d15a1e2d20e" (UID: "f985360b-8866-457a-953d-2d15a1e2d20e"). InnerVolumeSpecName "kube-api-access-8nzwm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.641750 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f985360b-8866-457a-953d-2d15a1e2d20e-scripts" (OuterVolumeSpecName: "scripts") pod "f985360b-8866-457a-953d-2d15a1e2d20e" (UID: "f985360b-8866-457a-953d-2d15a1e2d20e"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.654027 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-kube-api-access-lqvk9" (OuterVolumeSpecName: "kube-api-access-lqvk9") pod "96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1" (UID: "96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1"). InnerVolumeSpecName "kube-api-access-lqvk9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.666075 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f985360b-8866-457a-953d-2d15a1e2d20e-config-data" (OuterVolumeSpecName: "config-data") pod "f985360b-8866-457a-953d-2d15a1e2d20e" (UID: "f985360b-8866-457a-953d-2d15a1e2d20e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.674350 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-j4fnn" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.674778 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-j4fnn" event={"ID":"f985360b-8866-457a-953d-2d15a1e2d20e","Type":"ContainerDied","Data":"32c22500d229c6753016586cae84c58dab5fd44dcef17c62950ba010178751c6"} Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.674830 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="32c22500d229c6753016586cae84c58dab5fd44dcef17c62950ba010178751c6" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.676967 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f985360b-8866-457a-953d-2d15a1e2d20e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f985360b-8866-457a-953d-2d15a1e2d20e" (UID: "f985360b-8866-457a-953d-2d15a1e2d20e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.678933 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-z27bt" event={"ID":"96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1","Type":"ContainerDied","Data":"f33be20111b91b65aa462322f86df1bc84918b39bddf71ba500c7e1afdacb7c9"} Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.678972 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f33be20111b91b65aa462322f86df1bc84918b39bddf71ba500c7e1afdacb7c9" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.679036 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-z27bt" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.688144 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-config-data" (OuterVolumeSpecName: "config-data") pod "96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1" (UID: "96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.691872 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f985360b-8866-457a-953d-2d15a1e2d20e-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.691896 4982 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.691907 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.691915 4982 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.691926 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f985360b-8866-457a-953d-2d15a1e2d20e-logs\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.691934 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f985360b-8866-457a-953d-2d15a1e2d20e-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.691942 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f985360b-8866-457a-953d-2d15a1e2d20e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.691953 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8nzwm\" (UniqueName: \"kubernetes.io/projected/f985360b-8866-457a-953d-2d15a1e2d20e-kube-api-access-8nzwm\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.691961 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lqvk9\" (UniqueName: \"kubernetes.io/projected/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-kube-api-access-lqvk9\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.691969 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.694495 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1" (UID: "96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.695010 4982 generic.go:334] "Generic (PLEG): container finished" podID="60dd52f0-a379-40e6-b2e3-dcd5c296bc59" containerID="f03d5187559edee59c68b9b8f3273c1775dd301a33e96ee5797bda602ce3d7bc" exitCode=0 Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.695047 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-rsp2k" event={"ID":"60dd52f0-a379-40e6-b2e3-dcd5c296bc59","Type":"ContainerDied","Data":"f03d5187559edee59c68b9b8f3273c1775dd301a33e96ee5797bda602ce3d7bc"} Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.699007 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-664cd4b9c4-ccktp"] Jan 22 06:07:03 crc kubenswrapper[4982]: E0122 06:07:03.699318 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f985360b-8866-457a-953d-2d15a1e2d20e" containerName="placement-db-sync" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.699335 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f985360b-8866-457a-953d-2d15a1e2d20e" containerName="placement-db-sync" Jan 22 06:07:03 crc kubenswrapper[4982]: E0122 06:07:03.699352 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1" containerName="keystone-bootstrap" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.699358 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1" containerName="keystone-bootstrap" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.699507 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1" containerName="keystone-bootstrap" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.699533 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f985360b-8866-457a-953d-2d15a1e2d20e" containerName="placement-db-sync" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.700340 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-664cd4b9c4-ccktp" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.702788 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.702944 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.712771 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-664cd4b9c4-ccktp"] Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.793725 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ba39683-f142-405a-a1c8-83841c5b2cd0-scripts\") pod \"placement-664cd4b9c4-ccktp\" (UID: \"7ba39683-f142-405a-a1c8-83841c5b2cd0\") " pod="openstack/placement-664cd4b9c4-ccktp" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.793770 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ba39683-f142-405a-a1c8-83841c5b2cd0-logs\") pod \"placement-664cd4b9c4-ccktp\" (UID: \"7ba39683-f142-405a-a1c8-83841c5b2cd0\") " pod="openstack/placement-664cd4b9c4-ccktp" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.793792 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ba39683-f142-405a-a1c8-83841c5b2cd0-config-data\") pod \"placement-664cd4b9c4-ccktp\" (UID: \"7ba39683-f142-405a-a1c8-83841c5b2cd0\") " pod="openstack/placement-664cd4b9c4-ccktp" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.793819 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ba39683-f142-405a-a1c8-83841c5b2cd0-internal-tls-certs\") pod \"placement-664cd4b9c4-ccktp\" (UID: \"7ba39683-f142-405a-a1c8-83841c5b2cd0\") " pod="openstack/placement-664cd4b9c4-ccktp" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.793836 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ba39683-f142-405a-a1c8-83841c5b2cd0-public-tls-certs\") pod \"placement-664cd4b9c4-ccktp\" (UID: \"7ba39683-f142-405a-a1c8-83841c5b2cd0\") " pod="openstack/placement-664cd4b9c4-ccktp" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.793931 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brr4z\" (UniqueName: \"kubernetes.io/projected/7ba39683-f142-405a-a1c8-83841c5b2cd0-kube-api-access-brr4z\") pod \"placement-664cd4b9c4-ccktp\" (UID: \"7ba39683-f142-405a-a1c8-83841c5b2cd0\") " pod="openstack/placement-664cd4b9c4-ccktp" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.793967 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ba39683-f142-405a-a1c8-83841c5b2cd0-combined-ca-bundle\") pod \"placement-664cd4b9c4-ccktp\" (UID: \"7ba39683-f142-405a-a1c8-83841c5b2cd0\") " pod="openstack/placement-664cd4b9c4-ccktp" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.794053 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.897334 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ba39683-f142-405a-a1c8-83841c5b2cd0-scripts\") pod \"placement-664cd4b9c4-ccktp\" (UID: \"7ba39683-f142-405a-a1c8-83841c5b2cd0\") " pod="openstack/placement-664cd4b9c4-ccktp" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.897388 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ba39683-f142-405a-a1c8-83841c5b2cd0-logs\") pod \"placement-664cd4b9c4-ccktp\" (UID: \"7ba39683-f142-405a-a1c8-83841c5b2cd0\") " pod="openstack/placement-664cd4b9c4-ccktp" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.897417 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ba39683-f142-405a-a1c8-83841c5b2cd0-config-data\") pod \"placement-664cd4b9c4-ccktp\" (UID: \"7ba39683-f142-405a-a1c8-83841c5b2cd0\") " pod="openstack/placement-664cd4b9c4-ccktp" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.897441 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ba39683-f142-405a-a1c8-83841c5b2cd0-public-tls-certs\") pod \"placement-664cd4b9c4-ccktp\" (UID: \"7ba39683-f142-405a-a1c8-83841c5b2cd0\") " pod="openstack/placement-664cd4b9c4-ccktp" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.897460 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ba39683-f142-405a-a1c8-83841c5b2cd0-internal-tls-certs\") pod \"placement-664cd4b9c4-ccktp\" (UID: \"7ba39683-f142-405a-a1c8-83841c5b2cd0\") " pod="openstack/placement-664cd4b9c4-ccktp" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.897479 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brr4z\" (UniqueName: \"kubernetes.io/projected/7ba39683-f142-405a-a1c8-83841c5b2cd0-kube-api-access-brr4z\") pod \"placement-664cd4b9c4-ccktp\" (UID: \"7ba39683-f142-405a-a1c8-83841c5b2cd0\") " pod="openstack/placement-664cd4b9c4-ccktp" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.897506 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ba39683-f142-405a-a1c8-83841c5b2cd0-combined-ca-bundle\") pod \"placement-664cd4b9c4-ccktp\" (UID: \"7ba39683-f142-405a-a1c8-83841c5b2cd0\") " pod="openstack/placement-664cd4b9c4-ccktp" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.898122 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ba39683-f142-405a-a1c8-83841c5b2cd0-logs\") pod \"placement-664cd4b9c4-ccktp\" (UID: \"7ba39683-f142-405a-a1c8-83841c5b2cd0\") " pod="openstack/placement-664cd4b9c4-ccktp" Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.902361 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ba39683-f142-405a-a1c8-83841c5b2cd0-public-tls-certs\") pod \"placement-664cd4b9c4-ccktp\" (UID: \"7ba39683-f142-405a-a1c8-83841c5b2cd0\") " pod="openstack/placement-664cd4b9c4-ccktp" Jan 22 06:07:03 crc 
kubenswrapper[4982]: I0122 06:07:03.902858 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ba39683-f142-405a-a1c8-83841c5b2cd0-scripts\") pod \"placement-664cd4b9c4-ccktp\" (UID: \"7ba39683-f142-405a-a1c8-83841c5b2cd0\") " pod="openstack/placement-664cd4b9c4-ccktp"
Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.903405 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ba39683-f142-405a-a1c8-83841c5b2cd0-internal-tls-certs\") pod \"placement-664cd4b9c4-ccktp\" (UID: \"7ba39683-f142-405a-a1c8-83841c5b2cd0\") " pod="openstack/placement-664cd4b9c4-ccktp"
Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.904478 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ba39683-f142-405a-a1c8-83841c5b2cd0-combined-ca-bundle\") pod \"placement-664cd4b9c4-ccktp\" (UID: \"7ba39683-f142-405a-a1c8-83841c5b2cd0\") " pod="openstack/placement-664cd4b9c4-ccktp"
Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.906274 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ba39683-f142-405a-a1c8-83841c5b2cd0-config-data\") pod \"placement-664cd4b9c4-ccktp\" (UID: \"7ba39683-f142-405a-a1c8-83841c5b2cd0\") " pod="openstack/placement-664cd4b9c4-ccktp"
Jan 22 06:07:03 crc kubenswrapper[4982]: I0122 06:07:03.915677 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brr4z\" (UniqueName: \"kubernetes.io/projected/7ba39683-f142-405a-a1c8-83841c5b2cd0-kube-api-access-brr4z\") pod \"placement-664cd4b9c4-ccktp\" (UID: \"7ba39683-f142-405a-a1c8-83841c5b2cd0\") " pod="openstack/placement-664cd4b9c4-ccktp"
Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.029693 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-664cd4b9c4-ccktp"
Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.634690 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-7c5dd486cd-r6bbs"]
Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.636039 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7c5dd486cd-r6bbs"
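The placement-664cd4b9c4-ccktp entries above show the kubelet volume manager's reconcile loop in its usual order: "VerifyControllerAttachedVolume started" checks each desired volume against actual state, "MountVolume started" kicks off the operation, and "MountVolume.SetUp succeeded" reports the per-plugin mount. Below is a minimal Go sketch of that three-stage pattern; it only mirrors the control flow of these log lines, is not kubelet source, and the volume list is abridged from the log.

    // Sketch only: replays the verify -> mount -> setup stages seen in the
    // reconciler_common.go / operation_generator.go entries above.
    package main

    import "fmt"

    type volume struct{ name, plugin string }

    func reconcile(pod string, vols []volume) {
        for _, v := range vols {
            // Stage 1: the reconciler confirms the volume is attached.
            fmt.Printf("VerifyControllerAttachedVolume started for volume %q (%s) pod=%q\n", v.name, v.plugin, pod)
        }
        for _, v := range vols {
            // Stages 2 and 3: the mount is started, then the plugin's SetUp completes.
            fmt.Printf("MountVolume started for volume %q pod=%q\n", v.name, pod)
            fmt.Printf("MountVolume.SetUp succeeded for volume %q pod=%q\n", v.name, pod)
        }
    }

    func main() {
        reconcile("openstack/placement-664cd4b9c4-ccktp", []volume{
            {"scripts", "kubernetes.io/secret"},
            {"logs", "kubernetes.io/empty-dir"},
            {"kube-api-access-brr4z", "kubernetes.io/projected"},
        })
    }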
Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.642017 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.642067 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc"
Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.642382 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.642488 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc"
Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.642510 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.642655 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-jckfq"
Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.649377 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7c5dd486cd-r6bbs"]
Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.705961 4982 generic.go:334] "Generic (PLEG): container finished" podID="59ce449e-073f-4c66-ade6-fa8448573827" containerID="f21f6b2f159515920e265fd73f4db3e8cdea1216effa74442ab287d2ab610e13" exitCode=0
Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.706136 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-gms7h" event={"ID":"59ce449e-073f-4c66-ade6-fa8448573827","Type":"ContainerDied","Data":"f21f6b2f159515920e265fd73f4db3e8cdea1216effa74442ab287d2ab610e13"}
Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.714043 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-fernet-keys\") pod \"keystone-7c5dd486cd-r6bbs\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " pod="openstack/keystone-7c5dd486cd-r6bbs"
Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.714094 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-internal-tls-certs\") pod \"keystone-7c5dd486cd-r6bbs\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " pod="openstack/keystone-7c5dd486cd-r6bbs"
Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.714110 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-public-tls-certs\") pod \"keystone-7c5dd486cd-r6bbs\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " pod="openstack/keystone-7c5dd486cd-r6bbs"
Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.714128 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s68vt\" (UniqueName: \"kubernetes.io/projected/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-kube-api-access-s68vt\") pod \"keystone-7c5dd486cd-r6bbs\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " pod="openstack/keystone-7c5dd486cd-r6bbs"
Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.714173 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-scripts\") pod \"keystone-7c5dd486cd-r6bbs\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " pod="openstack/keystone-7c5dd486cd-r6bbs"
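The "Generic (PLEG): container finished" / "SyncLoop (PLEG): event for pod" pair above is the Pod Lifecycle Event Generator at work: a relist of runtime state notices that the neutron-db-sync container exited (exitCode=0, a normally completed database-sync job) and feeds a ContainerDied event into the sync loop, which is what triggers the teardown seen further down. A small self-contained Go sketch of that event hand-off, using assumed simplified types rather than the kubelet's own:

    // Assumed simplified shape of a PLEG-style event hand-off; not kubelet source.
    package main

    import "fmt"

    type podLifecycleEvent struct {
        ID   string // pod UID
        Type string // e.g. "ContainerDied"
        Data string // container ID
    }

    func main() {
        events := make(chan podLifecycleEvent, 1)
        // The relist observed that the container is no longer running:
        events <- podLifecycleEvent{
            ID:   "59ce449e-073f-4c66-ade6-fa8448573827",
            Type: "ContainerDied",
            Data: "f21f6b2f159515920e265fd73f4db3e8cdea1216effa74442ab287d2ab610e13",
        }
        // The sync loop consumes the event and logs "SyncLoop (PLEG): event for pod".
        e := <-events
        fmt.Printf("SyncLoop (PLEG): event for pod %s: %s %s\n", e.ID, e.Type, e.Data)
    }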
started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-scripts\") pod \"keystone-7c5dd486cd-r6bbs\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " pod="openstack/keystone-7c5dd486cd-r6bbs" Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.714205 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-credential-keys\") pod \"keystone-7c5dd486cd-r6bbs\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " pod="openstack/keystone-7c5dd486cd-r6bbs" Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.714225 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-combined-ca-bundle\") pod \"keystone-7c5dd486cd-r6bbs\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " pod="openstack/keystone-7c5dd486cd-r6bbs" Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.714249 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-config-data\") pod \"keystone-7c5dd486cd-r6bbs\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " pod="openstack/keystone-7c5dd486cd-r6bbs" Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.815674 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-fernet-keys\") pod \"keystone-7c5dd486cd-r6bbs\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " pod="openstack/keystone-7c5dd486cd-r6bbs" Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.815738 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-internal-tls-certs\") pod \"keystone-7c5dd486cd-r6bbs\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " pod="openstack/keystone-7c5dd486cd-r6bbs" Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.815764 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-public-tls-certs\") pod \"keystone-7c5dd486cd-r6bbs\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " pod="openstack/keystone-7c5dd486cd-r6bbs" Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.815778 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s68vt\" (UniqueName: \"kubernetes.io/projected/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-kube-api-access-s68vt\") pod \"keystone-7c5dd486cd-r6bbs\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " pod="openstack/keystone-7c5dd486cd-r6bbs" Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.815834 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-scripts\") pod \"keystone-7c5dd486cd-r6bbs\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " pod="openstack/keystone-7c5dd486cd-r6bbs" Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.815948 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: 
\"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-credential-keys\") pod \"keystone-7c5dd486cd-r6bbs\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " pod="openstack/keystone-7c5dd486cd-r6bbs" Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.815976 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-combined-ca-bundle\") pod \"keystone-7c5dd486cd-r6bbs\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " pod="openstack/keystone-7c5dd486cd-r6bbs" Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.815998 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-config-data\") pod \"keystone-7c5dd486cd-r6bbs\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " pod="openstack/keystone-7c5dd486cd-r6bbs" Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.827085 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-scripts\") pod \"keystone-7c5dd486cd-r6bbs\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " pod="openstack/keystone-7c5dd486cd-r6bbs" Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.829704 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-internal-tls-certs\") pod \"keystone-7c5dd486cd-r6bbs\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " pod="openstack/keystone-7c5dd486cd-r6bbs" Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.831644 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-public-tls-certs\") pod \"keystone-7c5dd486cd-r6bbs\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " pod="openstack/keystone-7c5dd486cd-r6bbs" Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.833401 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-combined-ca-bundle\") pod \"keystone-7c5dd486cd-r6bbs\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " pod="openstack/keystone-7c5dd486cd-r6bbs" Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.834326 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-credential-keys\") pod \"keystone-7c5dd486cd-r6bbs\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " pod="openstack/keystone-7c5dd486cd-r6bbs" Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.835135 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-config-data\") pod \"keystone-7c5dd486cd-r6bbs\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " pod="openstack/keystone-7c5dd486cd-r6bbs" Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.835787 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s68vt\" (UniqueName: \"kubernetes.io/projected/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-kube-api-access-s68vt\") pod \"keystone-7c5dd486cd-r6bbs\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " 
pod="openstack/keystone-7c5dd486cd-r6bbs" Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.855752 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-fernet-keys\") pod \"keystone-7c5dd486cd-r6bbs\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " pod="openstack/keystone-7c5dd486cd-r6bbs" Jan 22 06:07:04 crc kubenswrapper[4982]: I0122 06:07:04.963597 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7c5dd486cd-r6bbs" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.654078 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-rsp2k" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.684540 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.698946 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.700161 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-gms7h" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.752280 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"b9e5c13b-ae97-4be3-bb94-f33eba693490\" (UID: \"b9e5c13b-ae97-4be3-bb94-f33eba693490\") " Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.752359 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60dd52f0-a379-40e6-b2e3-dcd5c296bc59-combined-ca-bundle\") pod \"60dd52f0-a379-40e6-b2e3-dcd5c296bc59\" (UID: \"60dd52f0-a379-40e6-b2e3-dcd5c296bc59\") " Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.752383 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4295ac1c-eaee-4720-8384-c112e1f991c3-scripts\") pod \"4295ac1c-eaee-4720-8384-c112e1f991c3\" (UID: \"4295ac1c-eaee-4720-8384-c112e1f991c3\") " Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.752400 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9e5c13b-ae97-4be3-bb94-f33eba693490-logs\") pod \"b9e5c13b-ae97-4be3-bb94-f33eba693490\" (UID: \"b9e5c13b-ae97-4be3-bb94-f33eba693490\") " Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.752440 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b9e5c13b-ae97-4be3-bb94-f33eba693490-httpd-run\") pod \"b9e5c13b-ae97-4be3-bb94-f33eba693490\" (UID: \"b9e5c13b-ae97-4be3-bb94-f33eba693490\") " Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.752460 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4295ac1c-eaee-4720-8384-c112e1f991c3-httpd-run\") pod \"4295ac1c-eaee-4720-8384-c112e1f991c3\" (UID: \"4295ac1c-eaee-4720-8384-c112e1f991c3\") " Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.752505 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-twdcl\" 
(UniqueName: \"kubernetes.io/projected/b9e5c13b-ae97-4be3-bb94-f33eba693490-kube-api-access-twdcl\") pod \"b9e5c13b-ae97-4be3-bb94-f33eba693490\" (UID: \"b9e5c13b-ae97-4be3-bb94-f33eba693490\") " Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.752531 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vbgzc\" (UniqueName: \"kubernetes.io/projected/59ce449e-073f-4c66-ade6-fa8448573827-kube-api-access-vbgzc\") pod \"59ce449e-073f-4c66-ade6-fa8448573827\" (UID: \"59ce449e-073f-4c66-ade6-fa8448573827\") " Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.752559 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g88d9\" (UniqueName: \"kubernetes.io/projected/60dd52f0-a379-40e6-b2e3-dcd5c296bc59-kube-api-access-g88d9\") pod \"60dd52f0-a379-40e6-b2e3-dcd5c296bc59\" (UID: \"60dd52f0-a379-40e6-b2e3-dcd5c296bc59\") " Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.752580 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9e5c13b-ae97-4be3-bb94-f33eba693490-combined-ca-bundle\") pod \"b9e5c13b-ae97-4be3-bb94-f33eba693490\" (UID: \"b9e5c13b-ae97-4be3-bb94-f33eba693490\") " Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.752595 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59ce449e-073f-4c66-ade6-fa8448573827-combined-ca-bundle\") pod \"59ce449e-073f-4c66-ade6-fa8448573827\" (UID: \"59ce449e-073f-4c66-ade6-fa8448573827\") " Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.752626 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9e5c13b-ae97-4be3-bb94-f33eba693490-scripts\") pod \"b9e5c13b-ae97-4be3-bb94-f33eba693490\" (UID: \"b9e5c13b-ae97-4be3-bb94-f33eba693490\") " Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.752641 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/59ce449e-073f-4c66-ade6-fa8448573827-config\") pod \"59ce449e-073f-4c66-ade6-fa8448573827\" (UID: \"59ce449e-073f-4c66-ade6-fa8448573827\") " Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.752669 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4295ac1c-eaee-4720-8384-c112e1f991c3-config-data\") pod \"4295ac1c-eaee-4720-8384-c112e1f991c3\" (UID: \"4295ac1c-eaee-4720-8384-c112e1f991c3\") " Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.752702 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"4295ac1c-eaee-4720-8384-c112e1f991c3\" (UID: \"4295ac1c-eaee-4720-8384-c112e1f991c3\") " Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.752718 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9e5c13b-ae97-4be3-bb94-f33eba693490-config-data\") pod \"b9e5c13b-ae97-4be3-bb94-f33eba693490\" (UID: \"b9e5c13b-ae97-4be3-bb94-f33eba693490\") " Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.752772 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: 
\"kubernetes.io/secret/60dd52f0-a379-40e6-b2e3-dcd5c296bc59-db-sync-config-data\") pod \"60dd52f0-a379-40e6-b2e3-dcd5c296bc59\" (UID: \"60dd52f0-a379-40e6-b2e3-dcd5c296bc59\") " Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.752789 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4295ac1c-eaee-4720-8384-c112e1f991c3-combined-ca-bundle\") pod \"4295ac1c-eaee-4720-8384-c112e1f991c3\" (UID: \"4295ac1c-eaee-4720-8384-c112e1f991c3\") " Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.752816 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4295ac1c-eaee-4720-8384-c112e1f991c3-logs\") pod \"4295ac1c-eaee-4720-8384-c112e1f991c3\" (UID: \"4295ac1c-eaee-4720-8384-c112e1f991c3\") " Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.752868 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4ssfc\" (UniqueName: \"kubernetes.io/projected/4295ac1c-eaee-4720-8384-c112e1f991c3-kube-api-access-4ssfc\") pod \"4295ac1c-eaee-4720-8384-c112e1f991c3\" (UID: \"4295ac1c-eaee-4720-8384-c112e1f991c3\") " Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.759127 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9e5c13b-ae97-4be3-bb94-f33eba693490-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "b9e5c13b-ae97-4be3-bb94-f33eba693490" (UID: "b9e5c13b-ae97-4be3-bb94-f33eba693490"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.765579 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4295ac1c-eaee-4720-8384-c112e1f991c3-kube-api-access-4ssfc" (OuterVolumeSpecName: "kube-api-access-4ssfc") pod "4295ac1c-eaee-4720-8384-c112e1f991c3" (UID: "4295ac1c-eaee-4720-8384-c112e1f991c3"). InnerVolumeSpecName "kube-api-access-4ssfc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.770281 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4295ac1c-eaee-4720-8384-c112e1f991c3-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "4295ac1c-eaee-4720-8384-c112e1f991c3" (UID: "4295ac1c-eaee-4720-8384-c112e1f991c3"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.770346 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.770346 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b9e5c13b-ae97-4be3-bb94-f33eba693490","Type":"ContainerDied","Data":"ea0587678d0c4bb5a3043a35cef43525888e926bfbd272e2417d116d2d019893"} Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.770419 4982 scope.go:117] "RemoveContainer" containerID="b1f6829efdd2f047dac5bc31975e51c800d162196de5241830291198d42a35ae" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.771353 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9e5c13b-ae97-4be3-bb94-f33eba693490-logs" (OuterVolumeSpecName: "logs") pod "b9e5c13b-ae97-4be3-bb94-f33eba693490" (UID: "b9e5c13b-ae97-4be3-bb94-f33eba693490"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.774261 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4295ac1c-eaee-4720-8384-c112e1f991c3-logs" (OuterVolumeSpecName: "logs") pod "4295ac1c-eaee-4720-8384-c112e1f991c3" (UID: "4295ac1c-eaee-4720-8384-c112e1f991c3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.775371 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-gms7h" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.775393 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-gms7h" event={"ID":"59ce449e-073f-4c66-ade6-fa8448573827","Type":"ContainerDied","Data":"d819997680fa24fc3cb3db2696e568d4f7bb4f6c162d422e51d82531c5b90d65"} Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.776014 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4295ac1c-eaee-4720-8384-c112e1f991c3-scripts" (OuterVolumeSpecName: "scripts") pod "4295ac1c-eaee-4720-8384-c112e1f991c3" (UID: "4295ac1c-eaee-4720-8384-c112e1f991c3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.776815 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d819997680fa24fc3cb3db2696e568d4f7bb4f6c162d422e51d82531c5b90d65" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.784021 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60dd52f0-a379-40e6-b2e3-dcd5c296bc59-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "60dd52f0-a379-40e6-b2e3-dcd5c296bc59" (UID: "60dd52f0-a379-40e6-b2e3-dcd5c296bc59"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.790314 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59ce449e-073f-4c66-ade6-fa8448573827-kube-api-access-vbgzc" (OuterVolumeSpecName: "kube-api-access-vbgzc") pod "59ce449e-073f-4c66-ade6-fa8448573827" (UID: "59ce449e-073f-4c66-ade6-fa8448573827"). InnerVolumeSpecName "kube-api-access-vbgzc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.790425 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "b9e5c13b-ae97-4be3-bb94-f33eba693490" (UID: "b9e5c13b-ae97-4be3-bb94-f33eba693490"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.790500 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9e5c13b-ae97-4be3-bb94-f33eba693490-kube-api-access-twdcl" (OuterVolumeSpecName: "kube-api-access-twdcl") pod "b9e5c13b-ae97-4be3-bb94-f33eba693490" (UID: "b9e5c13b-ae97-4be3-bb94-f33eba693490"). InnerVolumeSpecName "kube-api-access-twdcl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.792040 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60dd52f0-a379-40e6-b2e3-dcd5c296bc59-kube-api-access-g88d9" (OuterVolumeSpecName: "kube-api-access-g88d9") pod "60dd52f0-a379-40e6-b2e3-dcd5c296bc59" (UID: "60dd52f0-a379-40e6-b2e3-dcd5c296bc59"). InnerVolumeSpecName "kube-api-access-g88d9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.792504 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-rsp2k" event={"ID":"60dd52f0-a379-40e6-b2e3-dcd5c296bc59","Type":"ContainerDied","Data":"a98f4981abe38b8eb3668a1c3ff37e633461a228cd91bcb0e0387a9658e134b0"} Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.792535 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a98f4981abe38b8eb3668a1c3ff37e633461a228cd91bcb0e0387a9658e134b0" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.792600 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-rsp2k" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.793716 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "glance") pod "4295ac1c-eaee-4720-8384-c112e1f991c3" (UID: "4295ac1c-eaee-4720-8384-c112e1f991c3"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.796523 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9e5c13b-ae97-4be3-bb94-f33eba693490-scripts" (OuterVolumeSpecName: "scripts") pod "b9e5c13b-ae97-4be3-bb94-f33eba693490" (UID: "b9e5c13b-ae97-4be3-bb94-f33eba693490"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.808569 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4295ac1c-eaee-4720-8384-c112e1f991c3","Type":"ContainerDied","Data":"0c6663be1096a35b36095bf3ffa04534342cad0e34b2ce5085ee0b4a9fc534ce"} Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.808661 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.813374 4982 scope.go:117] "RemoveContainer" containerID="1c5ea6287441bc97b99897a7b50c220d856b9c60344059b7433b07707241baa3" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.829272 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59ce449e-073f-4c66-ade6-fa8448573827-config" (OuterVolumeSpecName: "config") pod "59ce449e-073f-4c66-ade6-fa8448573827" (UID: "59ce449e-073f-4c66-ade6-fa8448573827"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.831142 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59ce449e-073f-4c66-ade6-fa8448573827-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "59ce449e-073f-4c66-ade6-fa8448573827" (UID: "59ce449e-073f-4c66-ade6-fa8448573827"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.844137 4982 scope.go:117] "RemoveContainer" containerID="66dfe5ae987047ad7f7c338700f8f170b47b869ff3d7bdebfec0d21c7dba57c6" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.854952 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-twdcl\" (UniqueName: \"kubernetes.io/projected/b9e5c13b-ae97-4be3-bb94-f33eba693490-kube-api-access-twdcl\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.854986 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vbgzc\" (UniqueName: \"kubernetes.io/projected/59ce449e-073f-4c66-ade6-fa8448573827-kube-api-access-vbgzc\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.855012 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g88d9\" (UniqueName: \"kubernetes.io/projected/60dd52f0-a379-40e6-b2e3-dcd5c296bc59-kube-api-access-g88d9\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.855021 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59ce449e-073f-4c66-ade6-fa8448573827-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.855030 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9e5c13b-ae97-4be3-bb94-f33eba693490-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.855040 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/59ce449e-073f-4c66-ade6-fa8448573827-config\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.855068 4982 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.855623 4982 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/60dd52f0-a379-40e6-b2e3-dcd5c296bc59-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.855632 4982 reconciler_common.go:293] "Volume detached for 
volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4295ac1c-eaee-4720-8384-c112e1f991c3-logs\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.855641 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4ssfc\" (UniqueName: \"kubernetes.io/projected/4295ac1c-eaee-4720-8384-c112e1f991c3-kube-api-access-4ssfc\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.855679 4982 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.855688 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4295ac1c-eaee-4720-8384-c112e1f991c3-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.855696 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9e5c13b-ae97-4be3-bb94-f33eba693490-logs\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.855704 4982 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b9e5c13b-ae97-4be3-bb94-f33eba693490-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.855711 4982 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4295ac1c-eaee-4720-8384-c112e1f991c3-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.857226 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9e5c13b-ae97-4be3-bb94-f33eba693490-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b9e5c13b-ae97-4be3-bb94-f33eba693490" (UID: "b9e5c13b-ae97-4be3-bb94-f33eba693490"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.859020 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60dd52f0-a379-40e6-b2e3-dcd5c296bc59-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "60dd52f0-a379-40e6-b2e3-dcd5c296bc59" (UID: "60dd52f0-a379-40e6-b2e3-dcd5c296bc59"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.870184 4982 scope.go:117] "RemoveContainer" containerID="8e5b2372fdc2ebf629039f88a64b31e52f7a42c6b46129b88e0396142ec25b0d" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.882574 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4295ac1c-eaee-4720-8384-c112e1f991c3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4295ac1c-eaee-4720-8384-c112e1f991c3" (UID: "4295ac1c-eaee-4720-8384-c112e1f991c3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.888741 4982 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.903752 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9e5c13b-ae97-4be3-bb94-f33eba693490-config-data" (OuterVolumeSpecName: "config-data") pod "b9e5c13b-ae97-4be3-bb94-f33eba693490" (UID: "b9e5c13b-ae97-4be3-bb94-f33eba693490"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.924383 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4295ac1c-eaee-4720-8384-c112e1f991c3-config-data" (OuterVolumeSpecName: "config-data") pod "4295ac1c-eaee-4720-8384-c112e1f991c3" (UID: "4295ac1c-eaee-4720-8384-c112e1f991c3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.926682 4982 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.953233 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7c5dd486cd-r6bbs"] Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.956673 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4295ac1c-eaee-4720-8384-c112e1f991c3-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.956695 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9e5c13b-ae97-4be3-bb94-f33eba693490-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.956705 4982 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.956714 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4295ac1c-eaee-4720-8384-c112e1f991c3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.956722 4982 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.956730 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60dd52f0-a379-40e6-b2e3-dcd5c296bc59-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:06 crc kubenswrapper[4982]: I0122 06:07:06.956738 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9e5c13b-ae97-4be3-bb94-f33eba693490-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:06 crc kubenswrapper[4982]: W0122 06:07:06.964765 4982 manager.go:1169] Failed to process watch event {EventType:0 
Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.083183 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-664cd4b9c4-ccktp"]
Jan 22 06:07:07 crc kubenswrapper[4982]: W0122 06:07:07.097343 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ba39683_f142_405a_a1c8_83841c5b2cd0.slice/crio-e5bef35f7cbb99b1c6eafa9f5060bc281d1c241d43dbf0cc23ef2d3fb06cb9f3 WatchSource:0}: Error finding container e5bef35f7cbb99b1c6eafa9f5060bc281d1c241d43dbf0cc23ef2d3fb06cb9f3: Status 404 returned error can't find the container with id e5bef35f7cbb99b1c6eafa9f5060bc281d1c241d43dbf0cc23ef2d3fb06cb9f3
Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.118812 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.125864 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.143481 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 22 06:07:07 crc kubenswrapper[4982]: E0122 06:07:07.143796 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59ce449e-073f-4c66-ade6-fa8448573827" containerName="neutron-db-sync"
Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.143809 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="59ce449e-073f-4c66-ade6-fa8448573827" containerName="neutron-db-sync"
Jan 22 06:07:07 crc kubenswrapper[4982]: E0122 06:07:07.143830 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4295ac1c-eaee-4720-8384-c112e1f991c3" containerName="glance-log"
Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.143836 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4295ac1c-eaee-4720-8384-c112e1f991c3" containerName="glance-log"
Jan 22 06:07:07 crc kubenswrapper[4982]: E0122 06:07:07.143862 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4295ac1c-eaee-4720-8384-c112e1f991c3" containerName="glance-httpd"
Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.143868 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4295ac1c-eaee-4720-8384-c112e1f991c3" containerName="glance-httpd"
Jan 22 06:07:07 crc kubenswrapper[4982]: E0122 06:07:07.143878 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9e5c13b-ae97-4be3-bb94-f33eba693490" containerName="glance-httpd"
Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.143884 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9e5c13b-ae97-4be3-bb94-f33eba693490" containerName="glance-httpd"
Jan 22 06:07:07 crc kubenswrapper[4982]: E0122 06:07:07.143894 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60dd52f0-a379-40e6-b2e3-dcd5c296bc59" containerName="barbican-db-sync"
Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.143899 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="60dd52f0-a379-40e6-b2e3-dcd5c296bc59" containerName="barbican-db-sync"
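The E-level "RemoveStaleState: removing container" lines look alarming but record routine cleanup: the glance and db-sync pods were deleted and re-added under the same names, so they return with new UIDs, and the CPU and memory managers drop per-container state keyed by the old UIDs. A sketch of that keying follows, with assumed state values rather than the kubelet's actual data:

    // Sketch of stale-state cleanup keyed by (pod UID, container name),
    // mirroring the cpu_manager.go / memory_manager.go RemoveStaleState lines above.
    package main

    import "fmt"

    type key struct{ podUID, container string }

    func main() {
        assignments := map[key]string{
            {"59ce449e-073f-4c66-ade6-fa8448573827", "neutron-db-sync"}:  "cpuset (assumed)",
            {"60dd52f0-a379-40e6-b2e3-dcd5c296bc59", "barbican-db-sync"}: "cpuset (assumed)",
        }
        active := map[string]bool{} // UIDs of pods that still exist; the old UIDs are gone
        for k := range assignments {
            if !active[k.podUID] {
                fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n", k.podUID, k.container)
                delete(assignments, k) // deleting during range is permitted in Go
            }
        }
    }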
containerName="barbican-db-sync" Jan 22 06:07:07 crc kubenswrapper[4982]: E0122 06:07:07.143918 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9e5c13b-ae97-4be3-bb94-f33eba693490" containerName="glance-log" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.143924 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9e5c13b-ae97-4be3-bb94-f33eba693490" containerName="glance-log" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.144076 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9e5c13b-ae97-4be3-bb94-f33eba693490" containerName="glance-httpd" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.144087 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="4295ac1c-eaee-4720-8384-c112e1f991c3" containerName="glance-httpd" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.144099 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="4295ac1c-eaee-4720-8384-c112e1f991c3" containerName="glance-log" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.144108 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="60dd52f0-a379-40e6-b2e3-dcd5c296bc59" containerName="barbican-db-sync" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.144120 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="59ce449e-073f-4c66-ade6-fa8448573827" containerName="neutron-db-sync" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.144130 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9e5c13b-ae97-4be3-bb94-f33eba693490" containerName="glance-log" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.156301 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.156395 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.159428 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-jk7tf" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.159694 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.160528 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.160744 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.175690 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.202532 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.215236 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.216826 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.218536 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.218792 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.251569 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.261629 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/857e17c8-bcb3-4632-af28-27fe80d6fe2d-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.261669 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/dacae0a7-2b2d-44dc-a676-027055d56035-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.261700 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dacae0a7-2b2d-44dc-a676-027055d56035-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.261731 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/857e17c8-bcb3-4632-af28-27fe80d6fe2d-config-data\") pod \"glance-default-external-api-0\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.261751 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dacae0a7-2b2d-44dc-a676-027055d56035-config-data\") pod \"glance-default-internal-api-0\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.261768 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dacae0a7-2b2d-44dc-a676-027055d56035-scripts\") pod \"glance-default-internal-api-0\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.261795 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.261810 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/857e17c8-bcb3-4632-af28-27fe80d6fe2d-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.261834 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/857e17c8-bcb3-4632-af28-27fe80d6fe2d-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.261920 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.261945 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88hpw\" (UniqueName: \"kubernetes.io/projected/dacae0a7-2b2d-44dc-a676-027055d56035-kube-api-access-88hpw\") pod \"glance-default-internal-api-0\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.261982 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/857e17c8-bcb3-4632-af28-27fe80d6fe2d-logs\") pod \"glance-default-external-api-0\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.262006 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/857e17c8-bcb3-4632-af28-27fe80d6fe2d-scripts\") pod \"glance-default-external-api-0\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.262027 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dacae0a7-2b2d-44dc-a676-027055d56035-logs\") pod \"glance-default-internal-api-0\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.262056 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dacae0a7-2b2d-44dc-a676-027055d56035-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.262085 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bhqk\" (UniqueName: \"kubernetes.io/projected/857e17c8-bcb3-4632-af28-27fe80d6fe2d-kube-api-access-6bhqk\") pod \"glance-default-external-api-0\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.363244 4982 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88hpw\" (UniqueName: \"kubernetes.io/projected/dacae0a7-2b2d-44dc-a676-027055d56035-kube-api-access-88hpw\") pod \"glance-default-internal-api-0\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.363303 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/857e17c8-bcb3-4632-af28-27fe80d6fe2d-logs\") pod \"glance-default-external-api-0\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.363322 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/857e17c8-bcb3-4632-af28-27fe80d6fe2d-scripts\") pod \"glance-default-external-api-0\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.363339 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dacae0a7-2b2d-44dc-a676-027055d56035-logs\") pod \"glance-default-internal-api-0\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.363373 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dacae0a7-2b2d-44dc-a676-027055d56035-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.363403 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bhqk\" (UniqueName: \"kubernetes.io/projected/857e17c8-bcb3-4632-af28-27fe80d6fe2d-kube-api-access-6bhqk\") pod \"glance-default-external-api-0\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.363427 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/857e17c8-bcb3-4632-af28-27fe80d6fe2d-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.363440 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/dacae0a7-2b2d-44dc-a676-027055d56035-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.363466 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dacae0a7-2b2d-44dc-a676-027055d56035-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.363484 4982 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/857e17c8-bcb3-4632-af28-27fe80d6fe2d-config-data\") pod \"glance-default-external-api-0\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.363501 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dacae0a7-2b2d-44dc-a676-027055d56035-config-data\") pod \"glance-default-internal-api-0\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.363519 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dacae0a7-2b2d-44dc-a676-027055d56035-scripts\") pod \"glance-default-internal-api-0\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.363544 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.363561 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/857e17c8-bcb3-4632-af28-27fe80d6fe2d-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.363586 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/857e17c8-bcb3-4632-af28-27fe80d6fe2d-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.363602 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.363965 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/glance-default-external-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.364400 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dacae0a7-2b2d-44dc-a676-027055d56035-logs\") pod \"glance-default-internal-api-0\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.365198 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod 
\"glance-default-internal-api-0\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-internal-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.366431 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/857e17c8-bcb3-4632-af28-27fe80d6fe2d-scripts\") pod \"glance-default-external-api-0\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.366920 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/857e17c8-bcb3-4632-af28-27fe80d6fe2d-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.367000 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/857e17c8-bcb3-4632-af28-27fe80d6fe2d-logs\") pod \"glance-default-external-api-0\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.365229 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/dacae0a7-2b2d-44dc-a676-027055d56035-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.377539 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dacae0a7-2b2d-44dc-a676-027055d56035-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.377996 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dacae0a7-2b2d-44dc-a676-027055d56035-scripts\") pod \"glance-default-internal-api-0\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.378535 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/857e17c8-bcb3-4632-af28-27fe80d6fe2d-config-data\") pod \"glance-default-external-api-0\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.378801 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dacae0a7-2b2d-44dc-a676-027055d56035-config-data\") pod \"glance-default-internal-api-0\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.380535 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/857e17c8-bcb3-4632-af28-27fe80d6fe2d-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:07 
crc kubenswrapper[4982]: I0122 06:07:07.381978 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/857e17c8-bcb3-4632-af28-27fe80d6fe2d-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.383539 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bhqk\" (UniqueName: \"kubernetes.io/projected/857e17c8-bcb3-4632-af28-27fe80d6fe2d-kube-api-access-6bhqk\") pod \"glance-default-external-api-0\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.385302 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-88hpw\" (UniqueName: \"kubernetes.io/projected/dacae0a7-2b2d-44dc-a676-027055d56035-kube-api-access-88hpw\") pod \"glance-default-internal-api-0\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.385395 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dacae0a7-2b2d-44dc-a676-027055d56035-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.407562 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.413639 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.480092 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.552373 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.733072 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4295ac1c-eaee-4720-8384-c112e1f991c3" path="/var/lib/kubelet/pods/4295ac1c-eaee-4720-8384-c112e1f991c3/volumes" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.734491 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9e5c13b-ae97-4be3-bb94-f33eba693490" path="/var/lib/kubelet/pods/b9e5c13b-ae97-4be3-bb94-f33eba693490/volumes" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.825637 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-796946d7c7-z4qnr"] Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.829014 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-664cd4b9c4-ccktp" event={"ID":"7ba39683-f142-405a-a1c8-83841c5b2cd0","Type":"ContainerStarted","Data":"eef7e59c675432bec99f0f98a0e39c55d8cace43d4af135404063d40d0eaba03"} Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.829048 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-664cd4b9c4-ccktp" event={"ID":"7ba39683-f142-405a-a1c8-83841c5b2cd0","Type":"ContainerStarted","Data":"c7f03d3fdd742f290077c8c859e03cbb33c8046802e0420e28b5720902b21897"} Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.829058 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-664cd4b9c4-ccktp" event={"ID":"7ba39683-f142-405a-a1c8-83841c5b2cd0","Type":"ContainerStarted","Data":"e5bef35f7cbb99b1c6eafa9f5060bc281d1c241d43dbf0cc23ef2d3fb06cb9f3"} Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.829072 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-664cd4b9c4-ccktp" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.829081 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-664cd4b9c4-ccktp" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.829140 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-796946d7c7-z4qnr" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.836738 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-crgl6" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.836922 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.837007 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.837191 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-796946d7c7-z4qnr"] Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.838545 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"eba6d2d0-f7a0-4865-8b11-bdb9b302271c","Type":"ContainerStarted","Data":"be973b6191fd92618aa9f97131e3d89506037169b999429d678fb9ea1f07b800"} Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.843923 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7c5dd486cd-r6bbs" event={"ID":"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c","Type":"ContainerStarted","Data":"d9527b25256bc946222c27f68c3f6bbab1d33657220aeaed03ede4fe9c3b35bf"} Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.843972 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7c5dd486cd-r6bbs" event={"ID":"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c","Type":"ContainerStarted","Data":"674e5b35a4f913f3939a4f526e18e8cd5bf32d30da8a86756283841fe83e39e8"} Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.844669 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-7c5dd486cd-r6bbs" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.857875 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-664cd4b9c4-ccktp" podStartSLOduration=4.857841443 podStartE2EDuration="4.857841443s" podCreationTimestamp="2026-01-22 06:07:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:07:07.857451863 +0000 UTC m=+1288.696089866" watchObservedRunningTime="2026-01-22 06:07:07.857841443 +0000 UTC m=+1288.696479446" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.949324 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-7c5dd486cd-r6bbs" podStartSLOduration=3.949299199 podStartE2EDuration="3.949299199s" podCreationTimestamp="2026-01-22 06:07:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:07:07.894581151 +0000 UTC m=+1288.733219154" watchObservedRunningTime="2026-01-22 06:07:07.949299199 +0000 UTC m=+1288.787937202" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.984794 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxbcs\" (UniqueName: \"kubernetes.io/projected/8c1843cf-e6eb-400b-84ca-5e9d209a23ce-kube-api-access-pxbcs\") pod \"barbican-worker-796946d7c7-z4qnr\" (UID: \"8c1843cf-e6eb-400b-84ca-5e9d209a23ce\") " pod="openstack/barbican-worker-796946d7c7-z4qnr" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.985015 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c1843cf-e6eb-400b-84ca-5e9d209a23ce-config-data\") pod \"barbican-worker-796946d7c7-z4qnr\" (UID: \"8c1843cf-e6eb-400b-84ca-5e9d209a23ce\") " pod="openstack/barbican-worker-796946d7c7-z4qnr" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.985035 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8c1843cf-e6eb-400b-84ca-5e9d209a23ce-config-data-custom\") pod \"barbican-worker-796946d7c7-z4qnr\" (UID: \"8c1843cf-e6eb-400b-84ca-5e9d209a23ce\") " pod="openstack/barbican-worker-796946d7c7-z4qnr" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.985068 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c1843cf-e6eb-400b-84ca-5e9d209a23ce-logs\") pod \"barbican-worker-796946d7c7-z4qnr\" (UID: \"8c1843cf-e6eb-400b-84ca-5e9d209a23ce\") " pod="openstack/barbican-worker-796946d7c7-z4qnr" Jan 22 06:07:07 crc kubenswrapper[4982]: I0122 06:07:07.985095 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c1843cf-e6eb-400b-84ca-5e9d209a23ce-combined-ca-bundle\") pod \"barbican-worker-796946d7c7-z4qnr\" (UID: \"8c1843cf-e6eb-400b-84ca-5e9d209a23ce\") " pod="openstack/barbican-worker-796946d7c7-z4qnr" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.017107 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-59458f7b58-qd4fn"] Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.018587 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-59458f7b58-qd4fn" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.027334 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.035048 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-59458f7b58-qd4fn"] Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.052994 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6f6f8cb849-97n8z"] Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.053261 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6f6f8cb849-97n8z" podUID="2d901ec6-c01e-4859-930d-04e5ceac9e04" containerName="dnsmasq-dns" containerID="cri-o://f72c805ca08d3bd910a315f7dab4023cf167defa0bca0344bec478ae4f6e0be2" gracePeriod=10 Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.062926 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6f6f8cb849-97n8z" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.079665 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-66cdd4b5b5-b5tfx"] Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.081141 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-66cdd4b5b5-b5tfx" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.086444 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/80c8240c-c286-40a5-91c3-db77f4a01b8e-ovsdbserver-nb\") pod \"dnsmasq-dns-66cdd4b5b5-b5tfx\" (UID: \"80c8240c-c286-40a5-91c3-db77f4a01b8e\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-b5tfx" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.086538 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c1843cf-e6eb-400b-84ca-5e9d209a23ce-config-data\") pod \"barbican-worker-796946d7c7-z4qnr\" (UID: \"8c1843cf-e6eb-400b-84ca-5e9d209a23ce\") " pod="openstack/barbican-worker-796946d7c7-z4qnr" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.086556 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8c1843cf-e6eb-400b-84ca-5e9d209a23ce-config-data-custom\") pod \"barbican-worker-796946d7c7-z4qnr\" (UID: \"8c1843cf-e6eb-400b-84ca-5e9d209a23ce\") " pod="openstack/barbican-worker-796946d7c7-z4qnr" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.086582 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/80c8240c-c286-40a5-91c3-db77f4a01b8e-ovsdbserver-sb\") pod \"dnsmasq-dns-66cdd4b5b5-b5tfx\" (UID: \"80c8240c-c286-40a5-91c3-db77f4a01b8e\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-b5tfx" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.086619 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c1843cf-e6eb-400b-84ca-5e9d209a23ce-logs\") pod \"barbican-worker-796946d7c7-z4qnr\" (UID: \"8c1843cf-e6eb-400b-84ca-5e9d209a23ce\") " pod="openstack/barbican-worker-796946d7c7-z4qnr" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.086639 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80c8240c-c286-40a5-91c3-db77f4a01b8e-config\") pod \"dnsmasq-dns-66cdd4b5b5-b5tfx\" (UID: \"80c8240c-c286-40a5-91c3-db77f4a01b8e\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-b5tfx" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.086670 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c1843cf-e6eb-400b-84ca-5e9d209a23ce-combined-ca-bundle\") pod \"barbican-worker-796946d7c7-z4qnr\" (UID: \"8c1843cf-e6eb-400b-84ca-5e9d209a23ce\") " pod="openstack/barbican-worker-796946d7c7-z4qnr" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.086736 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7sj9s\" (UniqueName: \"kubernetes.io/projected/80c8240c-c286-40a5-91c3-db77f4a01b8e-kube-api-access-7sj9s\") pod \"dnsmasq-dns-66cdd4b5b5-b5tfx\" (UID: \"80c8240c-c286-40a5-91c3-db77f4a01b8e\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-b5tfx" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.086795 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/80c8240c-c286-40a5-91c3-db77f4a01b8e-dns-swift-storage-0\") 
pod \"dnsmasq-dns-66cdd4b5b5-b5tfx\" (UID: \"80c8240c-c286-40a5-91c3-db77f4a01b8e\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-b5tfx" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.086844 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxbcs\" (UniqueName: \"kubernetes.io/projected/8c1843cf-e6eb-400b-84ca-5e9d209a23ce-kube-api-access-pxbcs\") pod \"barbican-worker-796946d7c7-z4qnr\" (UID: \"8c1843cf-e6eb-400b-84ca-5e9d209a23ce\") " pod="openstack/barbican-worker-796946d7c7-z4qnr" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.086897 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/80c8240c-c286-40a5-91c3-db77f4a01b8e-dns-svc\") pod \"dnsmasq-dns-66cdd4b5b5-b5tfx\" (UID: \"80c8240c-c286-40a5-91c3-db77f4a01b8e\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-b5tfx" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.087061 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c1843cf-e6eb-400b-84ca-5e9d209a23ce-logs\") pod \"barbican-worker-796946d7c7-z4qnr\" (UID: \"8c1843cf-e6eb-400b-84ca-5e9d209a23ce\") " pod="openstack/barbican-worker-796946d7c7-z4qnr" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.103756 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c1843cf-e6eb-400b-84ca-5e9d209a23ce-config-data\") pod \"barbican-worker-796946d7c7-z4qnr\" (UID: \"8c1843cf-e6eb-400b-84ca-5e9d209a23ce\") " pod="openstack/barbican-worker-796946d7c7-z4qnr" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.103937 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8c1843cf-e6eb-400b-84ca-5e9d209a23ce-config-data-custom\") pod \"barbican-worker-796946d7c7-z4qnr\" (UID: \"8c1843cf-e6eb-400b-84ca-5e9d209a23ce\") " pod="openstack/barbican-worker-796946d7c7-z4qnr" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.109326 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-66cdd4b5b5-b5tfx"] Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.115243 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c1843cf-e6eb-400b-84ca-5e9d209a23ce-combined-ca-bundle\") pod \"barbican-worker-796946d7c7-z4qnr\" (UID: \"8c1843cf-e6eb-400b-84ca-5e9d209a23ce\") " pod="openstack/barbican-worker-796946d7c7-z4qnr" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.121348 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxbcs\" (UniqueName: \"kubernetes.io/projected/8c1843cf-e6eb-400b-84ca-5e9d209a23ce-kube-api-access-pxbcs\") pod \"barbican-worker-796946d7c7-z4qnr\" (UID: \"8c1843cf-e6eb-400b-84ca-5e9d209a23ce\") " pod="openstack/barbican-worker-796946d7c7-z4qnr" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.163690 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6f6f8cb849-97n8z" podUID="2d901ec6-c01e-4859-930d-04e5ceac9e04" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.147:5353: connect: connection refused" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.164212 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-796946d7c7-z4qnr" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.176734 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6d8df7d8f6-ctbwg"] Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.178340 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6d8df7d8f6-ctbwg" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.190056 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a273f991-b7b0-4476-a0f5-80e6a439951b-config-data-custom\") pod \"barbican-api-6d8df7d8f6-ctbwg\" (UID: \"a273f991-b7b0-4476-a0f5-80e6a439951b\") " pod="openstack/barbican-api-6d8df7d8f6-ctbwg" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.190106 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/80c8240c-c286-40a5-91c3-db77f4a01b8e-ovsdbserver-nb\") pod \"dnsmasq-dns-66cdd4b5b5-b5tfx\" (UID: \"80c8240c-c286-40a5-91c3-db77f4a01b8e\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-b5tfx" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.190136 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f54d316-aa4d-4c56-8681-3fa9816a1b80-combined-ca-bundle\") pod \"barbican-keystone-listener-59458f7b58-qd4fn\" (UID: \"4f54d316-aa4d-4c56-8681-3fa9816a1b80\") " pod="openstack/barbican-keystone-listener-59458f7b58-qd4fn" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.190168 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/80c8240c-c286-40a5-91c3-db77f4a01b8e-ovsdbserver-sb\") pod \"dnsmasq-dns-66cdd4b5b5-b5tfx\" (UID: \"80c8240c-c286-40a5-91c3-db77f4a01b8e\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-b5tfx" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.190188 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a273f991-b7b0-4476-a0f5-80e6a439951b-logs\") pod \"barbican-api-6d8df7d8f6-ctbwg\" (UID: \"a273f991-b7b0-4476-a0f5-80e6a439951b\") " pod="openstack/barbican-api-6d8df7d8f6-ctbwg" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.190215 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80c8240c-c286-40a5-91c3-db77f4a01b8e-config\") pod \"dnsmasq-dns-66cdd4b5b5-b5tfx\" (UID: \"80c8240c-c286-40a5-91c3-db77f4a01b8e\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-b5tfx" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.190241 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvx9s\" (UniqueName: \"kubernetes.io/projected/a273f991-b7b0-4476-a0f5-80e6a439951b-kube-api-access-gvx9s\") pod \"barbican-api-6d8df7d8f6-ctbwg\" (UID: \"a273f991-b7b0-4476-a0f5-80e6a439951b\") " pod="openstack/barbican-api-6d8df7d8f6-ctbwg" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.190265 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a273f991-b7b0-4476-a0f5-80e6a439951b-combined-ca-bundle\") pod 
\"barbican-api-6d8df7d8f6-ctbwg\" (UID: \"a273f991-b7b0-4476-a0f5-80e6a439951b\") " pod="openstack/barbican-api-6d8df7d8f6-ctbwg" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.190290 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f54d316-aa4d-4c56-8681-3fa9816a1b80-config-data\") pod \"barbican-keystone-listener-59458f7b58-qd4fn\" (UID: \"4f54d316-aa4d-4c56-8681-3fa9816a1b80\") " pod="openstack/barbican-keystone-listener-59458f7b58-qd4fn" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.190311 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7sj9s\" (UniqueName: \"kubernetes.io/projected/80c8240c-c286-40a5-91c3-db77f4a01b8e-kube-api-access-7sj9s\") pod \"dnsmasq-dns-66cdd4b5b5-b5tfx\" (UID: \"80c8240c-c286-40a5-91c3-db77f4a01b8e\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-b5tfx" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.190344 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/80c8240c-c286-40a5-91c3-db77f4a01b8e-dns-swift-storage-0\") pod \"dnsmasq-dns-66cdd4b5b5-b5tfx\" (UID: \"80c8240c-c286-40a5-91c3-db77f4a01b8e\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-b5tfx" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.190374 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g49hs\" (UniqueName: \"kubernetes.io/projected/4f54d316-aa4d-4c56-8681-3fa9816a1b80-kube-api-access-g49hs\") pod \"barbican-keystone-listener-59458f7b58-qd4fn\" (UID: \"4f54d316-aa4d-4c56-8681-3fa9816a1b80\") " pod="openstack/barbican-keystone-listener-59458f7b58-qd4fn" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.190399 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a273f991-b7b0-4476-a0f5-80e6a439951b-config-data\") pod \"barbican-api-6d8df7d8f6-ctbwg\" (UID: \"a273f991-b7b0-4476-a0f5-80e6a439951b\") " pod="openstack/barbican-api-6d8df7d8f6-ctbwg" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.190419 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4f54d316-aa4d-4c56-8681-3fa9816a1b80-config-data-custom\") pod \"barbican-keystone-listener-59458f7b58-qd4fn\" (UID: \"4f54d316-aa4d-4c56-8681-3fa9816a1b80\") " pod="openstack/barbican-keystone-listener-59458f7b58-qd4fn" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.190440 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/80c8240c-c286-40a5-91c3-db77f4a01b8e-dns-svc\") pod \"dnsmasq-dns-66cdd4b5b5-b5tfx\" (UID: \"80c8240c-c286-40a5-91c3-db77f4a01b8e\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-b5tfx" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.190469 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f54d316-aa4d-4c56-8681-3fa9816a1b80-logs\") pod \"barbican-keystone-listener-59458f7b58-qd4fn\" (UID: \"4f54d316-aa4d-4c56-8681-3fa9816a1b80\") " pod="openstack/barbican-keystone-listener-59458f7b58-qd4fn" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.191270 4982 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/80c8240c-c286-40a5-91c3-db77f4a01b8e-ovsdbserver-nb\") pod \"dnsmasq-dns-66cdd4b5b5-b5tfx\" (UID: \"80c8240c-c286-40a5-91c3-db77f4a01b8e\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-b5tfx" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.191761 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/80c8240c-c286-40a5-91c3-db77f4a01b8e-ovsdbserver-sb\") pod \"dnsmasq-dns-66cdd4b5b5-b5tfx\" (UID: \"80c8240c-c286-40a5-91c3-db77f4a01b8e\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-b5tfx" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.192272 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80c8240c-c286-40a5-91c3-db77f4a01b8e-config\") pod \"dnsmasq-dns-66cdd4b5b5-b5tfx\" (UID: \"80c8240c-c286-40a5-91c3-db77f4a01b8e\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-b5tfx" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.197445 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/80c8240c-c286-40a5-91c3-db77f4a01b8e-dns-swift-storage-0\") pod \"dnsmasq-dns-66cdd4b5b5-b5tfx\" (UID: \"80c8240c-c286-40a5-91c3-db77f4a01b8e\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-b5tfx" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.198026 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/80c8240c-c286-40a5-91c3-db77f4a01b8e-dns-svc\") pod \"dnsmasq-dns-66cdd4b5b5-b5tfx\" (UID: \"80c8240c-c286-40a5-91c3-db77f4a01b8e\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-b5tfx" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.198337 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.225410 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6d8df7d8f6-ctbwg"] Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.226565 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7sj9s\" (UniqueName: \"kubernetes.io/projected/80c8240c-c286-40a5-91c3-db77f4a01b8e-kube-api-access-7sj9s\") pod \"dnsmasq-dns-66cdd4b5b5-b5tfx\" (UID: \"80c8240c-c286-40a5-91c3-db77f4a01b8e\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-b5tfx" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.237809 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5566dc6564-s4gjt"] Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.239308 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5566dc6564-s4gjt" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.242519 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.245987 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.246054 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.246313 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-sr6nk" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.271973 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5566dc6564-s4gjt"] Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.293025 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvx9s\" (UniqueName: \"kubernetes.io/projected/a273f991-b7b0-4476-a0f5-80e6a439951b-kube-api-access-gvx9s\") pod \"barbican-api-6d8df7d8f6-ctbwg\" (UID: \"a273f991-b7b0-4476-a0f5-80e6a439951b\") " pod="openstack/barbican-api-6d8df7d8f6-ctbwg" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.293066 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a273f991-b7b0-4476-a0f5-80e6a439951b-combined-ca-bundle\") pod \"barbican-api-6d8df7d8f6-ctbwg\" (UID: \"a273f991-b7b0-4476-a0f5-80e6a439951b\") " pod="openstack/barbican-api-6d8df7d8f6-ctbwg" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.293092 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f54d316-aa4d-4c56-8681-3fa9816a1b80-config-data\") pod \"barbican-keystone-listener-59458f7b58-qd4fn\" (UID: \"4f54d316-aa4d-4c56-8681-3fa9816a1b80\") " pod="openstack/barbican-keystone-listener-59458f7b58-qd4fn" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.293115 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ed422a7-07eb-4414-bae7-fcb9aaa199d9-combined-ca-bundle\") pod \"neutron-5566dc6564-s4gjt\" (UID: \"2ed422a7-07eb-4414-bae7-fcb9aaa199d9\") " pod="openstack/neutron-5566dc6564-s4gjt" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.293152 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2ed422a7-07eb-4414-bae7-fcb9aaa199d9-config\") pod \"neutron-5566dc6564-s4gjt\" (UID: \"2ed422a7-07eb-4414-bae7-fcb9aaa199d9\") " pod="openstack/neutron-5566dc6564-s4gjt" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.293169 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g49hs\" (UniqueName: \"kubernetes.io/projected/4f54d316-aa4d-4c56-8681-3fa9816a1b80-kube-api-access-g49hs\") pod \"barbican-keystone-listener-59458f7b58-qd4fn\" (UID: \"4f54d316-aa4d-4c56-8681-3fa9816a1b80\") " pod="openstack/barbican-keystone-listener-59458f7b58-qd4fn" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.293189 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/a273f991-b7b0-4476-a0f5-80e6a439951b-config-data\") pod \"barbican-api-6d8df7d8f6-ctbwg\" (UID: \"a273f991-b7b0-4476-a0f5-80e6a439951b\") " pod="openstack/barbican-api-6d8df7d8f6-ctbwg" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.293205 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4f54d316-aa4d-4c56-8681-3fa9816a1b80-config-data-custom\") pod \"barbican-keystone-listener-59458f7b58-qd4fn\" (UID: \"4f54d316-aa4d-4c56-8681-3fa9816a1b80\") " pod="openstack/barbican-keystone-listener-59458f7b58-qd4fn" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.293236 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f54d316-aa4d-4c56-8681-3fa9816a1b80-logs\") pod \"barbican-keystone-listener-59458f7b58-qd4fn\" (UID: \"4f54d316-aa4d-4c56-8681-3fa9816a1b80\") " pod="openstack/barbican-keystone-listener-59458f7b58-qd4fn" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.293262 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2ed422a7-07eb-4414-bae7-fcb9aaa199d9-httpd-config\") pod \"neutron-5566dc6564-s4gjt\" (UID: \"2ed422a7-07eb-4414-bae7-fcb9aaa199d9\") " pod="openstack/neutron-5566dc6564-s4gjt" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.293283 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55zhm\" (UniqueName: \"kubernetes.io/projected/2ed422a7-07eb-4414-bae7-fcb9aaa199d9-kube-api-access-55zhm\") pod \"neutron-5566dc6564-s4gjt\" (UID: \"2ed422a7-07eb-4414-bae7-fcb9aaa199d9\") " pod="openstack/neutron-5566dc6564-s4gjt" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.293304 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a273f991-b7b0-4476-a0f5-80e6a439951b-config-data-custom\") pod \"barbican-api-6d8df7d8f6-ctbwg\" (UID: \"a273f991-b7b0-4476-a0f5-80e6a439951b\") " pod="openstack/barbican-api-6d8df7d8f6-ctbwg" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.293327 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f54d316-aa4d-4c56-8681-3fa9816a1b80-combined-ca-bundle\") pod \"barbican-keystone-listener-59458f7b58-qd4fn\" (UID: \"4f54d316-aa4d-4c56-8681-3fa9816a1b80\") " pod="openstack/barbican-keystone-listener-59458f7b58-qd4fn" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.293355 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a273f991-b7b0-4476-a0f5-80e6a439951b-logs\") pod \"barbican-api-6d8df7d8f6-ctbwg\" (UID: \"a273f991-b7b0-4476-a0f5-80e6a439951b\") " pod="openstack/barbican-api-6d8df7d8f6-ctbwg" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.293372 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ed422a7-07eb-4414-bae7-fcb9aaa199d9-ovndb-tls-certs\") pod \"neutron-5566dc6564-s4gjt\" (UID: \"2ed422a7-07eb-4414-bae7-fcb9aaa199d9\") " pod="openstack/neutron-5566dc6564-s4gjt" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.296231 4982 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f54d316-aa4d-4c56-8681-3fa9816a1b80-logs\") pod \"barbican-keystone-listener-59458f7b58-qd4fn\" (UID: \"4f54d316-aa4d-4c56-8681-3fa9816a1b80\") " pod="openstack/barbican-keystone-listener-59458f7b58-qd4fn" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.300978 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f54d316-aa4d-4c56-8681-3fa9816a1b80-config-data\") pod \"barbican-keystone-listener-59458f7b58-qd4fn\" (UID: \"4f54d316-aa4d-4c56-8681-3fa9816a1b80\") " pod="openstack/barbican-keystone-listener-59458f7b58-qd4fn" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.301415 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f54d316-aa4d-4c56-8681-3fa9816a1b80-combined-ca-bundle\") pod \"barbican-keystone-listener-59458f7b58-qd4fn\" (UID: \"4f54d316-aa4d-4c56-8681-3fa9816a1b80\") " pod="openstack/barbican-keystone-listener-59458f7b58-qd4fn" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.304282 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4f54d316-aa4d-4c56-8681-3fa9816a1b80-config-data-custom\") pod \"barbican-keystone-listener-59458f7b58-qd4fn\" (UID: \"4f54d316-aa4d-4c56-8681-3fa9816a1b80\") " pod="openstack/barbican-keystone-listener-59458f7b58-qd4fn" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.307480 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.311129 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a273f991-b7b0-4476-a0f5-80e6a439951b-logs\") pod \"barbican-api-6d8df7d8f6-ctbwg\" (UID: \"a273f991-b7b0-4476-a0f5-80e6a439951b\") " pod="openstack/barbican-api-6d8df7d8f6-ctbwg" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.314069 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gvx9s\" (UniqueName: \"kubernetes.io/projected/a273f991-b7b0-4476-a0f5-80e6a439951b-kube-api-access-gvx9s\") pod \"barbican-api-6d8df7d8f6-ctbwg\" (UID: \"a273f991-b7b0-4476-a0f5-80e6a439951b\") " pod="openstack/barbican-api-6d8df7d8f6-ctbwg" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.315472 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a273f991-b7b0-4476-a0f5-80e6a439951b-config-data-custom\") pod \"barbican-api-6d8df7d8f6-ctbwg\" (UID: \"a273f991-b7b0-4476-a0f5-80e6a439951b\") " pod="openstack/barbican-api-6d8df7d8f6-ctbwg" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.317153 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a273f991-b7b0-4476-a0f5-80e6a439951b-combined-ca-bundle\") pod \"barbican-api-6d8df7d8f6-ctbwg\" (UID: \"a273f991-b7b0-4476-a0f5-80e6a439951b\") " pod="openstack/barbican-api-6d8df7d8f6-ctbwg" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.319697 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a273f991-b7b0-4476-a0f5-80e6a439951b-config-data\") pod \"barbican-api-6d8df7d8f6-ctbwg\" (UID: \"a273f991-b7b0-4476-a0f5-80e6a439951b\") " 
pod="openstack/barbican-api-6d8df7d8f6-ctbwg" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.327263 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g49hs\" (UniqueName: \"kubernetes.io/projected/4f54d316-aa4d-4c56-8681-3fa9816a1b80-kube-api-access-g49hs\") pod \"barbican-keystone-listener-59458f7b58-qd4fn\" (UID: \"4f54d316-aa4d-4c56-8681-3fa9816a1b80\") " pod="openstack/barbican-keystone-listener-59458f7b58-qd4fn" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.357988 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-59458f7b58-qd4fn" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.395559 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2ed422a7-07eb-4414-bae7-fcb9aaa199d9-config\") pod \"neutron-5566dc6564-s4gjt\" (UID: \"2ed422a7-07eb-4414-bae7-fcb9aaa199d9\") " pod="openstack/neutron-5566dc6564-s4gjt" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.395668 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2ed422a7-07eb-4414-bae7-fcb9aaa199d9-httpd-config\") pod \"neutron-5566dc6564-s4gjt\" (UID: \"2ed422a7-07eb-4414-bae7-fcb9aaa199d9\") " pod="openstack/neutron-5566dc6564-s4gjt" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.395693 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55zhm\" (UniqueName: \"kubernetes.io/projected/2ed422a7-07eb-4414-bae7-fcb9aaa199d9-kube-api-access-55zhm\") pod \"neutron-5566dc6564-s4gjt\" (UID: \"2ed422a7-07eb-4414-bae7-fcb9aaa199d9\") " pod="openstack/neutron-5566dc6564-s4gjt" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.395745 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ed422a7-07eb-4414-bae7-fcb9aaa199d9-ovndb-tls-certs\") pod \"neutron-5566dc6564-s4gjt\" (UID: \"2ed422a7-07eb-4414-bae7-fcb9aaa199d9\") " pod="openstack/neutron-5566dc6564-s4gjt" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.395790 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ed422a7-07eb-4414-bae7-fcb9aaa199d9-combined-ca-bundle\") pod \"neutron-5566dc6564-s4gjt\" (UID: \"2ed422a7-07eb-4414-bae7-fcb9aaa199d9\") " pod="openstack/neutron-5566dc6564-s4gjt" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.402359 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/2ed422a7-07eb-4414-bae7-fcb9aaa199d9-config\") pod \"neutron-5566dc6564-s4gjt\" (UID: \"2ed422a7-07eb-4414-bae7-fcb9aaa199d9\") " pod="openstack/neutron-5566dc6564-s4gjt" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.403086 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ed422a7-07eb-4414-bae7-fcb9aaa199d9-ovndb-tls-certs\") pod \"neutron-5566dc6564-s4gjt\" (UID: \"2ed422a7-07eb-4414-bae7-fcb9aaa199d9\") " pod="openstack/neutron-5566dc6564-s4gjt" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.403752 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ed422a7-07eb-4414-bae7-fcb9aaa199d9-combined-ca-bundle\") pod 
\"neutron-5566dc6564-s4gjt\" (UID: \"2ed422a7-07eb-4414-bae7-fcb9aaa199d9\") " pod="openstack/neutron-5566dc6564-s4gjt" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.404370 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2ed422a7-07eb-4414-bae7-fcb9aaa199d9-httpd-config\") pod \"neutron-5566dc6564-s4gjt\" (UID: \"2ed422a7-07eb-4414-bae7-fcb9aaa199d9\") " pod="openstack/neutron-5566dc6564-s4gjt" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.415282 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55zhm\" (UniqueName: \"kubernetes.io/projected/2ed422a7-07eb-4414-bae7-fcb9aaa199d9-kube-api-access-55zhm\") pod \"neutron-5566dc6564-s4gjt\" (UID: \"2ed422a7-07eb-4414-bae7-fcb9aaa199d9\") " pod="openstack/neutron-5566dc6564-s4gjt" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.472055 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66cdd4b5b5-b5tfx" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.562669 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6d8df7d8f6-ctbwg" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.628356 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5566dc6564-s4gjt" Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.755990 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-796946d7c7-z4qnr"] Jan 22 06:07:08 crc kubenswrapper[4982]: W0122 06:07:08.781683 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8c1843cf_e6eb_400b_84ca_5e9d209a23ce.slice/crio-7cb8b3b221ac3fa310c5c2ceedbd30e044b5963b2d92005e8a6ef9f1a1895229 WatchSource:0}: Error finding container 7cb8b3b221ac3fa310c5c2ceedbd30e044b5963b2d92005e8a6ef9f1a1895229: Status 404 returned error can't find the container with id 7cb8b3b221ac3fa310c5c2ceedbd30e044b5963b2d92005e8a6ef9f1a1895229 Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.856328 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"dacae0a7-2b2d-44dc-a676-027055d56035","Type":"ContainerStarted","Data":"0fbac9aadbb4d6d403e07227d9ebaad3515253d708b84366b38262fc6e69358c"} Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.862871 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-796946d7c7-z4qnr" event={"ID":"8c1843cf-e6eb-400b-84ca-5e9d209a23ce","Type":"ContainerStarted","Data":"7cb8b3b221ac3fa310c5c2ceedbd30e044b5963b2d92005e8a6ef9f1a1895229"} Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.867510 4982 generic.go:334] "Generic (PLEG): container finished" podID="2d901ec6-c01e-4859-930d-04e5ceac9e04" containerID="f72c805ca08d3bd910a315f7dab4023cf167defa0bca0344bec478ae4f6e0be2" exitCode=0 Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.867653 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f6f8cb849-97n8z" event={"ID":"2d901ec6-c01e-4859-930d-04e5ceac9e04","Type":"ContainerDied","Data":"f72c805ca08d3bd910a315f7dab4023cf167defa0bca0344bec478ae4f6e0be2"} Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.964449 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 06:07:08 crc kubenswrapper[4982]: W0122 
06:07:08.977694 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod857e17c8_bcb3_4632_af28_27fe80d6fe2d.slice/crio-b33e9a8b52662c00f5a92430e31798aef4291003dde477d2651f2f8b8caccebb WatchSource:0}: Error finding container b33e9a8b52662c00f5a92430e31798aef4291003dde477d2651f2f8b8caccebb: Status 404 returned error can't find the container with id b33e9a8b52662c00f5a92430e31798aef4291003dde477d2651f2f8b8caccebb Jan 22 06:07:08 crc kubenswrapper[4982]: I0122 06:07:08.990796 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6f6f8cb849-97n8z" Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.116005 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2d901ec6-c01e-4859-930d-04e5ceac9e04-dns-svc\") pod \"2d901ec6-c01e-4859-930d-04e5ceac9e04\" (UID: \"2d901ec6-c01e-4859-930d-04e5ceac9e04\") " Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.116108 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r25s4\" (UniqueName: \"kubernetes.io/projected/2d901ec6-c01e-4859-930d-04e5ceac9e04-kube-api-access-r25s4\") pod \"2d901ec6-c01e-4859-930d-04e5ceac9e04\" (UID: \"2d901ec6-c01e-4859-930d-04e5ceac9e04\") " Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.116168 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2d901ec6-c01e-4859-930d-04e5ceac9e04-ovsdbserver-sb\") pod \"2d901ec6-c01e-4859-930d-04e5ceac9e04\" (UID: \"2d901ec6-c01e-4859-930d-04e5ceac9e04\") " Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.116665 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2d901ec6-c01e-4859-930d-04e5ceac9e04-ovsdbserver-nb\") pod \"2d901ec6-c01e-4859-930d-04e5ceac9e04\" (UID: \"2d901ec6-c01e-4859-930d-04e5ceac9e04\") " Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.116712 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d901ec6-c01e-4859-930d-04e5ceac9e04-config\") pod \"2d901ec6-c01e-4859-930d-04e5ceac9e04\" (UID: \"2d901ec6-c01e-4859-930d-04e5ceac9e04\") " Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.116773 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2d901ec6-c01e-4859-930d-04e5ceac9e04-dns-swift-storage-0\") pod \"2d901ec6-c01e-4859-930d-04e5ceac9e04\" (UID: \"2d901ec6-c01e-4859-930d-04e5ceac9e04\") " Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.125608 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d901ec6-c01e-4859-930d-04e5ceac9e04-kube-api-access-r25s4" (OuterVolumeSpecName: "kube-api-access-r25s4") pod "2d901ec6-c01e-4859-930d-04e5ceac9e04" (UID: "2d901ec6-c01e-4859-930d-04e5ceac9e04"). InnerVolumeSpecName "kube-api-access-r25s4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.174346 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d901ec6-c01e-4859-930d-04e5ceac9e04-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2d901ec6-c01e-4859-930d-04e5ceac9e04" (UID: "2d901ec6-c01e-4859-930d-04e5ceac9e04"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.189481 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d901ec6-c01e-4859-930d-04e5ceac9e04-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2d901ec6-c01e-4859-930d-04e5ceac9e04" (UID: "2d901ec6-c01e-4859-930d-04e5ceac9e04"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.193021 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-59458f7b58-qd4fn"] Jan 22 06:07:09 crc kubenswrapper[4982]: W0122 06:07:09.200095 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4f54d316_aa4d_4c56_8681_3fa9816a1b80.slice/crio-78f518091f4664c3d8683cef6c0a28967b229c4ba09f65b3e47ad3e22a50ec4f WatchSource:0}: Error finding container 78f518091f4664c3d8683cef6c0a28967b229c4ba09f65b3e47ad3e22a50ec4f: Status 404 returned error can't find the container with id 78f518091f4664c3d8683cef6c0a28967b229c4ba09f65b3e47ad3e22a50ec4f Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.208178 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-66cdd4b5b5-b5tfx"] Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.219104 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2d901ec6-c01e-4859-930d-04e5ceac9e04-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.219138 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r25s4\" (UniqueName: \"kubernetes.io/projected/2d901ec6-c01e-4859-930d-04e5ceac9e04-kube-api-access-r25s4\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.219150 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2d901ec6-c01e-4859-930d-04e5ceac9e04-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.228225 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d901ec6-c01e-4859-930d-04e5ceac9e04-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2d901ec6-c01e-4859-930d-04e5ceac9e04" (UID: "2d901ec6-c01e-4859-930d-04e5ceac9e04"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.237076 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d901ec6-c01e-4859-930d-04e5ceac9e04-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2d901ec6-c01e-4859-930d-04e5ceac9e04" (UID: "2d901ec6-c01e-4859-930d-04e5ceac9e04"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.258452 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d901ec6-c01e-4859-930d-04e5ceac9e04-config" (OuterVolumeSpecName: "config") pod "2d901ec6-c01e-4859-930d-04e5ceac9e04" (UID: "2d901ec6-c01e-4859-930d-04e5ceac9e04"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.330333 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d901ec6-c01e-4859-930d-04e5ceac9e04-config\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.330367 4982 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2d901ec6-c01e-4859-930d-04e5ceac9e04-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.330384 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2d901ec6-c01e-4859-930d-04e5ceac9e04-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.473168 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6d8df7d8f6-ctbwg"] Jan 22 06:07:09 crc kubenswrapper[4982]: W0122 06:07:09.489657 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda273f991_b7b0_4476_a0f5_80e6a439951b.slice/crio-644e6a3249a43f1c0c251475f6a2437406903451baa7f1887ec09998e7dfb3ef WatchSource:0}: Error finding container 644e6a3249a43f1c0c251475f6a2437406903451baa7f1887ec09998e7dfb3ef: Status 404 returned error can't find the container with id 644e6a3249a43f1c0c251475f6a2437406903451baa7f1887ec09998e7dfb3ef Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.504518 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5566dc6564-s4gjt"] Jan 22 06:07:09 crc kubenswrapper[4982]: W0122 06:07:09.508179 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2ed422a7_07eb_4414_bae7_fcb9aaa199d9.slice/crio-e16fd424ea8881539b7234c25b057b3ae2742238566ac87eec31a1739d13f738 WatchSource:0}: Error finding container e16fd424ea8881539b7234c25b057b3ae2742238566ac87eec31a1739d13f738: Status 404 returned error can't find the container with id e16fd424ea8881539b7234c25b057b3ae2742238566ac87eec31a1739d13f738 Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.905565 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f6f8cb849-97n8z" event={"ID":"2d901ec6-c01e-4859-930d-04e5ceac9e04","Type":"ContainerDied","Data":"b8830ec87ed33155338470d14011973c05fa79d637b00dfb3aaa69e5d2262727"} Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.905922 4982 scope.go:117] "RemoveContainer" containerID="f72c805ca08d3bd910a315f7dab4023cf167defa0bca0344bec478ae4f6e0be2" Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.905583 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6f6f8cb849-97n8z" Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.906963 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"dacae0a7-2b2d-44dc-a676-027055d56035","Type":"ContainerStarted","Data":"efc543e5393157011a31dd852ae15b06e5f944fc56ddcba577afd5e326c59667"} Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.910667 4982 generic.go:334] "Generic (PLEG): container finished" podID="80c8240c-c286-40a5-91c3-db77f4a01b8e" containerID="c04e95e86e12b2113c6af49fabba0b3e46675077affcf285be9979a22e966ecd" exitCode=0 Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.910752 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66cdd4b5b5-b5tfx" event={"ID":"80c8240c-c286-40a5-91c3-db77f4a01b8e","Type":"ContainerDied","Data":"c04e95e86e12b2113c6af49fabba0b3e46675077affcf285be9979a22e966ecd"} Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.910794 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66cdd4b5b5-b5tfx" event={"ID":"80c8240c-c286-40a5-91c3-db77f4a01b8e","Type":"ContainerStarted","Data":"7cbf1ef2eddee9268d2d82ac415fee1b95ec078c4b0d46793ca6f3aa3dc3a184"} Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.912337 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5566dc6564-s4gjt" event={"ID":"2ed422a7-07eb-4414-bae7-fcb9aaa199d9","Type":"ContainerStarted","Data":"e16fd424ea8881539b7234c25b057b3ae2742238566ac87eec31a1739d13f738"} Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.913811 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"857e17c8-bcb3-4632-af28-27fe80d6fe2d","Type":"ContainerStarted","Data":"b33e9a8b52662c00f5a92430e31798aef4291003dde477d2651f2f8b8caccebb"} Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.914781 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-59458f7b58-qd4fn" event={"ID":"4f54d316-aa4d-4c56-8681-3fa9816a1b80","Type":"ContainerStarted","Data":"78f518091f4664c3d8683cef6c0a28967b229c4ba09f65b3e47ad3e22a50ec4f"} Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.915829 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6d8df7d8f6-ctbwg" event={"ID":"a273f991-b7b0-4476-a0f5-80e6a439951b","Type":"ContainerStarted","Data":"1c30acf7712684fed6d3dbc08053f9e0f00a311305bcd3c6a262a28f8f768d5b"} Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.915865 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6d8df7d8f6-ctbwg" event={"ID":"a273f991-b7b0-4476-a0f5-80e6a439951b","Type":"ContainerStarted","Data":"644e6a3249a43f1c0c251475f6a2437406903451baa7f1887ec09998e7dfb3ef"} Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.984221 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6f6f8cb849-97n8z"] Jan 22 06:07:09 crc kubenswrapper[4982]: I0122 06:07:09.993542 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6f6f8cb849-97n8z"] Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.623186 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-f9754c45-wzfk5"] Jan 22 06:07:10 crc kubenswrapper[4982]: E0122 06:07:10.623773 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d901ec6-c01e-4859-930d-04e5ceac9e04" containerName="dnsmasq-dns" Jan 22 
Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.623791 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d901ec6-c01e-4859-930d-04e5ceac9e04" containerName="dnsmasq-dns"
Jan 22 06:07:10 crc kubenswrapper[4982]: E0122 06:07:10.623814 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d901ec6-c01e-4859-930d-04e5ceac9e04" containerName="init"
Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.623822 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d901ec6-c01e-4859-930d-04e5ceac9e04" containerName="init"
Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.624014 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d901ec6-c01e-4859-930d-04e5ceac9e04" containerName="dnsmasq-dns"
Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.624934 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-f9754c45-wzfk5"
Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.630226 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc"
Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.630412 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc"
Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.631084 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-f9754c45-wzfk5"]
Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.693414 4982 scope.go:117] "RemoveContainer" containerID="0c7b4d7f0102c35b4afdc72cc418d4cad10989c330476cb5c780f0b64f1a12cf"
Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.774217 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-config\") pod \"neutron-f9754c45-wzfk5\" (UID: \"b6593811-6583-4900-b402-5af9db3887b3\") " pod="openstack/neutron-f9754c45-wzfk5"
Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.774308 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-combined-ca-bundle\") pod \"neutron-f9754c45-wzfk5\" (UID: \"b6593811-6583-4900-b402-5af9db3887b3\") " pod="openstack/neutron-f9754c45-wzfk5"
Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.774350 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-public-tls-certs\") pod \"neutron-f9754c45-wzfk5\" (UID: \"b6593811-6583-4900-b402-5af9db3887b3\") " pod="openstack/neutron-f9754c45-wzfk5"
Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.774385 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prfjm\" (UniqueName: \"kubernetes.io/projected/b6593811-6583-4900-b402-5af9db3887b3-kube-api-access-prfjm\") pod \"neutron-f9754c45-wzfk5\" (UID: \"b6593811-6583-4900-b402-5af9db3887b3\") " pod="openstack/neutron-f9754c45-wzfk5"
Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.774405 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-internal-tls-certs\") pod \"neutron-f9754c45-wzfk5\" (UID: \"b6593811-6583-4900-b402-5af9db3887b3\") " 
pod="openstack/neutron-f9754c45-wzfk5" Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.774419 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-httpd-config\") pod \"neutron-f9754c45-wzfk5\" (UID: \"b6593811-6583-4900-b402-5af9db3887b3\") " pod="openstack/neutron-f9754c45-wzfk5" Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.774433 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-ovndb-tls-certs\") pod \"neutron-f9754c45-wzfk5\" (UID: \"b6593811-6583-4900-b402-5af9db3887b3\") " pod="openstack/neutron-f9754c45-wzfk5" Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.877979 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prfjm\" (UniqueName: \"kubernetes.io/projected/b6593811-6583-4900-b402-5af9db3887b3-kube-api-access-prfjm\") pod \"neutron-f9754c45-wzfk5\" (UID: \"b6593811-6583-4900-b402-5af9db3887b3\") " pod="openstack/neutron-f9754c45-wzfk5" Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.878952 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-internal-tls-certs\") pod \"neutron-f9754c45-wzfk5\" (UID: \"b6593811-6583-4900-b402-5af9db3887b3\") " pod="openstack/neutron-f9754c45-wzfk5" Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.879004 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-httpd-config\") pod \"neutron-f9754c45-wzfk5\" (UID: \"b6593811-6583-4900-b402-5af9db3887b3\") " pod="openstack/neutron-f9754c45-wzfk5" Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.879032 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-ovndb-tls-certs\") pod \"neutron-f9754c45-wzfk5\" (UID: \"b6593811-6583-4900-b402-5af9db3887b3\") " pod="openstack/neutron-f9754c45-wzfk5" Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.879238 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-config\") pod \"neutron-f9754c45-wzfk5\" (UID: \"b6593811-6583-4900-b402-5af9db3887b3\") " pod="openstack/neutron-f9754c45-wzfk5" Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.879384 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-combined-ca-bundle\") pod \"neutron-f9754c45-wzfk5\" (UID: \"b6593811-6583-4900-b402-5af9db3887b3\") " pod="openstack/neutron-f9754c45-wzfk5" Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.879506 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-public-tls-certs\") pod \"neutron-f9754c45-wzfk5\" (UID: \"b6593811-6583-4900-b402-5af9db3887b3\") " pod="openstack/neutron-f9754c45-wzfk5" Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.884184 4982 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-internal-tls-certs\") pod \"neutron-f9754c45-wzfk5\" (UID: \"b6593811-6583-4900-b402-5af9db3887b3\") " pod="openstack/neutron-f9754c45-wzfk5" Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.884764 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-httpd-config\") pod \"neutron-f9754c45-wzfk5\" (UID: \"b6593811-6583-4900-b402-5af9db3887b3\") " pod="openstack/neutron-f9754c45-wzfk5" Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.884981 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-public-tls-certs\") pod \"neutron-f9754c45-wzfk5\" (UID: \"b6593811-6583-4900-b402-5af9db3887b3\") " pod="openstack/neutron-f9754c45-wzfk5" Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.885600 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-combined-ca-bundle\") pod \"neutron-f9754c45-wzfk5\" (UID: \"b6593811-6583-4900-b402-5af9db3887b3\") " pod="openstack/neutron-f9754c45-wzfk5" Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.888579 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-config\") pod \"neutron-f9754c45-wzfk5\" (UID: \"b6593811-6583-4900-b402-5af9db3887b3\") " pod="openstack/neutron-f9754c45-wzfk5" Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.896787 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-ovndb-tls-certs\") pod \"neutron-f9754c45-wzfk5\" (UID: \"b6593811-6583-4900-b402-5af9db3887b3\") " pod="openstack/neutron-f9754c45-wzfk5" Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.903355 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prfjm\" (UniqueName: \"kubernetes.io/projected/b6593811-6583-4900-b402-5af9db3887b3-kube-api-access-prfjm\") pod \"neutron-f9754c45-wzfk5\" (UID: \"b6593811-6583-4900-b402-5af9db3887b3\") " pod="openstack/neutron-f9754c45-wzfk5" Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.927341 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5566dc6564-s4gjt" event={"ID":"2ed422a7-07eb-4414-bae7-fcb9aaa199d9","Type":"ContainerStarted","Data":"4d6ddff127942ffff8e65c5a3983df0e512a4c51c2e9ba3184f2583448796e60"} Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.932448 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"857e17c8-bcb3-4632-af28-27fe80d6fe2d","Type":"ContainerStarted","Data":"afa461cfe224988c704dc7316c5e22bc1c484aa32d820eb8e724f5d3932f08a2"} Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.936241 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"dacae0a7-2b2d-44dc-a676-027055d56035","Type":"ContainerStarted","Data":"f33bced3fdf444f850ce7aa8ab0dba78c9c52dc2edae2ac10bfb3499fc560375"} Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.962903 4982 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.962883729 podStartE2EDuration="3.962883729s" podCreationTimestamp="2026-01-22 06:07:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:07:10.959339056 +0000 UTC m=+1291.797977059" watchObservedRunningTime="2026-01-22 06:07:10.962883729 +0000 UTC m=+1291.801521732"
Jan 22 06:07:10 crc kubenswrapper[4982]: I0122 06:07:10.969860 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-f9754c45-wzfk5"
Jan 22 06:07:11 crc kubenswrapper[4982]: I0122 06:07:11.735561 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d901ec6-c01e-4859-930d-04e5ceac9e04" path="/var/lib/kubelet/pods/2d901ec6-c01e-4859-930d-04e5ceac9e04/volumes"
Jan 22 06:07:14 crc kubenswrapper[4982]: I0122 06:07:14.737874 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-68c6f69c56-rbr89"]
Jan 22 06:07:14 crc kubenswrapper[4982]: I0122 06:07:14.739951 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-68c6f69c56-rbr89"
Jan 22 06:07:14 crc kubenswrapper[4982]: I0122 06:07:14.741817 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc"
Jan 22 06:07:14 crc kubenswrapper[4982]: I0122 06:07:14.744097 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc"
Jan 22 06:07:14 crc kubenswrapper[4982]: I0122 06:07:14.776237 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-68c6f69c56-rbr89"]
Jan 22 06:07:14 crc kubenswrapper[4982]: I0122 06:07:14.848942 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b565c71-916f-40d6-aa91-de5fd3a323d6-combined-ca-bundle\") pod \"barbican-api-68c6f69c56-rbr89\" (UID: \"2b565c71-916f-40d6-aa91-de5fd3a323d6\") " pod="openstack/barbican-api-68c6f69c56-rbr89"
Jan 22 06:07:14 crc kubenswrapper[4982]: I0122 06:07:14.849006 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2b565c71-916f-40d6-aa91-de5fd3a323d6-config-data-custom\") pod \"barbican-api-68c6f69c56-rbr89\" (UID: \"2b565c71-916f-40d6-aa91-de5fd3a323d6\") " pod="openstack/barbican-api-68c6f69c56-rbr89"
Jan 22 06:07:14 crc kubenswrapper[4982]: I0122 06:07:14.849069 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b565c71-916f-40d6-aa91-de5fd3a323d6-config-data\") pod \"barbican-api-68c6f69c56-rbr89\" (UID: \"2b565c71-916f-40d6-aa91-de5fd3a323d6\") " pod="openstack/barbican-api-68c6f69c56-rbr89"
Jan 22 06:07:14 crc kubenswrapper[4982]: I0122 06:07:14.849091 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b565c71-916f-40d6-aa91-de5fd3a323d6-public-tls-certs\") pod \"barbican-api-68c6f69c56-rbr89\" (UID: \"2b565c71-916f-40d6-aa91-de5fd3a323d6\") " pod="openstack/barbican-api-68c6f69c56-rbr89"
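pod_startup_latency_tracker.go reports two numbers: podStartE2EDuration is watch-observed running time minus podCreationTimestamp, and podStartSLOduration is the same interval minus time spent pulling images. When no pull was needed, firstStartedPulling/lastFinishedPulling hold the zero time 0001-01-01 00:00:00 +0000 UTC and the two values match, as in the glance-default-internal-api-0 entry above (3.962883729s both ways); the m=+N suffix is the kubelet's monotonic clock in seconds since process start. The arithmetic can be checked straight from the logged timestamps; a worked sketch with values copied from that entry (the file name is made up):

// slodur.go - verify podStartE2EDuration = watchObservedRunningTime - podCreationTimestamp.
package main

import (
	"fmt"
	"time"
)

func main() {
	const layout = "2006-01-02 15:04:05 -0700 MST"
	created, err := time.Parse(layout, "2026-01-22 06:07:07 +0000 UTC")
	if err != nil {
		panic(err)
	}
	// time.Parse accepts fractional seconds even when the layout omits them.
	observed, err := time.Parse(layout, "2026-01-22 06:07:10.962883729 +0000 UTC")
	if err != nil {
		panic(err)
	}
	fmt.Println(observed.Sub(created)) // 3.962883729s, matching the logged duration
}

The ceilometer-0 entry further down shows the other case: real pull timestamps, so podStartSLOduration (3.007575761) is far smaller than podStartE2EDuration (42.149768649s).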
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tsxg\" (UniqueName: \"kubernetes.io/projected/2b565c71-916f-40d6-aa91-de5fd3a323d6-kube-api-access-4tsxg\") pod \"barbican-api-68c6f69c56-rbr89\" (UID: \"2b565c71-916f-40d6-aa91-de5fd3a323d6\") " pod="openstack/barbican-api-68c6f69c56-rbr89" Jan 22 06:07:14 crc kubenswrapper[4982]: I0122 06:07:14.849179 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b565c71-916f-40d6-aa91-de5fd3a323d6-logs\") pod \"barbican-api-68c6f69c56-rbr89\" (UID: \"2b565c71-916f-40d6-aa91-de5fd3a323d6\") " pod="openstack/barbican-api-68c6f69c56-rbr89" Jan 22 06:07:14 crc kubenswrapper[4982]: I0122 06:07:14.849227 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b565c71-916f-40d6-aa91-de5fd3a323d6-internal-tls-certs\") pod \"barbican-api-68c6f69c56-rbr89\" (UID: \"2b565c71-916f-40d6-aa91-de5fd3a323d6\") " pod="openstack/barbican-api-68c6f69c56-rbr89" Jan 22 06:07:14 crc kubenswrapper[4982]: I0122 06:07:14.950250 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b565c71-916f-40d6-aa91-de5fd3a323d6-combined-ca-bundle\") pod \"barbican-api-68c6f69c56-rbr89\" (UID: \"2b565c71-916f-40d6-aa91-de5fd3a323d6\") " pod="openstack/barbican-api-68c6f69c56-rbr89" Jan 22 06:07:14 crc kubenswrapper[4982]: I0122 06:07:14.950539 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2b565c71-916f-40d6-aa91-de5fd3a323d6-config-data-custom\") pod \"barbican-api-68c6f69c56-rbr89\" (UID: \"2b565c71-916f-40d6-aa91-de5fd3a323d6\") " pod="openstack/barbican-api-68c6f69c56-rbr89" Jan 22 06:07:14 crc kubenswrapper[4982]: I0122 06:07:14.950649 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b565c71-916f-40d6-aa91-de5fd3a323d6-config-data\") pod \"barbican-api-68c6f69c56-rbr89\" (UID: \"2b565c71-916f-40d6-aa91-de5fd3a323d6\") " pod="openstack/barbican-api-68c6f69c56-rbr89" Jan 22 06:07:14 crc kubenswrapper[4982]: I0122 06:07:14.950751 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b565c71-916f-40d6-aa91-de5fd3a323d6-public-tls-certs\") pod \"barbican-api-68c6f69c56-rbr89\" (UID: \"2b565c71-916f-40d6-aa91-de5fd3a323d6\") " pod="openstack/barbican-api-68c6f69c56-rbr89" Jan 22 06:07:14 crc kubenswrapper[4982]: I0122 06:07:14.950988 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tsxg\" (UniqueName: \"kubernetes.io/projected/2b565c71-916f-40d6-aa91-de5fd3a323d6-kube-api-access-4tsxg\") pod \"barbican-api-68c6f69c56-rbr89\" (UID: \"2b565c71-916f-40d6-aa91-de5fd3a323d6\") " pod="openstack/barbican-api-68c6f69c56-rbr89" Jan 22 06:07:14 crc kubenswrapper[4982]: I0122 06:07:14.951811 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b565c71-916f-40d6-aa91-de5fd3a323d6-logs\") pod \"barbican-api-68c6f69c56-rbr89\" (UID: \"2b565c71-916f-40d6-aa91-de5fd3a323d6\") " pod="openstack/barbican-api-68c6f69c56-rbr89" Jan 22 06:07:14 crc kubenswrapper[4982]: I0122 06:07:14.951955 4982 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b565c71-916f-40d6-aa91-de5fd3a323d6-internal-tls-certs\") pod \"barbican-api-68c6f69c56-rbr89\" (UID: \"2b565c71-916f-40d6-aa91-de5fd3a323d6\") " pod="openstack/barbican-api-68c6f69c56-rbr89" Jan 22 06:07:14 crc kubenswrapper[4982]: I0122 06:07:14.952178 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b565c71-916f-40d6-aa91-de5fd3a323d6-logs\") pod \"barbican-api-68c6f69c56-rbr89\" (UID: \"2b565c71-916f-40d6-aa91-de5fd3a323d6\") " pod="openstack/barbican-api-68c6f69c56-rbr89" Jan 22 06:07:14 crc kubenswrapper[4982]: I0122 06:07:14.956449 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b565c71-916f-40d6-aa91-de5fd3a323d6-public-tls-certs\") pod \"barbican-api-68c6f69c56-rbr89\" (UID: \"2b565c71-916f-40d6-aa91-de5fd3a323d6\") " pod="openstack/barbican-api-68c6f69c56-rbr89" Jan 22 06:07:14 crc kubenswrapper[4982]: I0122 06:07:14.957084 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b565c71-916f-40d6-aa91-de5fd3a323d6-config-data\") pod \"barbican-api-68c6f69c56-rbr89\" (UID: \"2b565c71-916f-40d6-aa91-de5fd3a323d6\") " pod="openstack/barbican-api-68c6f69c56-rbr89" Jan 22 06:07:14 crc kubenswrapper[4982]: I0122 06:07:14.958636 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2b565c71-916f-40d6-aa91-de5fd3a323d6-config-data-custom\") pod \"barbican-api-68c6f69c56-rbr89\" (UID: \"2b565c71-916f-40d6-aa91-de5fd3a323d6\") " pod="openstack/barbican-api-68c6f69c56-rbr89" Jan 22 06:07:14 crc kubenswrapper[4982]: I0122 06:07:14.959314 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b565c71-916f-40d6-aa91-de5fd3a323d6-internal-tls-certs\") pod \"barbican-api-68c6f69c56-rbr89\" (UID: \"2b565c71-916f-40d6-aa91-de5fd3a323d6\") " pod="openstack/barbican-api-68c6f69c56-rbr89" Jan 22 06:07:14 crc kubenswrapper[4982]: I0122 06:07:14.960259 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b565c71-916f-40d6-aa91-de5fd3a323d6-combined-ca-bundle\") pod \"barbican-api-68c6f69c56-rbr89\" (UID: \"2b565c71-916f-40d6-aa91-de5fd3a323d6\") " pod="openstack/barbican-api-68c6f69c56-rbr89" Jan 22 06:07:14 crc kubenswrapper[4982]: I0122 06:07:14.967756 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tsxg\" (UniqueName: \"kubernetes.io/projected/2b565c71-916f-40d6-aa91-de5fd3a323d6-kube-api-access-4tsxg\") pod \"barbican-api-68c6f69c56-rbr89\" (UID: \"2b565c71-916f-40d6-aa91-de5fd3a323d6\") " pod="openstack/barbican-api-68c6f69c56-rbr89" Jan 22 06:07:15 crc kubenswrapper[4982]: I0122 06:07:15.113103 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-68c6f69c56-rbr89" Jan 22 06:07:16 crc kubenswrapper[4982]: I0122 06:07:16.724163 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-f9754c45-wzfk5"] Jan 22 06:07:16 crc kubenswrapper[4982]: W0122 06:07:16.746063 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb6593811_6583_4900_b402_5af9db3887b3.slice/crio-c527eec1bdca10c551098f56f23a4e6dab9f4a03e9aaeca03064606b28a6628e WatchSource:0}: Error finding container c527eec1bdca10c551098f56f23a4e6dab9f4a03e9aaeca03064606b28a6628e: Status 404 returned error can't find the container with id c527eec1bdca10c551098f56f23a4e6dab9f4a03e9aaeca03064606b28a6628e Jan 22 06:07:16 crc kubenswrapper[4982]: I0122 06:07:16.779945 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-68c6f69c56-rbr89"] Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.007429 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"857e17c8-bcb3-4632-af28-27fe80d6fe2d","Type":"ContainerStarted","Data":"62770d9d28bf055b1d32a30e4344efe7af7236f32f97857d950427878d78db09"} Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.012484 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-59458f7b58-qd4fn" event={"ID":"4f54d316-aa4d-4c56-8681-3fa9816a1b80","Type":"ContainerStarted","Data":"b99c9484adad7dd532b67e3b233b444b504c8b16be567d270be5129f88c9b4ff"} Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.012521 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-59458f7b58-qd4fn" event={"ID":"4f54d316-aa4d-4c56-8681-3fa9816a1b80","Type":"ContainerStarted","Data":"56701f933f64ebb2c234cb9d8a246c283c1bc8d3adb5dc9723fc632714881b98"} Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.016435 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-68c6f69c56-rbr89" event={"ID":"2b565c71-916f-40d6-aa91-de5fd3a323d6","Type":"ContainerStarted","Data":"6bdbcca209318e7de23c54a80c39908eea30839974039d5c924f9f3adf9c2b3d"} Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.016494 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-68c6f69c56-rbr89" event={"ID":"2b565c71-916f-40d6-aa91-de5fd3a323d6","Type":"ContainerStarted","Data":"b426b351c96540a971c129061668b3e6c61efe6a37d38c17deb6a92d4871eaf1"} Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.027780 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=10.027763667 podStartE2EDuration="10.027763667s" podCreationTimestamp="2026-01-22 06:07:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:07:17.026569735 +0000 UTC m=+1297.865207738" watchObservedRunningTime="2026-01-22 06:07:17.027763667 +0000 UTC m=+1297.866401670" Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.055650 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66cdd4b5b5-b5tfx" event={"ID":"80c8240c-c286-40a5-91c3-db77f4a01b8e","Type":"ContainerStarted","Data":"c0c5bd1dcbe3bf2491418869ac94c84b114cfec9f0eae089dcc89565a0575128"} Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.056694 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openstack/dnsmasq-dns-66cdd4b5b5-b5tfx" Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.062341 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5566dc6564-s4gjt" event={"ID":"2ed422a7-07eb-4414-bae7-fcb9aaa199d9","Type":"ContainerStarted","Data":"a45fbec35662b2a7367b06c8ca35842a7d9b4b669a0e31848fe6dbe6b77ed63c"} Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.062691 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-59458f7b58-qd4fn" podStartSLOduration=3.153866323 podStartE2EDuration="10.062671614s" podCreationTimestamp="2026-01-22 06:07:07 +0000 UTC" firstStartedPulling="2026-01-22 06:07:09.206290777 +0000 UTC m=+1290.044928780" lastFinishedPulling="2026-01-22 06:07:16.115096068 +0000 UTC m=+1296.953734071" observedRunningTime="2026-01-22 06:07:17.044287616 +0000 UTC m=+1297.882925619" watchObservedRunningTime="2026-01-22 06:07:17.062671614 +0000 UTC m=+1297.901309617" Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.063141 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5566dc6564-s4gjt" Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.065841 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6d8df7d8f6-ctbwg" event={"ID":"a273f991-b7b0-4476-a0f5-80e6a439951b","Type":"ContainerStarted","Data":"3baebc048a8568a0b35bbd367312ef088577cfd6e3066d5b329295972e568cad"} Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.072768 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6d8df7d8f6-ctbwg" Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.072803 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6d8df7d8f6-ctbwg" Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.074515 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-66cdd4b5b5-b5tfx" podStartSLOduration=9.074505332 podStartE2EDuration="9.074505332s" podCreationTimestamp="2026-01-22 06:07:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:07:17.073753672 +0000 UTC m=+1297.912391675" watchObservedRunningTime="2026-01-22 06:07:17.074505332 +0000 UTC m=+1297.913143335" Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.085599 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-796946d7c7-z4qnr" event={"ID":"8c1843cf-e6eb-400b-84ca-5e9d209a23ce","Type":"ContainerStarted","Data":"ed8c2ad87b2789213c37313187684e228958de9f203b39f44ef8e21f33b5470e"} Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.085658 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-796946d7c7-z4qnr" event={"ID":"8c1843cf-e6eb-400b-84ca-5e9d209a23ce","Type":"ContainerStarted","Data":"b0d30b764842fa8c34eb34bdaa21132a3490627329e40e68bdfc09e720158ae1"} Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.100123 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5566dc6564-s4gjt" podStartSLOduration=9.100100167 podStartE2EDuration="9.100100167s" podCreationTimestamp="2026-01-22 06:07:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:07:17.099884802 +0000 UTC m=+1297.938522795" 
watchObservedRunningTime="2026-01-22 06:07:17.100100167 +0000 UTC m=+1297.938738170" Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.107243 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"eba6d2d0-f7a0-4865-8b11-bdb9b302271c","Type":"ContainerStarted","Data":"845966fb977cf9170f0f6329c7b051bba546a7df3a5a7c951e82ee32735d1982"} Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.107303 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="eba6d2d0-f7a0-4865-8b11-bdb9b302271c" containerName="ceilometer-central-agent" containerID="cri-o://b5fd44836c00103f4aa7632ae7a10d9c1ac522714666404305ba574e07457afd" gracePeriod=30 Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.107554 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.107597 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="eba6d2d0-f7a0-4865-8b11-bdb9b302271c" containerName="sg-core" containerID="cri-o://be973b6191fd92618aa9f97131e3d89506037169b999429d678fb9ea1f07b800" gracePeriod=30 Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.107593 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="eba6d2d0-f7a0-4865-8b11-bdb9b302271c" containerName="proxy-httpd" containerID="cri-o://845966fb977cf9170f0f6329c7b051bba546a7df3a5a7c951e82ee32735d1982" gracePeriod=30 Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.107671 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="eba6d2d0-f7a0-4865-8b11-bdb9b302271c" containerName="ceilometer-notification-agent" containerID="cri-o://e9596b692e4b9a1bfa6cdb69447d7e189d8474dcede7412f8228fc0fba69f8be" gracePeriod=30 Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.124355 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f9754c45-wzfk5" event={"ID":"b6593811-6583-4900-b402-5af9db3887b3","Type":"ContainerStarted","Data":"52c43ed6ae44082331dda538371f368b195ae17eec22b655bead980b3261aae5"} Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.124402 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f9754c45-wzfk5" event={"ID":"b6593811-6583-4900-b402-5af9db3887b3","Type":"ContainerStarted","Data":"c527eec1bdca10c551098f56f23a4e6dab9f4a03e9aaeca03064606b28a6628e"} Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.127270 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-6d8df7d8f6-ctbwg" podStartSLOduration=9.127250563 podStartE2EDuration="9.127250563s" podCreationTimestamp="2026-01-22 06:07:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:07:17.120103868 +0000 UTC m=+1297.958741871" watchObservedRunningTime="2026-01-22 06:07:17.127250563 +0000 UTC m=+1297.965888566" Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.149789 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.007575761 podStartE2EDuration="42.149768649s" podCreationTimestamp="2026-01-22 06:06:35 +0000 UTC" firstStartedPulling="2026-01-22 06:06:37.108522478 +0000 UTC m=+1257.947160481" lastFinishedPulling="2026-01-22 06:07:16.250715366 
+0000 UTC m=+1297.089353369" observedRunningTime="2026-01-22 06:07:17.149628135 +0000 UTC m=+1297.988266148" watchObservedRunningTime="2026-01-22 06:07:17.149768649 +0000 UTC m=+1297.988406652" Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.176257 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-796946d7c7-z4qnr" podStartSLOduration=2.880941292 podStartE2EDuration="10.176236718s" podCreationTimestamp="2026-01-22 06:07:07 +0000 UTC" firstStartedPulling="2026-01-22 06:07:08.819839263 +0000 UTC m=+1289.658477266" lastFinishedPulling="2026-01-22 06:07:16.115134649 +0000 UTC m=+1296.953772692" observedRunningTime="2026-01-22 06:07:17.172370447 +0000 UTC m=+1298.011008450" watchObservedRunningTime="2026-01-22 06:07:17.176236718 +0000 UTC m=+1298.014874721" Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.483151 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.483400 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.540668 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.542180 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.553159 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.553220 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.600825 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 22 06:07:17 crc kubenswrapper[4982]: I0122 06:07:17.601365 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 22 06:07:18 crc kubenswrapper[4982]: I0122 06:07:18.152725 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f9754c45-wzfk5" event={"ID":"b6593811-6583-4900-b402-5af9db3887b3","Type":"ContainerStarted","Data":"f1b86ad014feaae9b9be0659bf2c7dfe329f6b9ea2485bf696dc7ce0d69d2fd1"} Jan 22 06:07:18 crc kubenswrapper[4982]: I0122 06:07:18.153934 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-f9754c45-wzfk5" Jan 22 06:07:18 crc kubenswrapper[4982]: I0122 06:07:18.165081 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-vtfmh" event={"ID":"1f97eee4-4c10-4f1e-8173-2f8d1c955049","Type":"ContainerStarted","Data":"3981b59494056203a498dcf4f7d08ba27e65320accdde947f19aa23111fff153"} Jan 22 06:07:18 crc kubenswrapper[4982]: I0122 06:07:18.170425 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-68c6f69c56-rbr89" event={"ID":"2b565c71-916f-40d6-aa91-de5fd3a323d6","Type":"ContainerStarted","Data":"3ebd5e2b4a01d8d16123bc5c692c42b9c4f85f654043c5e5f979425f4bc1aaba"} Jan 22 06:07:18 crc kubenswrapper[4982]: I0122 06:07:18.171290 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/barbican-api-68c6f69c56-rbr89" Jan 22 06:07:18 crc kubenswrapper[4982]: I0122 06:07:18.171316 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-68c6f69c56-rbr89" Jan 22 06:07:18 crc kubenswrapper[4982]: I0122 06:07:18.178135 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-f9754c45-wzfk5" podStartSLOduration=8.178120181 podStartE2EDuration="8.178120181s" podCreationTimestamp="2026-01-22 06:07:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:07:18.172111635 +0000 UTC m=+1299.010749638" watchObservedRunningTime="2026-01-22 06:07:18.178120181 +0000 UTC m=+1299.016758184" Jan 22 06:07:18 crc kubenswrapper[4982]: I0122 06:07:18.182611 4982 generic.go:334] "Generic (PLEG): container finished" podID="eba6d2d0-f7a0-4865-8b11-bdb9b302271c" containerID="845966fb977cf9170f0f6329c7b051bba546a7df3a5a7c951e82ee32735d1982" exitCode=0 Jan 22 06:07:18 crc kubenswrapper[4982]: I0122 06:07:18.182698 4982 generic.go:334] "Generic (PLEG): container finished" podID="eba6d2d0-f7a0-4865-8b11-bdb9b302271c" containerID="be973b6191fd92618aa9f97131e3d89506037169b999429d678fb9ea1f07b800" exitCode=2 Jan 22 06:07:18 crc kubenswrapper[4982]: I0122 06:07:18.182710 4982 generic.go:334] "Generic (PLEG): container finished" podID="eba6d2d0-f7a0-4865-8b11-bdb9b302271c" containerID="b5fd44836c00103f4aa7632ae7a10d9c1ac522714666404305ba574e07457afd" exitCode=0 Jan 22 06:07:18 crc kubenswrapper[4982]: I0122 06:07:18.182655 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"eba6d2d0-f7a0-4865-8b11-bdb9b302271c","Type":"ContainerDied","Data":"845966fb977cf9170f0f6329c7b051bba546a7df3a5a7c951e82ee32735d1982"} Jan 22 06:07:18 crc kubenswrapper[4982]: I0122 06:07:18.182918 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"eba6d2d0-f7a0-4865-8b11-bdb9b302271c","Type":"ContainerDied","Data":"be973b6191fd92618aa9f97131e3d89506037169b999429d678fb9ea1f07b800"} Jan 22 06:07:18 crc kubenswrapper[4982]: I0122 06:07:18.182940 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"eba6d2d0-f7a0-4865-8b11-bdb9b302271c","Type":"ContainerDied","Data":"b5fd44836c00103f4aa7632ae7a10d9c1ac522714666404305ba574e07457afd"} Jan 22 06:07:18 crc kubenswrapper[4982]: I0122 06:07:18.184975 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 22 06:07:18 crc kubenswrapper[4982]: I0122 06:07:18.185001 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 22 06:07:18 crc kubenswrapper[4982]: I0122 06:07:18.185012 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 22 06:07:18 crc kubenswrapper[4982]: I0122 06:07:18.185547 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 22 06:07:18 crc kubenswrapper[4982]: I0122 06:07:18.199919 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-vtfmh" podStartSLOduration=3.949869844 podStartE2EDuration="43.199902287s" podCreationTimestamp="2026-01-22 06:06:35 +0000 UTC" firstStartedPulling="2026-01-22 06:06:37.069769196 +0000 UTC m=+1257.908407199" 
lastFinishedPulling="2026-01-22 06:07:16.319801639 +0000 UTC m=+1297.158439642" observedRunningTime="2026-01-22 06:07:18.197613528 +0000 UTC m=+1299.036251531" watchObservedRunningTime="2026-01-22 06:07:18.199902287 +0000 UTC m=+1299.038540280" Jan 22 06:07:18 crc kubenswrapper[4982]: I0122 06:07:18.224599 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-68c6f69c56-rbr89" podStartSLOduration=4.224580189 podStartE2EDuration="4.224580189s" podCreationTimestamp="2026-01-22 06:07:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:07:18.220319408 +0000 UTC m=+1299.058957411" watchObservedRunningTime="2026-01-22 06:07:18.224580189 +0000 UTC m=+1299.063218192" Jan 22 06:07:18 crc kubenswrapper[4982]: I0122 06:07:18.856879 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6d8df7d8f6-ctbwg" Jan 22 06:07:20 crc kubenswrapper[4982]: I0122 06:07:20.131081 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 22 06:07:20 crc kubenswrapper[4982]: I0122 06:07:20.138328 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 22 06:07:20 crc kubenswrapper[4982]: I0122 06:07:20.158381 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 22 06:07:20 crc kubenswrapper[4982]: I0122 06:07:20.941565 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.081332 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-combined-ca-bundle\") pod \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\" (UID: \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\") " Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.081431 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-run-httpd\") pod \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\" (UID: \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\") " Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.081466 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-sg-core-conf-yaml\") pod \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\" (UID: \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\") " Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.081489 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-config-data\") pod \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\" (UID: \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\") " Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.081629 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-scripts\") pod \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\" (UID: \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\") " Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.081656 4982 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-drm7l\" (UniqueName: \"kubernetes.io/projected/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-kube-api-access-drm7l\") pod \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\" (UID: \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\") " Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.081677 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-log-httpd\") pod \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\" (UID: \"eba6d2d0-f7a0-4865-8b11-bdb9b302271c\") " Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.082644 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "eba6d2d0-f7a0-4865-8b11-bdb9b302271c" (UID: "eba6d2d0-f7a0-4865-8b11-bdb9b302271c"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.083579 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "eba6d2d0-f7a0-4865-8b11-bdb9b302271c" (UID: "eba6d2d0-f7a0-4865-8b11-bdb9b302271c"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.093218 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-kube-api-access-drm7l" (OuterVolumeSpecName: "kube-api-access-drm7l") pod "eba6d2d0-f7a0-4865-8b11-bdb9b302271c" (UID: "eba6d2d0-f7a0-4865-8b11-bdb9b302271c"). InnerVolumeSpecName "kube-api-access-drm7l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.107008 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-scripts" (OuterVolumeSpecName: "scripts") pod "eba6d2d0-f7a0-4865-8b11-bdb9b302271c" (UID: "eba6d2d0-f7a0-4865-8b11-bdb9b302271c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.137745 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "eba6d2d0-f7a0-4865-8b11-bdb9b302271c" (UID: "eba6d2d0-f7a0-4865-8b11-bdb9b302271c"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.164042 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eba6d2d0-f7a0-4865-8b11-bdb9b302271c" (UID: "eba6d2d0-f7a0-4865-8b11-bdb9b302271c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.184445 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.184484 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-drm7l\" (UniqueName: \"kubernetes.io/projected/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-kube-api-access-drm7l\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.184501 4982 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.184514 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.184527 4982 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.184537 4982 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.193111 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-config-data" (OuterVolumeSpecName: "config-data") pod "eba6d2d0-f7a0-4865-8b11-bdb9b302271c" (UID: "eba6d2d0-f7a0-4865-8b11-bdb9b302271c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.234781 4982 generic.go:334] "Generic (PLEG): container finished" podID="eba6d2d0-f7a0-4865-8b11-bdb9b302271c" containerID="e9596b692e4b9a1bfa6cdb69447d7e189d8474dcede7412f8228fc0fba69f8be" exitCode=0 Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.234862 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.234874 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"eba6d2d0-f7a0-4865-8b11-bdb9b302271c","Type":"ContainerDied","Data":"e9596b692e4b9a1bfa6cdb69447d7e189d8474dcede7412f8228fc0fba69f8be"} Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.234927 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"eba6d2d0-f7a0-4865-8b11-bdb9b302271c","Type":"ContainerDied","Data":"06b5837124fd7018f3ef243d692555e0754bff37545f739cacc9f1c0957dc5fe"} Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.234946 4982 scope.go:117] "RemoveContainer" containerID="845966fb977cf9170f0f6329c7b051bba546a7df3a5a7c951e82ee32735d1982" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.245004 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.258868 4982 scope.go:117] "RemoveContainer" containerID="be973b6191fd92618aa9f97131e3d89506037169b999429d678fb9ea1f07b800" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.287984 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eba6d2d0-f7a0-4865-8b11-bdb9b302271c-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.291422 4982 scope.go:117] "RemoveContainer" containerID="e9596b692e4b9a1bfa6cdb69447d7e189d8474dcede7412f8228fc0fba69f8be" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.302629 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.309006 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.327475 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:07:21 crc kubenswrapper[4982]: E0122 06:07:21.327804 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eba6d2d0-f7a0-4865-8b11-bdb9b302271c" containerName="sg-core" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.327822 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="eba6d2d0-f7a0-4865-8b11-bdb9b302271c" containerName="sg-core" Jan 22 06:07:21 crc kubenswrapper[4982]: E0122 06:07:21.327835 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eba6d2d0-f7a0-4865-8b11-bdb9b302271c" containerName="ceilometer-notification-agent" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.327841 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="eba6d2d0-f7a0-4865-8b11-bdb9b302271c" containerName="ceilometer-notification-agent" Jan 22 06:07:21 crc kubenswrapper[4982]: E0122 06:07:21.327870 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eba6d2d0-f7a0-4865-8b11-bdb9b302271c" containerName="proxy-httpd" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.327875 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="eba6d2d0-f7a0-4865-8b11-bdb9b302271c" containerName="proxy-httpd" Jan 22 06:07:21 crc kubenswrapper[4982]: E0122 06:07:21.327898 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eba6d2d0-f7a0-4865-8b11-bdb9b302271c" containerName="ceilometer-central-agent" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.327904 4982 
state_mem.go:107] "Deleted CPUSet assignment" podUID="eba6d2d0-f7a0-4865-8b11-bdb9b302271c" containerName="ceilometer-central-agent" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.328058 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="eba6d2d0-f7a0-4865-8b11-bdb9b302271c" containerName="proxy-httpd" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.328082 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="eba6d2d0-f7a0-4865-8b11-bdb9b302271c" containerName="ceilometer-notification-agent" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.328101 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="eba6d2d0-f7a0-4865-8b11-bdb9b302271c" containerName="sg-core" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.328119 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="eba6d2d0-f7a0-4865-8b11-bdb9b302271c" containerName="ceilometer-central-agent" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.355917 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.356033 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.366042 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.367867 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.369922 4982 scope.go:117] "RemoveContainer" containerID="b5fd44836c00103f4aa7632ae7a10d9c1ac522714666404305ba574e07457afd" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.392618 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhbnd\" (UniqueName: \"kubernetes.io/projected/b344d2d6-b86e-47d1-9e84-4e263485a947-kube-api-access-dhbnd\") pod \"ceilometer-0\" (UID: \"b344d2d6-b86e-47d1-9e84-4e263485a947\") " pod="openstack/ceilometer-0" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.392659 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b344d2d6-b86e-47d1-9e84-4e263485a947-scripts\") pod \"ceilometer-0\" (UID: \"b344d2d6-b86e-47d1-9e84-4e263485a947\") " pod="openstack/ceilometer-0" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.392696 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b344d2d6-b86e-47d1-9e84-4e263485a947-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b344d2d6-b86e-47d1-9e84-4e263485a947\") " pod="openstack/ceilometer-0" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.392724 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b344d2d6-b86e-47d1-9e84-4e263485a947-log-httpd\") pod \"ceilometer-0\" (UID: \"b344d2d6-b86e-47d1-9e84-4e263485a947\") " pod="openstack/ceilometer-0" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.392772 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/b344d2d6-b86e-47d1-9e84-4e263485a947-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b344d2d6-b86e-47d1-9e84-4e263485a947\") " pod="openstack/ceilometer-0" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.392823 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b344d2d6-b86e-47d1-9e84-4e263485a947-config-data\") pod \"ceilometer-0\" (UID: \"b344d2d6-b86e-47d1-9e84-4e263485a947\") " pod="openstack/ceilometer-0" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.392867 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b344d2d6-b86e-47d1-9e84-4e263485a947-run-httpd\") pod \"ceilometer-0\" (UID: \"b344d2d6-b86e-47d1-9e84-4e263485a947\") " pod="openstack/ceilometer-0" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.434717 4982 scope.go:117] "RemoveContainer" containerID="845966fb977cf9170f0f6329c7b051bba546a7df3a5a7c951e82ee32735d1982" Jan 22 06:07:21 crc kubenswrapper[4982]: E0122 06:07:21.435320 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"845966fb977cf9170f0f6329c7b051bba546a7df3a5a7c951e82ee32735d1982\": container with ID starting with 845966fb977cf9170f0f6329c7b051bba546a7df3a5a7c951e82ee32735d1982 not found: ID does not exist" containerID="845966fb977cf9170f0f6329c7b051bba546a7df3a5a7c951e82ee32735d1982" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.435354 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"845966fb977cf9170f0f6329c7b051bba546a7df3a5a7c951e82ee32735d1982"} err="failed to get container status \"845966fb977cf9170f0f6329c7b051bba546a7df3a5a7c951e82ee32735d1982\": rpc error: code = NotFound desc = could not find container \"845966fb977cf9170f0f6329c7b051bba546a7df3a5a7c951e82ee32735d1982\": container with ID starting with 845966fb977cf9170f0f6329c7b051bba546a7df3a5a7c951e82ee32735d1982 not found: ID does not exist" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.435374 4982 scope.go:117] "RemoveContainer" containerID="be973b6191fd92618aa9f97131e3d89506037169b999429d678fb9ea1f07b800" Jan 22 06:07:21 crc kubenswrapper[4982]: E0122 06:07:21.435626 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be973b6191fd92618aa9f97131e3d89506037169b999429d678fb9ea1f07b800\": container with ID starting with be973b6191fd92618aa9f97131e3d89506037169b999429d678fb9ea1f07b800 not found: ID does not exist" containerID="be973b6191fd92618aa9f97131e3d89506037169b999429d678fb9ea1f07b800" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.435645 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be973b6191fd92618aa9f97131e3d89506037169b999429d678fb9ea1f07b800"} err="failed to get container status \"be973b6191fd92618aa9f97131e3d89506037169b999429d678fb9ea1f07b800\": rpc error: code = NotFound desc = could not find container \"be973b6191fd92618aa9f97131e3d89506037169b999429d678fb9ea1f07b800\": container with ID starting with be973b6191fd92618aa9f97131e3d89506037169b999429d678fb9ea1f07b800 not found: ID does not exist" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.435658 4982 scope.go:117] "RemoveContainer" 
containerID="e9596b692e4b9a1bfa6cdb69447d7e189d8474dcede7412f8228fc0fba69f8be" Jan 22 06:07:21 crc kubenswrapper[4982]: E0122 06:07:21.435880 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9596b692e4b9a1bfa6cdb69447d7e189d8474dcede7412f8228fc0fba69f8be\": container with ID starting with e9596b692e4b9a1bfa6cdb69447d7e189d8474dcede7412f8228fc0fba69f8be not found: ID does not exist" containerID="e9596b692e4b9a1bfa6cdb69447d7e189d8474dcede7412f8228fc0fba69f8be" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.435898 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9596b692e4b9a1bfa6cdb69447d7e189d8474dcede7412f8228fc0fba69f8be"} err="failed to get container status \"e9596b692e4b9a1bfa6cdb69447d7e189d8474dcede7412f8228fc0fba69f8be\": rpc error: code = NotFound desc = could not find container \"e9596b692e4b9a1bfa6cdb69447d7e189d8474dcede7412f8228fc0fba69f8be\": container with ID starting with e9596b692e4b9a1bfa6cdb69447d7e189d8474dcede7412f8228fc0fba69f8be not found: ID does not exist" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.435912 4982 scope.go:117] "RemoveContainer" containerID="b5fd44836c00103f4aa7632ae7a10d9c1ac522714666404305ba574e07457afd" Jan 22 06:07:21 crc kubenswrapper[4982]: E0122 06:07:21.436115 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5fd44836c00103f4aa7632ae7a10d9c1ac522714666404305ba574e07457afd\": container with ID starting with b5fd44836c00103f4aa7632ae7a10d9c1ac522714666404305ba574e07457afd not found: ID does not exist" containerID="b5fd44836c00103f4aa7632ae7a10d9c1ac522714666404305ba574e07457afd" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.436132 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5fd44836c00103f4aa7632ae7a10d9c1ac522714666404305ba574e07457afd"} err="failed to get container status \"b5fd44836c00103f4aa7632ae7a10d9c1ac522714666404305ba574e07457afd\": rpc error: code = NotFound desc = could not find container \"b5fd44836c00103f4aa7632ae7a10d9c1ac522714666404305ba574e07457afd\": container with ID starting with b5fd44836c00103f4aa7632ae7a10d9c1ac522714666404305ba574e07457afd not found: ID does not exist" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.494303 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b344d2d6-b86e-47d1-9e84-4e263485a947-scripts\") pod \"ceilometer-0\" (UID: \"b344d2d6-b86e-47d1-9e84-4e263485a947\") " pod="openstack/ceilometer-0" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.494374 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b344d2d6-b86e-47d1-9e84-4e263485a947-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b344d2d6-b86e-47d1-9e84-4e263485a947\") " pod="openstack/ceilometer-0" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.494407 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b344d2d6-b86e-47d1-9e84-4e263485a947-log-httpd\") pod \"ceilometer-0\" (UID: \"b344d2d6-b86e-47d1-9e84-4e263485a947\") " pod="openstack/ceilometer-0" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.494479 4982 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b344d2d6-b86e-47d1-9e84-4e263485a947-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b344d2d6-b86e-47d1-9e84-4e263485a947\") " pod="openstack/ceilometer-0" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.494568 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b344d2d6-b86e-47d1-9e84-4e263485a947-config-data\") pod \"ceilometer-0\" (UID: \"b344d2d6-b86e-47d1-9e84-4e263485a947\") " pod="openstack/ceilometer-0" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.494610 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b344d2d6-b86e-47d1-9e84-4e263485a947-run-httpd\") pod \"ceilometer-0\" (UID: \"b344d2d6-b86e-47d1-9e84-4e263485a947\") " pod="openstack/ceilometer-0" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.494648 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhbnd\" (UniqueName: \"kubernetes.io/projected/b344d2d6-b86e-47d1-9e84-4e263485a947-kube-api-access-dhbnd\") pod \"ceilometer-0\" (UID: \"b344d2d6-b86e-47d1-9e84-4e263485a947\") " pod="openstack/ceilometer-0" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.498034 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b344d2d6-b86e-47d1-9e84-4e263485a947-scripts\") pod \"ceilometer-0\" (UID: \"b344d2d6-b86e-47d1-9e84-4e263485a947\") " pod="openstack/ceilometer-0" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.499947 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b344d2d6-b86e-47d1-9e84-4e263485a947-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b344d2d6-b86e-47d1-9e84-4e263485a947\") " pod="openstack/ceilometer-0" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.500664 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b344d2d6-b86e-47d1-9e84-4e263485a947-config-data\") pod \"ceilometer-0\" (UID: \"b344d2d6-b86e-47d1-9e84-4e263485a947\") " pod="openstack/ceilometer-0" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.501972 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b344d2d6-b86e-47d1-9e84-4e263485a947-log-httpd\") pod \"ceilometer-0\" (UID: \"b344d2d6-b86e-47d1-9e84-4e263485a947\") " pod="openstack/ceilometer-0" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.502186 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b344d2d6-b86e-47d1-9e84-4e263485a947-run-httpd\") pod \"ceilometer-0\" (UID: \"b344d2d6-b86e-47d1-9e84-4e263485a947\") " pod="openstack/ceilometer-0" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.506114 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b344d2d6-b86e-47d1-9e84-4e263485a947-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b344d2d6-b86e-47d1-9e84-4e263485a947\") " pod="openstack/ceilometer-0" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.510684 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhbnd\" (UniqueName: 
\"kubernetes.io/projected/b344d2d6-b86e-47d1-9e84-4e263485a947-kube-api-access-dhbnd\") pod \"ceilometer-0\" (UID: \"b344d2d6-b86e-47d1-9e84-4e263485a947\") " pod="openstack/ceilometer-0" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.706143 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 06:07:21 crc kubenswrapper[4982]: I0122 06:07:21.733038 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eba6d2d0-f7a0-4865-8b11-bdb9b302271c" path="/var/lib/kubelet/pods/eba6d2d0-f7a0-4865-8b11-bdb9b302271c/volumes" Jan 22 06:07:22 crc kubenswrapper[4982]: I0122 06:07:22.262083 4982 generic.go:334] "Generic (PLEG): container finished" podID="1f97eee4-4c10-4f1e-8173-2f8d1c955049" containerID="3981b59494056203a498dcf4f7d08ba27e65320accdde947f19aa23111fff153" exitCode=0 Jan 22 06:07:22 crc kubenswrapper[4982]: I0122 06:07:22.262435 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-vtfmh" event={"ID":"1f97eee4-4c10-4f1e-8173-2f8d1c955049","Type":"ContainerDied","Data":"3981b59494056203a498dcf4f7d08ba27e65320accdde947f19aa23111fff153"} Jan 22 06:07:22 crc kubenswrapper[4982]: I0122 06:07:22.342062 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:07:23 crc kubenswrapper[4982]: I0122 06:07:23.280264 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b344d2d6-b86e-47d1-9e84-4e263485a947","Type":"ContainerStarted","Data":"0b77220f21d4b65683b2e29a26b6544629d478bbf63f17518b7687cd1c70b26c"} Jan 22 06:07:23 crc kubenswrapper[4982]: I0122 06:07:23.280570 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b344d2d6-b86e-47d1-9e84-4e263485a947","Type":"ContainerStarted","Data":"50f993beac015d7d4c8d95f4c6871c56731d4f492a2c6c713a52a05c414a48f4"} Jan 22 06:07:23 crc kubenswrapper[4982]: I0122 06:07:23.475029 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-66cdd4b5b5-b5tfx" Jan 22 06:07:23 crc kubenswrapper[4982]: I0122 06:07:23.550123 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb"] Jan 22 06:07:23 crc kubenswrapper[4982]: I0122 06:07:23.550598 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb" podUID="fad68a0e-d29d-44bb-bf31-55b501abbdcd" containerName="dnsmasq-dns" containerID="cri-o://36e5a44fd3de3632520f9846371ddebac55c80883354a56b71ec992a94a2f862" gracePeriod=10 Jan 22 06:07:23 crc kubenswrapper[4982]: I0122 06:07:23.765123 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-vtfmh" Jan 22 06:07:23 crc kubenswrapper[4982]: I0122 06:07:23.846989 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6tbjz\" (UniqueName: \"kubernetes.io/projected/1f97eee4-4c10-4f1e-8173-2f8d1c955049-kube-api-access-6tbjz\") pod \"1f97eee4-4c10-4f1e-8173-2f8d1c955049\" (UID: \"1f97eee4-4c10-4f1e-8173-2f8d1c955049\") " Jan 22 06:07:23 crc kubenswrapper[4982]: I0122 06:07:23.847029 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1f97eee4-4c10-4f1e-8173-2f8d1c955049-db-sync-config-data\") pod \"1f97eee4-4c10-4f1e-8173-2f8d1c955049\" (UID: \"1f97eee4-4c10-4f1e-8173-2f8d1c955049\") " Jan 22 06:07:23 crc kubenswrapper[4982]: I0122 06:07:23.847075 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1f97eee4-4c10-4f1e-8173-2f8d1c955049-etc-machine-id\") pod \"1f97eee4-4c10-4f1e-8173-2f8d1c955049\" (UID: \"1f97eee4-4c10-4f1e-8173-2f8d1c955049\") " Jan 22 06:07:23 crc kubenswrapper[4982]: I0122 06:07:23.847134 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f97eee4-4c10-4f1e-8173-2f8d1c955049-combined-ca-bundle\") pod \"1f97eee4-4c10-4f1e-8173-2f8d1c955049\" (UID: \"1f97eee4-4c10-4f1e-8173-2f8d1c955049\") " Jan 22 06:07:23 crc kubenswrapper[4982]: I0122 06:07:23.847152 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f97eee4-4c10-4f1e-8173-2f8d1c955049-scripts\") pod \"1f97eee4-4c10-4f1e-8173-2f8d1c955049\" (UID: \"1f97eee4-4c10-4f1e-8173-2f8d1c955049\") " Jan 22 06:07:23 crc kubenswrapper[4982]: I0122 06:07:23.847240 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f97eee4-4c10-4f1e-8173-2f8d1c955049-config-data\") pod \"1f97eee4-4c10-4f1e-8173-2f8d1c955049\" (UID: \"1f97eee4-4c10-4f1e-8173-2f8d1c955049\") " Jan 22 06:07:23 crc kubenswrapper[4982]: I0122 06:07:23.852444 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f97eee4-4c10-4f1e-8173-2f8d1c955049-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "1f97eee4-4c10-4f1e-8173-2f8d1c955049" (UID: "1f97eee4-4c10-4f1e-8173-2f8d1c955049"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:23 crc kubenswrapper[4982]: I0122 06:07:23.852512 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1f97eee4-4c10-4f1e-8173-2f8d1c955049-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "1f97eee4-4c10-4f1e-8173-2f8d1c955049" (UID: "1f97eee4-4c10-4f1e-8173-2f8d1c955049"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 06:07:23 crc kubenswrapper[4982]: I0122 06:07:23.866276 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f97eee4-4c10-4f1e-8173-2f8d1c955049-scripts" (OuterVolumeSpecName: "scripts") pod "1f97eee4-4c10-4f1e-8173-2f8d1c955049" (UID: "1f97eee4-4c10-4f1e-8173-2f8d1c955049"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:23 crc kubenswrapper[4982]: I0122 06:07:23.869617 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f97eee4-4c10-4f1e-8173-2f8d1c955049-kube-api-access-6tbjz" (OuterVolumeSpecName: "kube-api-access-6tbjz") pod "1f97eee4-4c10-4f1e-8173-2f8d1c955049" (UID: "1f97eee4-4c10-4f1e-8173-2f8d1c955049"). InnerVolumeSpecName "kube-api-access-6tbjz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:07:23 crc kubenswrapper[4982]: I0122 06:07:23.875753 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f97eee4-4c10-4f1e-8173-2f8d1c955049-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1f97eee4-4c10-4f1e-8173-2f8d1c955049" (UID: "1f97eee4-4c10-4f1e-8173-2f8d1c955049"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:23 crc kubenswrapper[4982]: I0122 06:07:23.920620 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f97eee4-4c10-4f1e-8173-2f8d1c955049-config-data" (OuterVolumeSpecName: "config-data") pod "1f97eee4-4c10-4f1e-8173-2f8d1c955049" (UID: "1f97eee4-4c10-4f1e-8173-2f8d1c955049"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:23 crc kubenswrapper[4982]: I0122 06:07:23.948942 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f97eee4-4c10-4f1e-8173-2f8d1c955049-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:23 crc kubenswrapper[4982]: I0122 06:07:23.948974 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6tbjz\" (UniqueName: \"kubernetes.io/projected/1f97eee4-4c10-4f1e-8173-2f8d1c955049-kube-api-access-6tbjz\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:23 crc kubenswrapper[4982]: I0122 06:07:23.948984 4982 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1f97eee4-4c10-4f1e-8173-2f8d1c955049-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:23 crc kubenswrapper[4982]: I0122 06:07:23.948993 4982 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1f97eee4-4c10-4f1e-8173-2f8d1c955049-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:23 crc kubenswrapper[4982]: I0122 06:07:23.949001 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f97eee4-4c10-4f1e-8173-2f8d1c955049-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:23 crc kubenswrapper[4982]: I0122 06:07:23.949009 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f97eee4-4c10-4f1e-8173-2f8d1c955049-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.158875 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.268324 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fad68a0e-d29d-44bb-bf31-55b501abbdcd-dns-svc\") pod \"fad68a0e-d29d-44bb-bf31-55b501abbdcd\" (UID: \"fad68a0e-d29d-44bb-bf31-55b501abbdcd\") " Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.268431 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8l9kk\" (UniqueName: \"kubernetes.io/projected/fad68a0e-d29d-44bb-bf31-55b501abbdcd-kube-api-access-8l9kk\") pod \"fad68a0e-d29d-44bb-bf31-55b501abbdcd\" (UID: \"fad68a0e-d29d-44bb-bf31-55b501abbdcd\") " Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.268511 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fad68a0e-d29d-44bb-bf31-55b501abbdcd-dns-swift-storage-0\") pod \"fad68a0e-d29d-44bb-bf31-55b501abbdcd\" (UID: \"fad68a0e-d29d-44bb-bf31-55b501abbdcd\") " Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.268545 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fad68a0e-d29d-44bb-bf31-55b501abbdcd-ovsdbserver-sb\") pod \"fad68a0e-d29d-44bb-bf31-55b501abbdcd\" (UID: \"fad68a0e-d29d-44bb-bf31-55b501abbdcd\") " Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.268611 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fad68a0e-d29d-44bb-bf31-55b501abbdcd-config\") pod \"fad68a0e-d29d-44bb-bf31-55b501abbdcd\" (UID: \"fad68a0e-d29d-44bb-bf31-55b501abbdcd\") " Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.268670 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fad68a0e-d29d-44bb-bf31-55b501abbdcd-ovsdbserver-nb\") pod \"fad68a0e-d29d-44bb-bf31-55b501abbdcd\" (UID: \"fad68a0e-d29d-44bb-bf31-55b501abbdcd\") " Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.274023 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fad68a0e-d29d-44bb-bf31-55b501abbdcd-kube-api-access-8l9kk" (OuterVolumeSpecName: "kube-api-access-8l9kk") pod "fad68a0e-d29d-44bb-bf31-55b501abbdcd" (UID: "fad68a0e-d29d-44bb-bf31-55b501abbdcd"). InnerVolumeSpecName "kube-api-access-8l9kk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.315625 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-vtfmh" event={"ID":"1f97eee4-4c10-4f1e-8173-2f8d1c955049","Type":"ContainerDied","Data":"4ee2626a88284bb96051262e042985ec293cc6014c7fd667ad74ecc861d28d7d"} Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.316390 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4ee2626a88284bb96051262e042985ec293cc6014c7fd667ad74ecc861d28d7d" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.316496 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-vtfmh" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.331941 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fad68a0e-d29d-44bb-bf31-55b501abbdcd-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "fad68a0e-d29d-44bb-bf31-55b501abbdcd" (UID: "fad68a0e-d29d-44bb-bf31-55b501abbdcd"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.340643 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b344d2d6-b86e-47d1-9e84-4e263485a947","Type":"ContainerStarted","Data":"9ad8537d18d405afe1f63e3afa6e20107eaa51440fd6f3cce64297ecaa11df44"} Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.344963 4982 generic.go:334] "Generic (PLEG): container finished" podID="fad68a0e-d29d-44bb-bf31-55b501abbdcd" containerID="36e5a44fd3de3632520f9846371ddebac55c80883354a56b71ec992a94a2f862" exitCode=0 Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.345039 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.345055 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb" event={"ID":"fad68a0e-d29d-44bb-bf31-55b501abbdcd","Type":"ContainerDied","Data":"36e5a44fd3de3632520f9846371ddebac55c80883354a56b71ec992a94a2f862"} Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.345107 4982 scope.go:117] "RemoveContainer" containerID="36e5a44fd3de3632520f9846371ddebac55c80883354a56b71ec992a94a2f862" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.345181 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb" event={"ID":"fad68a0e-d29d-44bb-bf31-55b501abbdcd","Type":"ContainerDied","Data":"2dfda501dc095d7821eb835de7828636c36401a85b4cd712f44641c9a41bc41f"} Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.353410 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fad68a0e-d29d-44bb-bf31-55b501abbdcd-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "fad68a0e-d29d-44bb-bf31-55b501abbdcd" (UID: "fad68a0e-d29d-44bb-bf31-55b501abbdcd"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.374767 4982 scope.go:117] "RemoveContainer" containerID="49379128ac518b09b1e0d175a299a67243cadd31b94e53a53cdf0bb8dd4251f9" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.376424 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fad68a0e-d29d-44bb-bf31-55b501abbdcd-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.376493 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8l9kk\" (UniqueName: \"kubernetes.io/projected/fad68a0e-d29d-44bb-bf31-55b501abbdcd-kube-api-access-8l9kk\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.376511 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fad68a0e-d29d-44bb-bf31-55b501abbdcd-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.394087 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fad68a0e-d29d-44bb-bf31-55b501abbdcd-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "fad68a0e-d29d-44bb-bf31-55b501abbdcd" (UID: "fad68a0e-d29d-44bb-bf31-55b501abbdcd"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.418425 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fad68a0e-d29d-44bb-bf31-55b501abbdcd-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "fad68a0e-d29d-44bb-bf31-55b501abbdcd" (UID: "fad68a0e-d29d-44bb-bf31-55b501abbdcd"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.418511 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fad68a0e-d29d-44bb-bf31-55b501abbdcd-config" (OuterVolumeSpecName: "config") pod "fad68a0e-d29d-44bb-bf31-55b501abbdcd" (UID: "fad68a0e-d29d-44bb-bf31-55b501abbdcd"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.418561 4982 scope.go:117] "RemoveContainer" containerID="36e5a44fd3de3632520f9846371ddebac55c80883354a56b71ec992a94a2f862" Jan 22 06:07:24 crc kubenswrapper[4982]: E0122 06:07:24.420213 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36e5a44fd3de3632520f9846371ddebac55c80883354a56b71ec992a94a2f862\": container with ID starting with 36e5a44fd3de3632520f9846371ddebac55c80883354a56b71ec992a94a2f862 not found: ID does not exist" containerID="36e5a44fd3de3632520f9846371ddebac55c80883354a56b71ec992a94a2f862" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.420250 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36e5a44fd3de3632520f9846371ddebac55c80883354a56b71ec992a94a2f862"} err="failed to get container status \"36e5a44fd3de3632520f9846371ddebac55c80883354a56b71ec992a94a2f862\": rpc error: code = NotFound desc = could not find container \"36e5a44fd3de3632520f9846371ddebac55c80883354a56b71ec992a94a2f862\": container with ID starting with 36e5a44fd3de3632520f9846371ddebac55c80883354a56b71ec992a94a2f862 not found: ID does not exist" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.420277 4982 scope.go:117] "RemoveContainer" containerID="49379128ac518b09b1e0d175a299a67243cadd31b94e53a53cdf0bb8dd4251f9" Jan 22 06:07:24 crc kubenswrapper[4982]: E0122 06:07:24.422021 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49379128ac518b09b1e0d175a299a67243cadd31b94e53a53cdf0bb8dd4251f9\": container with ID starting with 49379128ac518b09b1e0d175a299a67243cadd31b94e53a53cdf0bb8dd4251f9 not found: ID does not exist" containerID="49379128ac518b09b1e0d175a299a67243cadd31b94e53a53cdf0bb8dd4251f9" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.422051 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49379128ac518b09b1e0d175a299a67243cadd31b94e53a53cdf0bb8dd4251f9"} err="failed to get container status \"49379128ac518b09b1e0d175a299a67243cadd31b94e53a53cdf0bb8dd4251f9\": rpc error: code = NotFound desc = could not find container \"49379128ac518b09b1e0d175a299a67243cadd31b94e53a53cdf0bb8dd4251f9\": container with ID starting with 49379128ac518b09b1e0d175a299a67243cadd31b94e53a53cdf0bb8dd4251f9 not found: ID does not exist" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.478002 4982 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fad68a0e-d29d-44bb-bf31-55b501abbdcd-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.478281 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fad68a0e-d29d-44bb-bf31-55b501abbdcd-config\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.478291 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fad68a0e-d29d-44bb-bf31-55b501abbdcd-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.547159 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 06:07:24 crc kubenswrapper[4982]: E0122 06:07:24.547527 4982 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="fad68a0e-d29d-44bb-bf31-55b501abbdcd" containerName="init" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.547543 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="fad68a0e-d29d-44bb-bf31-55b501abbdcd" containerName="init" Jan 22 06:07:24 crc kubenswrapper[4982]: E0122 06:07:24.547561 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f97eee4-4c10-4f1e-8173-2f8d1c955049" containerName="cinder-db-sync" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.547568 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f97eee4-4c10-4f1e-8173-2f8d1c955049" containerName="cinder-db-sync" Jan 22 06:07:24 crc kubenswrapper[4982]: E0122 06:07:24.547595 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fad68a0e-d29d-44bb-bf31-55b501abbdcd" containerName="dnsmasq-dns" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.547600 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="fad68a0e-d29d-44bb-bf31-55b501abbdcd" containerName="dnsmasq-dns" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.547747 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="fad68a0e-d29d-44bb-bf31-55b501abbdcd" containerName="dnsmasq-dns" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.547782 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f97eee4-4c10-4f1e-8173-2f8d1c955049" containerName="cinder-db-sync" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.548622 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.553955 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.554035 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.554279 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.554398 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-sc2js" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.581205 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/270f0f59-6283-4756-9f65-d8034fae4770-scripts\") pod \"cinder-scheduler-0\" (UID: \"270f0f59-6283-4756-9f65-d8034fae4770\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.581302 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/270f0f59-6283-4756-9f65-d8034fae4770-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"270f0f59-6283-4756-9f65-d8034fae4770\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.581503 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nlk5m\" (UniqueName: \"kubernetes.io/projected/270f0f59-6283-4756-9f65-d8034fae4770-kube-api-access-nlk5m\") pod \"cinder-scheduler-0\" (UID: \"270f0f59-6283-4756-9f65-d8034fae4770\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.581558 4982 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/270f0f59-6283-4756-9f65-d8034fae4770-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"270f0f59-6283-4756-9f65-d8034fae4770\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.581603 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/270f0f59-6283-4756-9f65-d8034fae4770-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"270f0f59-6283-4756-9f65-d8034fae4770\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.581667 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/270f0f59-6283-4756-9f65-d8034fae4770-config-data\") pod \"cinder-scheduler-0\" (UID: \"270f0f59-6283-4756-9f65-d8034fae4770\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.593006 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.627933 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-75dbb546bf-sksw6"] Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.629785 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75dbb546bf-sksw6" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.634962 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75dbb546bf-sksw6"] Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.687800 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nlk5m\" (UniqueName: \"kubernetes.io/projected/270f0f59-6283-4756-9f65-d8034fae4770-kube-api-access-nlk5m\") pod \"cinder-scheduler-0\" (UID: \"270f0f59-6283-4756-9f65-d8034fae4770\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.687863 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/270f0f59-6283-4756-9f65-d8034fae4770-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"270f0f59-6283-4756-9f65-d8034fae4770\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.687890 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/df7ef838-a20d-4c64-bc92-e2af11097e20-ovsdbserver-sb\") pod \"dnsmasq-dns-75dbb546bf-sksw6\" (UID: \"df7ef838-a20d-4c64-bc92-e2af11097e20\") " pod="openstack/dnsmasq-dns-75dbb546bf-sksw6" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.687912 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/270f0f59-6283-4756-9f65-d8034fae4770-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"270f0f59-6283-4756-9f65-d8034fae4770\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.687935 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/df7ef838-a20d-4c64-bc92-e2af11097e20-ovsdbserver-nb\") pod \"dnsmasq-dns-75dbb546bf-sksw6\" (UID: \"df7ef838-a20d-4c64-bc92-e2af11097e20\") " pod="openstack/dnsmasq-dns-75dbb546bf-sksw6" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.687952 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/df7ef838-a20d-4c64-bc92-e2af11097e20-dns-swift-storage-0\") pod \"dnsmasq-dns-75dbb546bf-sksw6\" (UID: \"df7ef838-a20d-4c64-bc92-e2af11097e20\") " pod="openstack/dnsmasq-dns-75dbb546bf-sksw6" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.687967 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9pq4\" (UniqueName: \"kubernetes.io/projected/df7ef838-a20d-4c64-bc92-e2af11097e20-kube-api-access-q9pq4\") pod \"dnsmasq-dns-75dbb546bf-sksw6\" (UID: \"df7ef838-a20d-4c64-bc92-e2af11097e20\") " pod="openstack/dnsmasq-dns-75dbb546bf-sksw6" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.687985 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/270f0f59-6283-4756-9f65-d8034fae4770-config-data\") pod \"cinder-scheduler-0\" (UID: \"270f0f59-6283-4756-9f65-d8034fae4770\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.688004 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df7ef838-a20d-4c64-bc92-e2af11097e20-config\") pod \"dnsmasq-dns-75dbb546bf-sksw6\" (UID: \"df7ef838-a20d-4c64-bc92-e2af11097e20\") " pod="openstack/dnsmasq-dns-75dbb546bf-sksw6" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.688030 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/270f0f59-6283-4756-9f65-d8034fae4770-scripts\") pod \"cinder-scheduler-0\" (UID: \"270f0f59-6283-4756-9f65-d8034fae4770\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.688064 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/270f0f59-6283-4756-9f65-d8034fae4770-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"270f0f59-6283-4756-9f65-d8034fae4770\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.688092 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/df7ef838-a20d-4c64-bc92-e2af11097e20-dns-svc\") pod \"dnsmasq-dns-75dbb546bf-sksw6\" (UID: \"df7ef838-a20d-4c64-bc92-e2af11097e20\") " pod="openstack/dnsmasq-dns-75dbb546bf-sksw6" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.689336 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/270f0f59-6283-4756-9f65-d8034fae4770-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"270f0f59-6283-4756-9f65-d8034fae4770\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.698048 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/270f0f59-6283-4756-9f65-d8034fae4770-config-data\") pod \"cinder-scheduler-0\" (UID: 
\"270f0f59-6283-4756-9f65-d8034fae4770\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.698167 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/270f0f59-6283-4756-9f65-d8034fae4770-scripts\") pod \"cinder-scheduler-0\" (UID: \"270f0f59-6283-4756-9f65-d8034fae4770\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.698452 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/270f0f59-6283-4756-9f65-d8034fae4770-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"270f0f59-6283-4756-9f65-d8034fae4770\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.698489 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/270f0f59-6283-4756-9f65-d8034fae4770-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"270f0f59-6283-4756-9f65-d8034fae4770\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.716687 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nlk5m\" (UniqueName: \"kubernetes.io/projected/270f0f59-6283-4756-9f65-d8034fae4770-kube-api-access-nlk5m\") pod \"cinder-scheduler-0\" (UID: \"270f0f59-6283-4756-9f65-d8034fae4770\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.717044 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb"] Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.737221 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7fc6d4ffc7-fzfmb"] Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.789317 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/df7ef838-a20d-4c64-bc92-e2af11097e20-ovsdbserver-sb\") pod \"dnsmasq-dns-75dbb546bf-sksw6\" (UID: \"df7ef838-a20d-4c64-bc92-e2af11097e20\") " pod="openstack/dnsmasq-dns-75dbb546bf-sksw6" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.789368 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/df7ef838-a20d-4c64-bc92-e2af11097e20-ovsdbserver-nb\") pod \"dnsmasq-dns-75dbb546bf-sksw6\" (UID: \"df7ef838-a20d-4c64-bc92-e2af11097e20\") " pod="openstack/dnsmasq-dns-75dbb546bf-sksw6" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.789386 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/df7ef838-a20d-4c64-bc92-e2af11097e20-dns-swift-storage-0\") pod \"dnsmasq-dns-75dbb546bf-sksw6\" (UID: \"df7ef838-a20d-4c64-bc92-e2af11097e20\") " pod="openstack/dnsmasq-dns-75dbb546bf-sksw6" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.789403 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9pq4\" (UniqueName: \"kubernetes.io/projected/df7ef838-a20d-4c64-bc92-e2af11097e20-kube-api-access-q9pq4\") pod \"dnsmasq-dns-75dbb546bf-sksw6\" (UID: \"df7ef838-a20d-4c64-bc92-e2af11097e20\") " pod="openstack/dnsmasq-dns-75dbb546bf-sksw6" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.789425 4982 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df7ef838-a20d-4c64-bc92-e2af11097e20-config\") pod \"dnsmasq-dns-75dbb546bf-sksw6\" (UID: \"df7ef838-a20d-4c64-bc92-e2af11097e20\") " pod="openstack/dnsmasq-dns-75dbb546bf-sksw6" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.789494 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/df7ef838-a20d-4c64-bc92-e2af11097e20-dns-svc\") pod \"dnsmasq-dns-75dbb546bf-sksw6\" (UID: \"df7ef838-a20d-4c64-bc92-e2af11097e20\") " pod="openstack/dnsmasq-dns-75dbb546bf-sksw6" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.791445 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/df7ef838-a20d-4c64-bc92-e2af11097e20-ovsdbserver-sb\") pod \"dnsmasq-dns-75dbb546bf-sksw6\" (UID: \"df7ef838-a20d-4c64-bc92-e2af11097e20\") " pod="openstack/dnsmasq-dns-75dbb546bf-sksw6" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.792064 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/df7ef838-a20d-4c64-bc92-e2af11097e20-ovsdbserver-nb\") pod \"dnsmasq-dns-75dbb546bf-sksw6\" (UID: \"df7ef838-a20d-4c64-bc92-e2af11097e20\") " pod="openstack/dnsmasq-dns-75dbb546bf-sksw6" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.792612 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/df7ef838-a20d-4c64-bc92-e2af11097e20-dns-swift-storage-0\") pod \"dnsmasq-dns-75dbb546bf-sksw6\" (UID: \"df7ef838-a20d-4c64-bc92-e2af11097e20\") " pod="openstack/dnsmasq-dns-75dbb546bf-sksw6" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.793621 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df7ef838-a20d-4c64-bc92-e2af11097e20-config\") pod \"dnsmasq-dns-75dbb546bf-sksw6\" (UID: \"df7ef838-a20d-4c64-bc92-e2af11097e20\") " pod="openstack/dnsmasq-dns-75dbb546bf-sksw6" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.795236 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/df7ef838-a20d-4c64-bc92-e2af11097e20-dns-svc\") pod \"dnsmasq-dns-75dbb546bf-sksw6\" (UID: \"df7ef838-a20d-4c64-bc92-e2af11097e20\") " pod="openstack/dnsmasq-dns-75dbb546bf-sksw6" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.806955 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.808309 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.814403 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.821905 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9pq4\" (UniqueName: \"kubernetes.io/projected/df7ef838-a20d-4c64-bc92-e2af11097e20-kube-api-access-q9pq4\") pod \"dnsmasq-dns-75dbb546bf-sksw6\" (UID: \"df7ef838-a20d-4c64-bc92-e2af11097e20\") " pod="openstack/dnsmasq-dns-75dbb546bf-sksw6" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.833032 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.876278 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.890968 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec319049-80f6-4114-b54e-085e69caf06a-logs\") pod \"cinder-api-0\" (UID: \"ec319049-80f6-4114-b54e-085e69caf06a\") " pod="openstack/cinder-api-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.891024 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec319049-80f6-4114-b54e-085e69caf06a-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ec319049-80f6-4114-b54e-085e69caf06a\") " pod="openstack/cinder-api-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.891051 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ec319049-80f6-4114-b54e-085e69caf06a-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ec319049-80f6-4114-b54e-085e69caf06a\") " pod="openstack/cinder-api-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.891112 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbbnh\" (UniqueName: \"kubernetes.io/projected/ec319049-80f6-4114-b54e-085e69caf06a-kube-api-access-tbbnh\") pod \"cinder-api-0\" (UID: \"ec319049-80f6-4114-b54e-085e69caf06a\") " pod="openstack/cinder-api-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.891129 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec319049-80f6-4114-b54e-085e69caf06a-config-data\") pod \"cinder-api-0\" (UID: \"ec319049-80f6-4114-b54e-085e69caf06a\") " pod="openstack/cinder-api-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.891193 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec319049-80f6-4114-b54e-085e69caf06a-scripts\") pod \"cinder-api-0\" (UID: \"ec319049-80f6-4114-b54e-085e69caf06a\") " pod="openstack/cinder-api-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.891219 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ec319049-80f6-4114-b54e-085e69caf06a-config-data-custom\") pod \"cinder-api-0\" (UID: \"ec319049-80f6-4114-b54e-085e69caf06a\") " pod="openstack/cinder-api-0" Jan 22 
06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.968035 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75dbb546bf-sksw6" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.992573 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec319049-80f6-4114-b54e-085e69caf06a-logs\") pod \"cinder-api-0\" (UID: \"ec319049-80f6-4114-b54e-085e69caf06a\") " pod="openstack/cinder-api-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.992893 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec319049-80f6-4114-b54e-085e69caf06a-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ec319049-80f6-4114-b54e-085e69caf06a\") " pod="openstack/cinder-api-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.992920 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ec319049-80f6-4114-b54e-085e69caf06a-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ec319049-80f6-4114-b54e-085e69caf06a\") " pod="openstack/cinder-api-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.992944 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbbnh\" (UniqueName: \"kubernetes.io/projected/ec319049-80f6-4114-b54e-085e69caf06a-kube-api-access-tbbnh\") pod \"cinder-api-0\" (UID: \"ec319049-80f6-4114-b54e-085e69caf06a\") " pod="openstack/cinder-api-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.992961 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec319049-80f6-4114-b54e-085e69caf06a-config-data\") pod \"cinder-api-0\" (UID: \"ec319049-80f6-4114-b54e-085e69caf06a\") " pod="openstack/cinder-api-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.993016 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec319049-80f6-4114-b54e-085e69caf06a-scripts\") pod \"cinder-api-0\" (UID: \"ec319049-80f6-4114-b54e-085e69caf06a\") " pod="openstack/cinder-api-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.993044 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ec319049-80f6-4114-b54e-085e69caf06a-config-data-custom\") pod \"cinder-api-0\" (UID: \"ec319049-80f6-4114-b54e-085e69caf06a\") " pod="openstack/cinder-api-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.995400 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ec319049-80f6-4114-b54e-085e69caf06a-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ec319049-80f6-4114-b54e-085e69caf06a\") " pod="openstack/cinder-api-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.995764 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec319049-80f6-4114-b54e-085e69caf06a-logs\") pod \"cinder-api-0\" (UID: \"ec319049-80f6-4114-b54e-085e69caf06a\") " pod="openstack/cinder-api-0" Jan 22 06:07:24 crc kubenswrapper[4982]: I0122 06:07:24.998612 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/ec319049-80f6-4114-b54e-085e69caf06a-config-data-custom\") pod \"cinder-api-0\" (UID: \"ec319049-80f6-4114-b54e-085e69caf06a\") " pod="openstack/cinder-api-0" Jan 22 06:07:25 crc kubenswrapper[4982]: I0122 06:07:25.001286 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec319049-80f6-4114-b54e-085e69caf06a-config-data\") pod \"cinder-api-0\" (UID: \"ec319049-80f6-4114-b54e-085e69caf06a\") " pod="openstack/cinder-api-0" Jan 22 06:07:25 crc kubenswrapper[4982]: I0122 06:07:25.004004 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec319049-80f6-4114-b54e-085e69caf06a-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ec319049-80f6-4114-b54e-085e69caf06a\") " pod="openstack/cinder-api-0" Jan 22 06:07:25 crc kubenswrapper[4982]: I0122 06:07:25.020418 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec319049-80f6-4114-b54e-085e69caf06a-scripts\") pod \"cinder-api-0\" (UID: \"ec319049-80f6-4114-b54e-085e69caf06a\") " pod="openstack/cinder-api-0" Jan 22 06:07:25 crc kubenswrapper[4982]: I0122 06:07:25.026715 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbbnh\" (UniqueName: \"kubernetes.io/projected/ec319049-80f6-4114-b54e-085e69caf06a-kube-api-access-tbbnh\") pod \"cinder-api-0\" (UID: \"ec319049-80f6-4114-b54e-085e69caf06a\") " pod="openstack/cinder-api-0" Jan 22 06:07:25 crc kubenswrapper[4982]: I0122 06:07:25.282322 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 22 06:07:25 crc kubenswrapper[4982]: I0122 06:07:25.400687 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 06:07:25 crc kubenswrapper[4982]: I0122 06:07:25.407369 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b344d2d6-b86e-47d1-9e84-4e263485a947","Type":"ContainerStarted","Data":"31b3739206efab9692a7d4ef2a59517898b27c556eef089353eca84e5d2194f1"} Jan 22 06:07:25 crc kubenswrapper[4982]: I0122 06:07:25.551735 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75dbb546bf-sksw6"] Jan 22 06:07:25 crc kubenswrapper[4982]: I0122 06:07:25.730329 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fad68a0e-d29d-44bb-bf31-55b501abbdcd" path="/var/lib/kubelet/pods/fad68a0e-d29d-44bb-bf31-55b501abbdcd/volumes" Jan 22 06:07:25 crc kubenswrapper[4982]: I0122 06:07:25.853526 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 22 06:07:25 crc kubenswrapper[4982]: I0122 06:07:25.978176 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6d8df7d8f6-ctbwg" Jan 22 06:07:26 crc kubenswrapper[4982]: I0122 06:07:26.442286 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"270f0f59-6283-4756-9f65-d8034fae4770","Type":"ContainerStarted","Data":"4ebc0015a3f9bd63a04b413985148d00f18d8762b72807870e6d8430eb307279"} Jan 22 06:07:26 crc kubenswrapper[4982]: I0122 06:07:26.453029 4982 generic.go:334] "Generic (PLEG): container finished" podID="df7ef838-a20d-4c64-bc92-e2af11097e20" containerID="68bd81db10f3753e4cdaff6c79e2da59ea80b00dc31d7afb95a0b1a2263a5d94" exitCode=0 Jan 22 06:07:26 crc kubenswrapper[4982]: I0122 
06:07:26.453934 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75dbb546bf-sksw6" event={"ID":"df7ef838-a20d-4c64-bc92-e2af11097e20","Type":"ContainerDied","Data":"68bd81db10f3753e4cdaff6c79e2da59ea80b00dc31d7afb95a0b1a2263a5d94"} Jan 22 06:07:26 crc kubenswrapper[4982]: I0122 06:07:26.454003 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75dbb546bf-sksw6" event={"ID":"df7ef838-a20d-4c64-bc92-e2af11097e20","Type":"ContainerStarted","Data":"3a59cc3eabe9aeba728662f9c803b489ec68c51ef4c415fcb44ded7ba9f9da78"} Jan 22 06:07:26 crc kubenswrapper[4982]: I0122 06:07:26.471012 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ec319049-80f6-4114-b54e-085e69caf06a","Type":"ContainerStarted","Data":"e87070446f03e3ad08fcd80460ecfb39ebd393de2c28fa398107039f6a29d729"} Jan 22 06:07:26 crc kubenswrapper[4982]: I0122 06:07:26.774150 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-68c6f69c56-rbr89" Jan 22 06:07:26 crc kubenswrapper[4982]: I0122 06:07:26.999012 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-68c6f69c56-rbr89" Jan 22 06:07:27 crc kubenswrapper[4982]: I0122 06:07:27.098392 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6d8df7d8f6-ctbwg"] Jan 22 06:07:27 crc kubenswrapper[4982]: I0122 06:07:27.098657 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6d8df7d8f6-ctbwg" podUID="a273f991-b7b0-4476-a0f5-80e6a439951b" containerName="barbican-api-log" containerID="cri-o://1c30acf7712684fed6d3dbc08053f9e0f00a311305bcd3c6a262a28f8f768d5b" gracePeriod=30 Jan 22 06:07:27 crc kubenswrapper[4982]: I0122 06:07:27.099450 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6d8df7d8f6-ctbwg" podUID="a273f991-b7b0-4476-a0f5-80e6a439951b" containerName="barbican-api" containerID="cri-o://3baebc048a8568a0b35bbd367312ef088577cfd6e3066d5b329295972e568cad" gracePeriod=30 Jan 22 06:07:27 crc kubenswrapper[4982]: I0122 06:07:27.178507 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jan 22 06:07:27 crc kubenswrapper[4982]: I0122 06:07:27.502514 4982 generic.go:334] "Generic (PLEG): container finished" podID="a273f991-b7b0-4476-a0f5-80e6a439951b" containerID="1c30acf7712684fed6d3dbc08053f9e0f00a311305bcd3c6a262a28f8f768d5b" exitCode=143 Jan 22 06:07:27 crc kubenswrapper[4982]: I0122 06:07:27.502919 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6d8df7d8f6-ctbwg" event={"ID":"a273f991-b7b0-4476-a0f5-80e6a439951b","Type":"ContainerDied","Data":"1c30acf7712684fed6d3dbc08053f9e0f00a311305bcd3c6a262a28f8f768d5b"} Jan 22 06:07:27 crc kubenswrapper[4982]: I0122 06:07:27.540478 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ec319049-80f6-4114-b54e-085e69caf06a","Type":"ContainerStarted","Data":"d959af241d0e4a66952d4796f6c1d6cc4bf06a0e4b3a7967e2c465c1b67bab7b"} Jan 22 06:07:27 crc kubenswrapper[4982]: I0122 06:07:27.540556 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ec319049-80f6-4114-b54e-085e69caf06a","Type":"ContainerStarted","Data":"92e031bbff0d801e63e3b6bf3a0ec01fa17bc957883d7d6794dfe076c023c6c3"} Jan 22 06:07:27 crc kubenswrapper[4982]: I0122 06:07:27.541479 4982 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Jan 22 06:07:27 crc kubenswrapper[4982]: I0122 06:07:27.541781 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="ec319049-80f6-4114-b54e-085e69caf06a" containerName="cinder-api" containerID="cri-o://d959af241d0e4a66952d4796f6c1d6cc4bf06a0e4b3a7967e2c465c1b67bab7b" gracePeriod=30 Jan 22 06:07:27 crc kubenswrapper[4982]: I0122 06:07:27.548373 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"270f0f59-6283-4756-9f65-d8034fae4770","Type":"ContainerStarted","Data":"6776a493b4491ba173439616c7e02a238c9f2629b574f1c5bbcbd7817bfa2a08"} Jan 22 06:07:27 crc kubenswrapper[4982]: I0122 06:07:27.540799 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="ec319049-80f6-4114-b54e-085e69caf06a" containerName="cinder-api-log" containerID="cri-o://92e031bbff0d801e63e3b6bf3a0ec01fa17bc957883d7d6794dfe076c023c6c3" gracePeriod=30 Jan 22 06:07:27 crc kubenswrapper[4982]: I0122 06:07:27.602899 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b344d2d6-b86e-47d1-9e84-4e263485a947","Type":"ContainerStarted","Data":"4e439f7020132424a5b438314238b43906741194e87216ab4961b21aeb352c53"} Jan 22 06:07:27 crc kubenswrapper[4982]: I0122 06:07:27.604349 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 22 06:07:27 crc kubenswrapper[4982]: I0122 06:07:27.607037 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.607016926 podStartE2EDuration="3.607016926s" podCreationTimestamp="2026-01-22 06:07:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:07:27.583125804 +0000 UTC m=+1308.421763797" watchObservedRunningTime="2026-01-22 06:07:27.607016926 +0000 UTC m=+1308.445654929" Jan 22 06:07:27 crc kubenswrapper[4982]: I0122 06:07:27.630906 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75dbb546bf-sksw6" event={"ID":"df7ef838-a20d-4c64-bc92-e2af11097e20","Type":"ContainerStarted","Data":"46fe9b706708218bb7062ec9bac3cfce89e3794a42aea6a02f8ebc22bcf7c602"} Jan 22 06:07:27 crc kubenswrapper[4982]: I0122 06:07:27.630993 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-75dbb546bf-sksw6" Jan 22 06:07:27 crc kubenswrapper[4982]: I0122 06:07:27.640723 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.8704814279999997 podStartE2EDuration="6.640701612s" podCreationTimestamp="2026-01-22 06:07:21 +0000 UTC" firstStartedPulling="2026-01-22 06:07:22.345585034 +0000 UTC m=+1303.184223037" lastFinishedPulling="2026-01-22 06:07:26.115805218 +0000 UTC m=+1306.954443221" observedRunningTime="2026-01-22 06:07:27.630435574 +0000 UTC m=+1308.469073587" watchObservedRunningTime="2026-01-22 06:07:27.640701612 +0000 UTC m=+1308.479339605" Jan 22 06:07:27 crc kubenswrapper[4982]: I0122 06:07:27.665757 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-75dbb546bf-sksw6" podStartSLOduration=3.665736183 podStartE2EDuration="3.665736183s" podCreationTimestamp="2026-01-22 06:07:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:07:27.649660505 +0000 UTC m=+1308.488298508" watchObservedRunningTime="2026-01-22 06:07:27.665736183 +0000 UTC m=+1308.504374186" Jan 22 06:07:28 crc kubenswrapper[4982]: I0122 06:07:28.641791 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"270f0f59-6283-4756-9f65-d8034fae4770","Type":"ContainerStarted","Data":"9f519973b89d462871fc80738124848d20c2909cd99ee109530b8d416e8ae968"} Jan 22 06:07:28 crc kubenswrapper[4982]: I0122 06:07:28.645666 4982 generic.go:334] "Generic (PLEG): container finished" podID="ec319049-80f6-4114-b54e-085e69caf06a" containerID="92e031bbff0d801e63e3b6bf3a0ec01fa17bc957883d7d6794dfe076c023c6c3" exitCode=143 Jan 22 06:07:28 crc kubenswrapper[4982]: I0122 06:07:28.646415 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ec319049-80f6-4114-b54e-085e69caf06a","Type":"ContainerDied","Data":"92e031bbff0d801e63e3b6bf3a0ec01fa17bc957883d7d6794dfe076c023c6c3"} Jan 22 06:07:28 crc kubenswrapper[4982]: I0122 06:07:28.674916 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.797206321 podStartE2EDuration="4.674899445s" podCreationTimestamp="2026-01-22 06:07:24 +0000 UTC" firstStartedPulling="2026-01-22 06:07:25.437428487 +0000 UTC m=+1306.276066490" lastFinishedPulling="2026-01-22 06:07:26.315121611 +0000 UTC m=+1307.153759614" observedRunningTime="2026-01-22 06:07:28.670367068 +0000 UTC m=+1309.509005071" watchObservedRunningTime="2026-01-22 06:07:28.674899445 +0000 UTC m=+1309.513537448" Jan 22 06:07:29 crc kubenswrapper[4982]: I0122 06:07:29.876913 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Jan 22 06:07:30 crc kubenswrapper[4982]: I0122 06:07:30.309172 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6d8df7d8f6-ctbwg" podUID="a273f991-b7b0-4476-a0f5-80e6a439951b" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.157:9311/healthcheck\": read tcp 10.217.0.2:46812->10.217.0.157:9311: read: connection reset by peer" Jan 22 06:07:30 crc kubenswrapper[4982]: I0122 06:07:30.309599 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6d8df7d8f6-ctbwg" podUID="a273f991-b7b0-4476-a0f5-80e6a439951b" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.157:9311/healthcheck\": read tcp 10.217.0.2:46806->10.217.0.157:9311: read: connection reset by peer" Jan 22 06:07:30 crc kubenswrapper[4982]: I0122 06:07:30.672707 4982 generic.go:334] "Generic (PLEG): container finished" podID="a273f991-b7b0-4476-a0f5-80e6a439951b" containerID="3baebc048a8568a0b35bbd367312ef088577cfd6e3066d5b329295972e568cad" exitCode=0 Jan 22 06:07:30 crc kubenswrapper[4982]: I0122 06:07:30.672818 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6d8df7d8f6-ctbwg" event={"ID":"a273f991-b7b0-4476-a0f5-80e6a439951b","Type":"ContainerDied","Data":"3baebc048a8568a0b35bbd367312ef088577cfd6e3066d5b329295972e568cad"} Jan 22 06:07:30 crc kubenswrapper[4982]: I0122 06:07:30.823115 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6d8df7d8f6-ctbwg" Jan 22 06:07:30 crc kubenswrapper[4982]: I0122 06:07:30.972588 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a273f991-b7b0-4476-a0f5-80e6a439951b-logs\") pod \"a273f991-b7b0-4476-a0f5-80e6a439951b\" (UID: \"a273f991-b7b0-4476-a0f5-80e6a439951b\") " Jan 22 06:07:30 crc kubenswrapper[4982]: I0122 06:07:30.972784 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a273f991-b7b0-4476-a0f5-80e6a439951b-combined-ca-bundle\") pod \"a273f991-b7b0-4476-a0f5-80e6a439951b\" (UID: \"a273f991-b7b0-4476-a0f5-80e6a439951b\") " Jan 22 06:07:30 crc kubenswrapper[4982]: I0122 06:07:30.972806 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a273f991-b7b0-4476-a0f5-80e6a439951b-config-data-custom\") pod \"a273f991-b7b0-4476-a0f5-80e6a439951b\" (UID: \"a273f991-b7b0-4476-a0f5-80e6a439951b\") " Jan 22 06:07:30 crc kubenswrapper[4982]: I0122 06:07:30.972823 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a273f991-b7b0-4476-a0f5-80e6a439951b-config-data\") pod \"a273f991-b7b0-4476-a0f5-80e6a439951b\" (UID: \"a273f991-b7b0-4476-a0f5-80e6a439951b\") " Jan 22 06:07:30 crc kubenswrapper[4982]: I0122 06:07:30.973200 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a273f991-b7b0-4476-a0f5-80e6a439951b-logs" (OuterVolumeSpecName: "logs") pod "a273f991-b7b0-4476-a0f5-80e6a439951b" (UID: "a273f991-b7b0-4476-a0f5-80e6a439951b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:07:30 crc kubenswrapper[4982]: I0122 06:07:30.973210 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gvx9s\" (UniqueName: \"kubernetes.io/projected/a273f991-b7b0-4476-a0f5-80e6a439951b-kube-api-access-gvx9s\") pod \"a273f991-b7b0-4476-a0f5-80e6a439951b\" (UID: \"a273f991-b7b0-4476-a0f5-80e6a439951b\") " Jan 22 06:07:30 crc kubenswrapper[4982]: I0122 06:07:30.973704 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a273f991-b7b0-4476-a0f5-80e6a439951b-logs\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:30 crc kubenswrapper[4982]: I0122 06:07:30.981554 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a273f991-b7b0-4476-a0f5-80e6a439951b-kube-api-access-gvx9s" (OuterVolumeSpecName: "kube-api-access-gvx9s") pod "a273f991-b7b0-4476-a0f5-80e6a439951b" (UID: "a273f991-b7b0-4476-a0f5-80e6a439951b"). InnerVolumeSpecName "kube-api-access-gvx9s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:07:30 crc kubenswrapper[4982]: I0122 06:07:30.996566 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a273f991-b7b0-4476-a0f5-80e6a439951b-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a273f991-b7b0-4476-a0f5-80e6a439951b" (UID: "a273f991-b7b0-4476-a0f5-80e6a439951b"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:31 crc kubenswrapper[4982]: I0122 06:07:31.013997 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a273f991-b7b0-4476-a0f5-80e6a439951b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a273f991-b7b0-4476-a0f5-80e6a439951b" (UID: "a273f991-b7b0-4476-a0f5-80e6a439951b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:31 crc kubenswrapper[4982]: I0122 06:07:31.028153 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a273f991-b7b0-4476-a0f5-80e6a439951b-config-data" (OuterVolumeSpecName: "config-data") pod "a273f991-b7b0-4476-a0f5-80e6a439951b" (UID: "a273f991-b7b0-4476-a0f5-80e6a439951b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:31 crc kubenswrapper[4982]: I0122 06:07:31.075735 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a273f991-b7b0-4476-a0f5-80e6a439951b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:31 crc kubenswrapper[4982]: I0122 06:07:31.075766 4982 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a273f991-b7b0-4476-a0f5-80e6a439951b-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:31 crc kubenswrapper[4982]: I0122 06:07:31.075778 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a273f991-b7b0-4476-a0f5-80e6a439951b-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:31 crc kubenswrapper[4982]: I0122 06:07:31.075789 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gvx9s\" (UniqueName: \"kubernetes.io/projected/a273f991-b7b0-4476-a0f5-80e6a439951b-kube-api-access-gvx9s\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:31 crc kubenswrapper[4982]: I0122 06:07:31.693531 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6d8df7d8f6-ctbwg" event={"ID":"a273f991-b7b0-4476-a0f5-80e6a439951b","Type":"ContainerDied","Data":"644e6a3249a43f1c0c251475f6a2437406903451baa7f1887ec09998e7dfb3ef"} Jan 22 06:07:31 crc kubenswrapper[4982]: I0122 06:07:31.693613 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6d8df7d8f6-ctbwg" Jan 22 06:07:31 crc kubenswrapper[4982]: I0122 06:07:31.694113 4982 scope.go:117] "RemoveContainer" containerID="3baebc048a8568a0b35bbd367312ef088577cfd6e3066d5b329295972e568cad" Jan 22 06:07:31 crc kubenswrapper[4982]: I0122 06:07:31.751315 4982 scope.go:117] "RemoveContainer" containerID="1c30acf7712684fed6d3dbc08053f9e0f00a311305bcd3c6a262a28f8f768d5b" Jan 22 06:07:31 crc kubenswrapper[4982]: I0122 06:07:31.757206 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6d8df7d8f6-ctbwg"] Jan 22 06:07:31 crc kubenswrapper[4982]: I0122 06:07:31.766049 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-6d8df7d8f6-ctbwg"] Jan 22 06:07:33 crc kubenswrapper[4982]: I0122 06:07:33.733298 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a273f991-b7b0-4476-a0f5-80e6a439951b" path="/var/lib/kubelet/pods/a273f991-b7b0-4476-a0f5-80e6a439951b/volumes" Jan 22 06:07:34 crc kubenswrapper[4982]: I0122 06:07:34.970092 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-75dbb546bf-sksw6" Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.066098 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-66cdd4b5b5-b5tfx"] Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.066772 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-66cdd4b5b5-b5tfx" podUID="80c8240c-c286-40a5-91c3-db77f4a01b8e" containerName="dnsmasq-dns" containerID="cri-o://c0c5bd1dcbe3bf2491418869ac94c84b114cfec9f0eae089dcc89565a0575128" gracePeriod=10 Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.069672 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-664cd4b9c4-ccktp" Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.076164 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-664cd4b9c4-ccktp" Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.300408 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.347705 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.586039 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-66cdd4b5b5-b5tfx" Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.670246 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/80c8240c-c286-40a5-91c3-db77f4a01b8e-dns-svc\") pod \"80c8240c-c286-40a5-91c3-db77f4a01b8e\" (UID: \"80c8240c-c286-40a5-91c3-db77f4a01b8e\") " Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.670379 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/80c8240c-c286-40a5-91c3-db77f4a01b8e-dns-swift-storage-0\") pod \"80c8240c-c286-40a5-91c3-db77f4a01b8e\" (UID: \"80c8240c-c286-40a5-91c3-db77f4a01b8e\") " Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.670400 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/80c8240c-c286-40a5-91c3-db77f4a01b8e-ovsdbserver-sb\") pod \"80c8240c-c286-40a5-91c3-db77f4a01b8e\" (UID: \"80c8240c-c286-40a5-91c3-db77f4a01b8e\") " Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.670468 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7sj9s\" (UniqueName: \"kubernetes.io/projected/80c8240c-c286-40a5-91c3-db77f4a01b8e-kube-api-access-7sj9s\") pod \"80c8240c-c286-40a5-91c3-db77f4a01b8e\" (UID: \"80c8240c-c286-40a5-91c3-db77f4a01b8e\") " Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.670515 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80c8240c-c286-40a5-91c3-db77f4a01b8e-config\") pod \"80c8240c-c286-40a5-91c3-db77f4a01b8e\" (UID: \"80c8240c-c286-40a5-91c3-db77f4a01b8e\") " Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.670555 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/80c8240c-c286-40a5-91c3-db77f4a01b8e-ovsdbserver-nb\") pod \"80c8240c-c286-40a5-91c3-db77f4a01b8e\" (UID: \"80c8240c-c286-40a5-91c3-db77f4a01b8e\") " Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.682819 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80c8240c-c286-40a5-91c3-db77f4a01b8e-kube-api-access-7sj9s" (OuterVolumeSpecName: "kube-api-access-7sj9s") pod "80c8240c-c286-40a5-91c3-db77f4a01b8e" (UID: "80c8240c-c286-40a5-91c3-db77f4a01b8e"). InnerVolumeSpecName "kube-api-access-7sj9s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.714897 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/80c8240c-c286-40a5-91c3-db77f4a01b8e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "80c8240c-c286-40a5-91c3-db77f4a01b8e" (UID: "80c8240c-c286-40a5-91c3-db77f4a01b8e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.740526 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/80c8240c-c286-40a5-91c3-db77f4a01b8e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "80c8240c-c286-40a5-91c3-db77f4a01b8e" (UID: "80c8240c-c286-40a5-91c3-db77f4a01b8e"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.745271 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/80c8240c-c286-40a5-91c3-db77f4a01b8e-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "80c8240c-c286-40a5-91c3-db77f4a01b8e" (UID: "80c8240c-c286-40a5-91c3-db77f4a01b8e"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.747415 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/80c8240c-c286-40a5-91c3-db77f4a01b8e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "80c8240c-c286-40a5-91c3-db77f4a01b8e" (UID: "80c8240c-c286-40a5-91c3-db77f4a01b8e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.755694 4982 generic.go:334] "Generic (PLEG): container finished" podID="80c8240c-c286-40a5-91c3-db77f4a01b8e" containerID="c0c5bd1dcbe3bf2491418869ac94c84b114cfec9f0eae089dcc89565a0575128" exitCode=0 Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.755950 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="270f0f59-6283-4756-9f65-d8034fae4770" containerName="cinder-scheduler" containerID="cri-o://6776a493b4491ba173439616c7e02a238c9f2629b574f1c5bbcbd7817bfa2a08" gracePeriod=30 Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.756034 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66cdd4b5b5-b5tfx" Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.756049 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="270f0f59-6283-4756-9f65-d8034fae4770" containerName="probe" containerID="cri-o://9f519973b89d462871fc80738124848d20c2909cd99ee109530b8d416e8ae968" gracePeriod=30 Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.772121 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/80c8240c-c286-40a5-91c3-db77f4a01b8e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.772256 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7sj9s\" (UniqueName: \"kubernetes.io/projected/80c8240c-c286-40a5-91c3-db77f4a01b8e-kube-api-access-7sj9s\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.772335 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/80c8240c-c286-40a5-91c3-db77f4a01b8e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.772394 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/80c8240c-c286-40a5-91c3-db77f4a01b8e-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.772445 4982 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/80c8240c-c286-40a5-91c3-db77f4a01b8e-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.773348 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-66cdd4b5b5-b5tfx" event={"ID":"80c8240c-c286-40a5-91c3-db77f4a01b8e","Type":"ContainerDied","Data":"c0c5bd1dcbe3bf2491418869ac94c84b114cfec9f0eae089dcc89565a0575128"} Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.773387 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66cdd4b5b5-b5tfx" event={"ID":"80c8240c-c286-40a5-91c3-db77f4a01b8e","Type":"ContainerDied","Data":"7cbf1ef2eddee9268d2d82ac415fee1b95ec078c4b0d46793ca6f3aa3dc3a184"} Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.773407 4982 scope.go:117] "RemoveContainer" containerID="c0c5bd1dcbe3bf2491418869ac94c84b114cfec9f0eae089dcc89565a0575128" Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.786355 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/80c8240c-c286-40a5-91c3-db77f4a01b8e-config" (OuterVolumeSpecName: "config") pod "80c8240c-c286-40a5-91c3-db77f4a01b8e" (UID: "80c8240c-c286-40a5-91c3-db77f4a01b8e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.797897 4982 scope.go:117] "RemoveContainer" containerID="c04e95e86e12b2113c6af49fabba0b3e46675077affcf285be9979a22e966ecd" Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.817945 4982 scope.go:117] "RemoveContainer" containerID="c0c5bd1dcbe3bf2491418869ac94c84b114cfec9f0eae089dcc89565a0575128" Jan 22 06:07:35 crc kubenswrapper[4982]: E0122 06:07:35.818665 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0c5bd1dcbe3bf2491418869ac94c84b114cfec9f0eae089dcc89565a0575128\": container with ID starting with c0c5bd1dcbe3bf2491418869ac94c84b114cfec9f0eae089dcc89565a0575128 not found: ID does not exist" containerID="c0c5bd1dcbe3bf2491418869ac94c84b114cfec9f0eae089dcc89565a0575128" Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.818803 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0c5bd1dcbe3bf2491418869ac94c84b114cfec9f0eae089dcc89565a0575128"} err="failed to get container status \"c0c5bd1dcbe3bf2491418869ac94c84b114cfec9f0eae089dcc89565a0575128\": rpc error: code = NotFound desc = could not find container \"c0c5bd1dcbe3bf2491418869ac94c84b114cfec9f0eae089dcc89565a0575128\": container with ID starting with c0c5bd1dcbe3bf2491418869ac94c84b114cfec9f0eae089dcc89565a0575128 not found: ID does not exist" Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.819149 4982 scope.go:117] "RemoveContainer" containerID="c04e95e86e12b2113c6af49fabba0b3e46675077affcf285be9979a22e966ecd" Jan 22 06:07:35 crc kubenswrapper[4982]: E0122 06:07:35.819685 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c04e95e86e12b2113c6af49fabba0b3e46675077affcf285be9979a22e966ecd\": container with ID starting with c04e95e86e12b2113c6af49fabba0b3e46675077affcf285be9979a22e966ecd not found: ID does not exist" containerID="c04e95e86e12b2113c6af49fabba0b3e46675077affcf285be9979a22e966ecd" Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.819727 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c04e95e86e12b2113c6af49fabba0b3e46675077affcf285be9979a22e966ecd"} err="failed to get container status \"c04e95e86e12b2113c6af49fabba0b3e46675077affcf285be9979a22e966ecd\": rpc error: code = NotFound desc = could not find 
container \"c04e95e86e12b2113c6af49fabba0b3e46675077affcf285be9979a22e966ecd\": container with ID starting with c04e95e86e12b2113c6af49fabba0b3e46675077affcf285be9979a22e966ecd not found: ID does not exist" Jan 22 06:07:35 crc kubenswrapper[4982]: I0122 06:07:35.875017 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80c8240c-c286-40a5-91c3-db77f4a01b8e-config\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:36 crc kubenswrapper[4982]: I0122 06:07:36.105615 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-66cdd4b5b5-b5tfx"] Jan 22 06:07:36 crc kubenswrapper[4982]: I0122 06:07:36.114609 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-66cdd4b5b5-b5tfx"] Jan 22 06:07:36 crc kubenswrapper[4982]: I0122 06:07:36.722839 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-7c5dd486cd-r6bbs" Jan 22 06:07:36 crc kubenswrapper[4982]: I0122 06:07:36.775216 4982 generic.go:334] "Generic (PLEG): container finished" podID="270f0f59-6283-4756-9f65-d8034fae4770" containerID="9f519973b89d462871fc80738124848d20c2909cd99ee109530b8d416e8ae968" exitCode=0 Jan 22 06:07:36 crc kubenswrapper[4982]: I0122 06:07:36.775275 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"270f0f59-6283-4756-9f65-d8034fae4770","Type":"ContainerDied","Data":"9f519973b89d462871fc80738124848d20c2909cd99ee109530b8d416e8ae968"} Jan 22 06:07:37 crc kubenswrapper[4982]: I0122 06:07:37.087182 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Jan 22 06:07:37 crc kubenswrapper[4982]: I0122 06:07:37.737904 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80c8240c-c286-40a5-91c3-db77f4a01b8e" path="/var/lib/kubelet/pods/80c8240c-c286-40a5-91c3-db77f4a01b8e/volumes" Jan 22 06:07:38 crc kubenswrapper[4982]: I0122 06:07:38.525914 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 22 06:07:38 crc kubenswrapper[4982]: I0122 06:07:38.643151 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-5566dc6564-s4gjt" Jan 22 06:07:38 crc kubenswrapper[4982]: I0122 06:07:38.643496 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/270f0f59-6283-4756-9f65-d8034fae4770-combined-ca-bundle\") pod \"270f0f59-6283-4756-9f65-d8034fae4770\" (UID: \"270f0f59-6283-4756-9f65-d8034fae4770\") " Jan 22 06:07:38 crc kubenswrapper[4982]: I0122 06:07:38.643554 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/270f0f59-6283-4756-9f65-d8034fae4770-etc-machine-id\") pod \"270f0f59-6283-4756-9f65-d8034fae4770\" (UID: \"270f0f59-6283-4756-9f65-d8034fae4770\") " Jan 22 06:07:38 crc kubenswrapper[4982]: I0122 06:07:38.643633 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/270f0f59-6283-4756-9f65-d8034fae4770-config-data-custom\") pod \"270f0f59-6283-4756-9f65-d8034fae4770\" (UID: \"270f0f59-6283-4756-9f65-d8034fae4770\") " Jan 22 06:07:38 crc kubenswrapper[4982]: I0122 06:07:38.643699 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nlk5m\" (UniqueName: \"kubernetes.io/projected/270f0f59-6283-4756-9f65-d8034fae4770-kube-api-access-nlk5m\") pod \"270f0f59-6283-4756-9f65-d8034fae4770\" (UID: \"270f0f59-6283-4756-9f65-d8034fae4770\") " Jan 22 06:07:38 crc kubenswrapper[4982]: I0122 06:07:38.643732 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/270f0f59-6283-4756-9f65-d8034fae4770-scripts\") pod \"270f0f59-6283-4756-9f65-d8034fae4770\" (UID: \"270f0f59-6283-4756-9f65-d8034fae4770\") " Jan 22 06:07:38 crc kubenswrapper[4982]: I0122 06:07:38.643784 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/270f0f59-6283-4756-9f65-d8034fae4770-config-data\") pod \"270f0f59-6283-4756-9f65-d8034fae4770\" (UID: \"270f0f59-6283-4756-9f65-d8034fae4770\") " Jan 22 06:07:38 crc kubenswrapper[4982]: I0122 06:07:38.645050 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/270f0f59-6283-4756-9f65-d8034fae4770-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "270f0f59-6283-4756-9f65-d8034fae4770" (UID: "270f0f59-6283-4756-9f65-d8034fae4770"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 06:07:38 crc kubenswrapper[4982]: I0122 06:07:38.654709 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/270f0f59-6283-4756-9f65-d8034fae4770-scripts" (OuterVolumeSpecName: "scripts") pod "270f0f59-6283-4756-9f65-d8034fae4770" (UID: "270f0f59-6283-4756-9f65-d8034fae4770"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:38 crc kubenswrapper[4982]: I0122 06:07:38.678078 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/270f0f59-6283-4756-9f65-d8034fae4770-kube-api-access-nlk5m" (OuterVolumeSpecName: "kube-api-access-nlk5m") pod "270f0f59-6283-4756-9f65-d8034fae4770" (UID: "270f0f59-6283-4756-9f65-d8034fae4770"). InnerVolumeSpecName "kube-api-access-nlk5m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:07:38 crc kubenswrapper[4982]: I0122 06:07:38.711316 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/270f0f59-6283-4756-9f65-d8034fae4770-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "270f0f59-6283-4756-9f65-d8034fae4770" (UID: "270f0f59-6283-4756-9f65-d8034fae4770"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:38 crc kubenswrapper[4982]: I0122 06:07:38.745673 4982 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/270f0f59-6283-4756-9f65-d8034fae4770-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:38 crc kubenswrapper[4982]: I0122 06:07:38.746572 4982 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/270f0f59-6283-4756-9f65-d8034fae4770-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:38 crc kubenswrapper[4982]: I0122 06:07:38.746635 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nlk5m\" (UniqueName: \"kubernetes.io/projected/270f0f59-6283-4756-9f65-d8034fae4770-kube-api-access-nlk5m\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:38 crc kubenswrapper[4982]: I0122 06:07:38.746697 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/270f0f59-6283-4756-9f65-d8034fae4770-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:38 crc kubenswrapper[4982]: I0122 06:07:38.803702 4982 generic.go:334] "Generic (PLEG): container finished" podID="270f0f59-6283-4756-9f65-d8034fae4770" containerID="6776a493b4491ba173439616c7e02a238c9f2629b574f1c5bbcbd7817bfa2a08" exitCode=0 Jan 22 06:07:38 crc kubenswrapper[4982]: I0122 06:07:38.803744 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"270f0f59-6283-4756-9f65-d8034fae4770","Type":"ContainerDied","Data":"6776a493b4491ba173439616c7e02a238c9f2629b574f1c5bbcbd7817bfa2a08"} Jan 22 06:07:38 crc kubenswrapper[4982]: I0122 06:07:38.803770 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"270f0f59-6283-4756-9f65-d8034fae4770","Type":"ContainerDied","Data":"4ebc0015a3f9bd63a04b413985148d00f18d8762b72807870e6d8430eb307279"} Jan 22 06:07:38 crc kubenswrapper[4982]: I0122 06:07:38.803787 4982 scope.go:117] "RemoveContainer" containerID="9f519973b89d462871fc80738124848d20c2909cd99ee109530b8d416e8ae968" Jan 22 06:07:38 crc kubenswrapper[4982]: I0122 06:07:38.803916 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 22 06:07:38 crc kubenswrapper[4982]: I0122 06:07:38.816975 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/270f0f59-6283-4756-9f65-d8034fae4770-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "270f0f59-6283-4756-9f65-d8034fae4770" (UID: "270f0f59-6283-4756-9f65-d8034fae4770"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:38 crc kubenswrapper[4982]: I0122 06:07:38.833822 4982 scope.go:117] "RemoveContainer" containerID="6776a493b4491ba173439616c7e02a238c9f2629b574f1c5bbcbd7817bfa2a08" Jan 22 06:07:38 crc kubenswrapper[4982]: I0122 06:07:38.837619 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/270f0f59-6283-4756-9f65-d8034fae4770-config-data" (OuterVolumeSpecName: "config-data") pod "270f0f59-6283-4756-9f65-d8034fae4770" (UID: "270f0f59-6283-4756-9f65-d8034fae4770"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:38 crc kubenswrapper[4982]: I0122 06:07:38.852454 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/270f0f59-6283-4756-9f65-d8034fae4770-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:38 crc kubenswrapper[4982]: I0122 06:07:38.852493 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/270f0f59-6283-4756-9f65-d8034fae4770-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:38 crc kubenswrapper[4982]: I0122 06:07:38.875697 4982 scope.go:117] "RemoveContainer" containerID="9f519973b89d462871fc80738124848d20c2909cd99ee109530b8d416e8ae968" Jan 22 06:07:38 crc kubenswrapper[4982]: E0122 06:07:38.876125 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f519973b89d462871fc80738124848d20c2909cd99ee109530b8d416e8ae968\": container with ID starting with 9f519973b89d462871fc80738124848d20c2909cd99ee109530b8d416e8ae968 not found: ID does not exist" containerID="9f519973b89d462871fc80738124848d20c2909cd99ee109530b8d416e8ae968" Jan 22 06:07:38 crc kubenswrapper[4982]: I0122 06:07:38.876158 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f519973b89d462871fc80738124848d20c2909cd99ee109530b8d416e8ae968"} err="failed to get container status \"9f519973b89d462871fc80738124848d20c2909cd99ee109530b8d416e8ae968\": rpc error: code = NotFound desc = could not find container \"9f519973b89d462871fc80738124848d20c2909cd99ee109530b8d416e8ae968\": container with ID starting with 9f519973b89d462871fc80738124848d20c2909cd99ee109530b8d416e8ae968 not found: ID does not exist" Jan 22 06:07:38 crc kubenswrapper[4982]: I0122 06:07:38.876179 4982 scope.go:117] "RemoveContainer" containerID="6776a493b4491ba173439616c7e02a238c9f2629b574f1c5bbcbd7817bfa2a08" Jan 22 06:07:38 crc kubenswrapper[4982]: E0122 06:07:38.876343 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6776a493b4491ba173439616c7e02a238c9f2629b574f1c5bbcbd7817bfa2a08\": container with ID starting with 6776a493b4491ba173439616c7e02a238c9f2629b574f1c5bbcbd7817bfa2a08 not found: ID does not exist" containerID="6776a493b4491ba173439616c7e02a238c9f2629b574f1c5bbcbd7817bfa2a08" Jan 22 06:07:38 crc 
kubenswrapper[4982]: I0122 06:07:38.876365 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6776a493b4491ba173439616c7e02a238c9f2629b574f1c5bbcbd7817bfa2a08"} err="failed to get container status \"6776a493b4491ba173439616c7e02a238c9f2629b574f1c5bbcbd7817bfa2a08\": rpc error: code = NotFound desc = could not find container \"6776a493b4491ba173439616c7e02a238c9f2629b574f1c5bbcbd7817bfa2a08\": container with ID starting with 6776a493b4491ba173439616c7e02a238c9f2629b574f1c5bbcbd7817bfa2a08 not found: ID does not exist" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.132549 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.144584 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.161557 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 06:07:39 crc kubenswrapper[4982]: E0122 06:07:39.161943 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a273f991-b7b0-4476-a0f5-80e6a439951b" containerName="barbican-api-log" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.161965 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="a273f991-b7b0-4476-a0f5-80e6a439951b" containerName="barbican-api-log" Jan 22 06:07:39 crc kubenswrapper[4982]: E0122 06:07:39.161981 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="270f0f59-6283-4756-9f65-d8034fae4770" containerName="cinder-scheduler" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.161997 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="270f0f59-6283-4756-9f65-d8034fae4770" containerName="cinder-scheduler" Jan 22 06:07:39 crc kubenswrapper[4982]: E0122 06:07:39.162011 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80c8240c-c286-40a5-91c3-db77f4a01b8e" containerName="init" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.162017 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="80c8240c-c286-40a5-91c3-db77f4a01b8e" containerName="init" Jan 22 06:07:39 crc kubenswrapper[4982]: E0122 06:07:39.162035 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a273f991-b7b0-4476-a0f5-80e6a439951b" containerName="barbican-api" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.162041 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="a273f991-b7b0-4476-a0f5-80e6a439951b" containerName="barbican-api" Jan 22 06:07:39 crc kubenswrapper[4982]: E0122 06:07:39.162055 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80c8240c-c286-40a5-91c3-db77f4a01b8e" containerName="dnsmasq-dns" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.162062 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="80c8240c-c286-40a5-91c3-db77f4a01b8e" containerName="dnsmasq-dns" Jan 22 06:07:39 crc kubenswrapper[4982]: E0122 06:07:39.162077 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="270f0f59-6283-4756-9f65-d8034fae4770" containerName="probe" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.162083 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="270f0f59-6283-4756-9f65-d8034fae4770" containerName="probe" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.162247 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="80c8240c-c286-40a5-91c3-db77f4a01b8e" 
containerName="dnsmasq-dns" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.162257 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="270f0f59-6283-4756-9f65-d8034fae4770" containerName="probe" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.162269 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="a273f991-b7b0-4476-a0f5-80e6a439951b" containerName="barbican-api-log" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.162284 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="270f0f59-6283-4756-9f65-d8034fae4770" containerName="cinder-scheduler" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.162299 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="a273f991-b7b0-4476-a0f5-80e6a439951b" containerName="barbican-api" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.163180 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.169978 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.210398 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.259166 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7svjg\" (UniqueName: \"kubernetes.io/projected/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-kube-api-access-7svjg\") pod \"cinder-scheduler-0\" (UID: \"fd1c632c-d2c0-4f55-9727-af2ffbe1feef\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.259256 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-scripts\") pod \"cinder-scheduler-0\" (UID: \"fd1c632c-d2c0-4f55-9727-af2ffbe1feef\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.259372 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-config-data\") pod \"cinder-scheduler-0\" (UID: \"fd1c632c-d2c0-4f55-9727-af2ffbe1feef\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.259402 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"fd1c632c-d2c0-4f55-9727-af2ffbe1feef\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.259450 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"fd1c632c-d2c0-4f55-9727-af2ffbe1feef\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.259565 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-combined-ca-bundle\") pod 
\"cinder-scheduler-0\" (UID: \"fd1c632c-d2c0-4f55-9727-af2ffbe1feef\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.361392 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"fd1c632c-d2c0-4f55-9727-af2ffbe1feef\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.361480 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7svjg\" (UniqueName: \"kubernetes.io/projected/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-kube-api-access-7svjg\") pod \"cinder-scheduler-0\" (UID: \"fd1c632c-d2c0-4f55-9727-af2ffbe1feef\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.361526 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-scripts\") pod \"cinder-scheduler-0\" (UID: \"fd1c632c-d2c0-4f55-9727-af2ffbe1feef\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.361621 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-config-data\") pod \"cinder-scheduler-0\" (UID: \"fd1c632c-d2c0-4f55-9727-af2ffbe1feef\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.361645 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"fd1c632c-d2c0-4f55-9727-af2ffbe1feef\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.361692 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"fd1c632c-d2c0-4f55-9727-af2ffbe1feef\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.361807 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"fd1c632c-d2c0-4f55-9727-af2ffbe1feef\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.365322 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"fd1c632c-d2c0-4f55-9727-af2ffbe1feef\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.365709 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-scripts\") pod \"cinder-scheduler-0\" (UID: \"fd1c632c-d2c0-4f55-9727-af2ffbe1feef\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.370823 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-config-data\") pod \"cinder-scheduler-0\" (UID: \"fd1c632c-d2c0-4f55-9727-af2ffbe1feef\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.378875 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"fd1c632c-d2c0-4f55-9727-af2ffbe1feef\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.379122 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7svjg\" (UniqueName: \"kubernetes.io/projected/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-kube-api-access-7svjg\") pod \"cinder-scheduler-0\" (UID: \"fd1c632c-d2c0-4f55-9727-af2ffbe1feef\") " pod="openstack/cinder-scheduler-0" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.538158 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 22 06:07:39 crc kubenswrapper[4982]: I0122 06:07:39.752631 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="270f0f59-6283-4756-9f65-d8034fae4770" path="/var/lib/kubelet/pods/270f0f59-6283-4756-9f65-d8034fae4770/volumes" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.016464 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 06:07:40 crc kubenswrapper[4982]: W0122 06:07:40.023821 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfd1c632c_d2c0_4f55_9727_af2ffbe1feef.slice/crio-39f411a18af3d736e8af0acfe927d92c3d3a879ecfeeffc9f57f44de77742d9e WatchSource:0}: Error finding container 39f411a18af3d736e8af0acfe927d92c3d3a879ecfeeffc9f57f44de77742d9e: Status 404 returned error can't find the container with id 39f411a18af3d736e8af0acfe927d92c3d3a879ecfeeffc9f57f44de77742d9e Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.099672 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.101687 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.103874 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.104198 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.104472 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-gjkmw" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.124420 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.183387 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48f850ac-7f8e-43f2-8dae-6038d6bd0666-combined-ca-bundle\") pod \"openstackclient\" (UID: \"48f850ac-7f8e-43f2-8dae-6038d6bd0666\") " pod="openstack/openstackclient" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.183482 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/48f850ac-7f8e-43f2-8dae-6038d6bd0666-openstack-config\") pod \"openstackclient\" (UID: \"48f850ac-7f8e-43f2-8dae-6038d6bd0666\") " pod="openstack/openstackclient" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.183549 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/48f850ac-7f8e-43f2-8dae-6038d6bd0666-openstack-config-secret\") pod \"openstackclient\" (UID: \"48f850ac-7f8e-43f2-8dae-6038d6bd0666\") " pod="openstack/openstackclient" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.183659 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8kph9\" (UniqueName: \"kubernetes.io/projected/48f850ac-7f8e-43f2-8dae-6038d6bd0666-kube-api-access-8kph9\") pod \"openstackclient\" (UID: \"48f850ac-7f8e-43f2-8dae-6038d6bd0666\") " pod="openstack/openstackclient" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.285057 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48f850ac-7f8e-43f2-8dae-6038d6bd0666-combined-ca-bundle\") pod \"openstackclient\" (UID: \"48f850ac-7f8e-43f2-8dae-6038d6bd0666\") " pod="openstack/openstackclient" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.285407 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/48f850ac-7f8e-43f2-8dae-6038d6bd0666-openstack-config\") pod \"openstackclient\" (UID: \"48f850ac-7f8e-43f2-8dae-6038d6bd0666\") " pod="openstack/openstackclient" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.285532 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/48f850ac-7f8e-43f2-8dae-6038d6bd0666-openstack-config-secret\") pod \"openstackclient\" (UID: \"48f850ac-7f8e-43f2-8dae-6038d6bd0666\") " pod="openstack/openstackclient" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.285664 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-8kph9\" (UniqueName: \"kubernetes.io/projected/48f850ac-7f8e-43f2-8dae-6038d6bd0666-kube-api-access-8kph9\") pod \"openstackclient\" (UID: \"48f850ac-7f8e-43f2-8dae-6038d6bd0666\") " pod="openstack/openstackclient" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.286354 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/48f850ac-7f8e-43f2-8dae-6038d6bd0666-openstack-config\") pod \"openstackclient\" (UID: \"48f850ac-7f8e-43f2-8dae-6038d6bd0666\") " pod="openstack/openstackclient" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.288960 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48f850ac-7f8e-43f2-8dae-6038d6bd0666-combined-ca-bundle\") pod \"openstackclient\" (UID: \"48f850ac-7f8e-43f2-8dae-6038d6bd0666\") " pod="openstack/openstackclient" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.304835 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/48f850ac-7f8e-43f2-8dae-6038d6bd0666-openstack-config-secret\") pod \"openstackclient\" (UID: \"48f850ac-7f8e-43f2-8dae-6038d6bd0666\") " pod="openstack/openstackclient" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.307448 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8kph9\" (UniqueName: \"kubernetes.io/projected/48f850ac-7f8e-43f2-8dae-6038d6bd0666-kube-api-access-8kph9\") pod \"openstackclient\" (UID: \"48f850ac-7f8e-43f2-8dae-6038d6bd0666\") " pod="openstack/openstackclient" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.394550 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.395529 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.405959 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.417968 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.419105 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.488596 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46bc1b22-5f45-461f-b090-9c13dd126a6f-combined-ca-bundle\") pod \"openstackclient\" (UID: \"46bc1b22-5f45-461f-b090-9c13dd126a6f\") " pod="openstack/openstackclient" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.488643 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rpnnd\" (UniqueName: \"kubernetes.io/projected/46bc1b22-5f45-461f-b090-9c13dd126a6f-kube-api-access-rpnnd\") pod \"openstackclient\" (UID: \"46bc1b22-5f45-461f-b090-9c13dd126a6f\") " pod="openstack/openstackclient" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.488668 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/46bc1b22-5f45-461f-b090-9c13dd126a6f-openstack-config\") pod \"openstackclient\" (UID: \"46bc1b22-5f45-461f-b090-9c13dd126a6f\") " pod="openstack/openstackclient" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.488766 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/46bc1b22-5f45-461f-b090-9c13dd126a6f-openstack-config-secret\") pod \"openstackclient\" (UID: \"46bc1b22-5f45-461f-b090-9c13dd126a6f\") " pod="openstack/openstackclient" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.490506 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jan 22 06:07:40 crc kubenswrapper[4982]: E0122 06:07:40.584677 4982 log.go:32] "RunPodSandbox from runtime service failed" err=< Jan 22 06:07:40 crc kubenswrapper[4982]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_48f850ac-7f8e-43f2-8dae-6038d6bd0666_0(27bd63ca9cd0827a64d32010e8edc205130b16604077f8bc3442762c82a28830): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"27bd63ca9cd0827a64d32010e8edc205130b16604077f8bc3442762c82a28830" Netns:"/var/run/netns/35b7f43e-e728-4215-a544-d826fad0295e" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=27bd63ca9cd0827a64d32010e8edc205130b16604077f8bc3442762c82a28830;K8S_POD_UID=48f850ac-7f8e-43f2-8dae-6038d6bd0666" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/48f850ac-7f8e-43f2-8dae-6038d6bd0666]: expected pod UID "48f850ac-7f8e-43f2-8dae-6038d6bd0666" but got "46bc1b22-5f45-461f-b090-9c13dd126a6f" from Kube API Jan 22 06:07:40 crc kubenswrapper[4982]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Jan 22 06:07:40 crc kubenswrapper[4982]: > Jan 22 06:07:40 crc kubenswrapper[4982]: E0122 06:07:40.585035 4982 kuberuntime_sandbox.go:72] 
"Failed to create sandbox for pod" err=< Jan 22 06:07:40 crc kubenswrapper[4982]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_48f850ac-7f8e-43f2-8dae-6038d6bd0666_0(27bd63ca9cd0827a64d32010e8edc205130b16604077f8bc3442762c82a28830): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"27bd63ca9cd0827a64d32010e8edc205130b16604077f8bc3442762c82a28830" Netns:"/var/run/netns/35b7f43e-e728-4215-a544-d826fad0295e" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=27bd63ca9cd0827a64d32010e8edc205130b16604077f8bc3442762c82a28830;K8S_POD_UID=48f850ac-7f8e-43f2-8dae-6038d6bd0666" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/48f850ac-7f8e-43f2-8dae-6038d6bd0666]: expected pod UID "48f850ac-7f8e-43f2-8dae-6038d6bd0666" but got "46bc1b22-5f45-461f-b090-9c13dd126a6f" from Kube API Jan 22 06:07:40 crc kubenswrapper[4982]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Jan 22 06:07:40 crc kubenswrapper[4982]: > pod="openstack/openstackclient" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.590745 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46bc1b22-5f45-461f-b090-9c13dd126a6f-combined-ca-bundle\") pod \"openstackclient\" (UID: \"46bc1b22-5f45-461f-b090-9c13dd126a6f\") " pod="openstack/openstackclient" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.590811 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rpnnd\" (UniqueName: \"kubernetes.io/projected/46bc1b22-5f45-461f-b090-9c13dd126a6f-kube-api-access-rpnnd\") pod \"openstackclient\" (UID: \"46bc1b22-5f45-461f-b090-9c13dd126a6f\") " pod="openstack/openstackclient" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.590838 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/46bc1b22-5f45-461f-b090-9c13dd126a6f-openstack-config\") pod \"openstackclient\" (UID: \"46bc1b22-5f45-461f-b090-9c13dd126a6f\") " pod="openstack/openstackclient" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.590938 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/46bc1b22-5f45-461f-b090-9c13dd126a6f-openstack-config-secret\") pod \"openstackclient\" (UID: \"46bc1b22-5f45-461f-b090-9c13dd126a6f\") " pod="openstack/openstackclient" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.591912 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/46bc1b22-5f45-461f-b090-9c13dd126a6f-openstack-config\") pod \"openstackclient\" (UID: \"46bc1b22-5f45-461f-b090-9c13dd126a6f\") " pod="openstack/openstackclient" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.597579 4982 
Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.597579 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/46bc1b22-5f45-461f-b090-9c13dd126a6f-openstack-config-secret\") pod \"openstackclient\" (UID: \"46bc1b22-5f45-461f-b090-9c13dd126a6f\") " pod="openstack/openstackclient"
Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.597681 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46bc1b22-5f45-461f-b090-9c13dd126a6f-combined-ca-bundle\") pod \"openstackclient\" (UID: \"46bc1b22-5f45-461f-b090-9c13dd126a6f\") " pod="openstack/openstackclient"
Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.607843 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rpnnd\" (UniqueName: \"kubernetes.io/projected/46bc1b22-5f45-461f-b090-9c13dd126a6f-kube-api-access-rpnnd\") pod \"openstackclient\" (UID: \"46bc1b22-5f45-461f-b090-9c13dd126a6f\") " pod="openstack/openstackclient"
Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.787939 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.842368 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.842924 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"fd1c632c-d2c0-4f55-9727-af2ffbe1feef","Type":"ContainerStarted","Data":"39f411a18af3d736e8af0acfe927d92c3d3a879ecfeeffc9f57f44de77742d9e"}
Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.846577 4982 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="48f850ac-7f8e-43f2-8dae-6038d6bd0666" podUID="46bc1b22-5f45-461f-b090-9c13dd126a6f"
Need to start a new one" pod="openstack/openstackclient" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.894804 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48f850ac-7f8e-43f2-8dae-6038d6bd0666-combined-ca-bundle\") pod \"48f850ac-7f8e-43f2-8dae-6038d6bd0666\" (UID: \"48f850ac-7f8e-43f2-8dae-6038d6bd0666\") " Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.894836 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/48f850ac-7f8e-43f2-8dae-6038d6bd0666-openstack-config-secret\") pod \"48f850ac-7f8e-43f2-8dae-6038d6bd0666\" (UID: \"48f850ac-7f8e-43f2-8dae-6038d6bd0666\") " Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.894880 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8kph9\" (UniqueName: \"kubernetes.io/projected/48f850ac-7f8e-43f2-8dae-6038d6bd0666-kube-api-access-8kph9\") pod \"48f850ac-7f8e-43f2-8dae-6038d6bd0666\" (UID: \"48f850ac-7f8e-43f2-8dae-6038d6bd0666\") " Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.895054 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/48f850ac-7f8e-43f2-8dae-6038d6bd0666-openstack-config\") pod \"48f850ac-7f8e-43f2-8dae-6038d6bd0666\" (UID: \"48f850ac-7f8e-43f2-8dae-6038d6bd0666\") " Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.895704 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48f850ac-7f8e-43f2-8dae-6038d6bd0666-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "48f850ac-7f8e-43f2-8dae-6038d6bd0666" (UID: "48f850ac-7f8e-43f2-8dae-6038d6bd0666"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.899616 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48f850ac-7f8e-43f2-8dae-6038d6bd0666-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "48f850ac-7f8e-43f2-8dae-6038d6bd0666" (UID: "48f850ac-7f8e-43f2-8dae-6038d6bd0666"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.900493 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48f850ac-7f8e-43f2-8dae-6038d6bd0666-kube-api-access-8kph9" (OuterVolumeSpecName: "kube-api-access-8kph9") pod "48f850ac-7f8e-43f2-8dae-6038d6bd0666" (UID: "48f850ac-7f8e-43f2-8dae-6038d6bd0666"). InnerVolumeSpecName "kube-api-access-8kph9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.902457 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48f850ac-7f8e-43f2-8dae-6038d6bd0666-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "48f850ac-7f8e-43f2-8dae-6038d6bd0666" (UID: "48f850ac-7f8e-43f2-8dae-6038d6bd0666"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.997395 4982 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/48f850ac-7f8e-43f2-8dae-6038d6bd0666-openstack-config\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.997727 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48f850ac-7f8e-43f2-8dae-6038d6bd0666-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.997738 4982 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/48f850ac-7f8e-43f2-8dae-6038d6bd0666-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:40 crc kubenswrapper[4982]: I0122 06:07:40.997747 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8kph9\" (UniqueName: \"kubernetes.io/projected/48f850ac-7f8e-43f2-8dae-6038d6bd0666-kube-api-access-8kph9\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:41 crc kubenswrapper[4982]: I0122 06:07:41.014679 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-f9754c45-wzfk5" Jan 22 06:07:41 crc kubenswrapper[4982]: I0122 06:07:41.128821 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5566dc6564-s4gjt"] Jan 22 06:07:41 crc kubenswrapper[4982]: I0122 06:07:41.129381 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5566dc6564-s4gjt" podUID="2ed422a7-07eb-4414-bae7-fcb9aaa199d9" containerName="neutron-api" containerID="cri-o://4d6ddff127942ffff8e65c5a3983df0e512a4c51c2e9ba3184f2583448796e60" gracePeriod=30 Jan 22 06:07:41 crc kubenswrapper[4982]: I0122 06:07:41.130170 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5566dc6564-s4gjt" podUID="2ed422a7-07eb-4414-bae7-fcb9aaa199d9" containerName="neutron-httpd" containerID="cri-o://a45fbec35662b2a7367b06c8ca35842a7d9b4b669a0e31848fe6dbe6b77ed63c" gracePeriod=30 Jan 22 06:07:41 crc kubenswrapper[4982]: I0122 06:07:41.396320 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jan 22 06:07:41 crc kubenswrapper[4982]: E0122 06:07:41.409782 4982 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2ed422a7_07eb_4414_bae7_fcb9aaa199d9.slice/crio-conmon-a45fbec35662b2a7367b06c8ca35842a7d9b4b669a0e31848fe6dbe6b77ed63c.scope\": RecentStats: unable to find data in memory cache]" Jan 22 06:07:41 crc kubenswrapper[4982]: I0122 06:07:41.742113 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48f850ac-7f8e-43f2-8dae-6038d6bd0666" path="/var/lib/kubelet/pods/48f850ac-7f8e-43f2-8dae-6038d6bd0666/volumes" Jan 22 06:07:41 crc kubenswrapper[4982]: I0122 06:07:41.851905 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"fd1c632c-d2c0-4f55-9727-af2ffbe1feef","Type":"ContainerStarted","Data":"a568fc05e6d1bc9ef6cd2254326be3e07d0040651777d6881228c612c03c305d"} Jan 22 06:07:41 crc kubenswrapper[4982]: I0122 06:07:41.851963 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" 
event={"ID":"fd1c632c-d2c0-4f55-9727-af2ffbe1feef","Type":"ContainerStarted","Data":"f9572435d6750a8ad99f0d2024d200987deec76a44ffc144a2134979ec9cd427"} Jan 22 06:07:41 crc kubenswrapper[4982]: I0122 06:07:41.854792 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5566dc6564-s4gjt" event={"ID":"2ed422a7-07eb-4414-bae7-fcb9aaa199d9","Type":"ContainerDied","Data":"a45fbec35662b2a7367b06c8ca35842a7d9b4b669a0e31848fe6dbe6b77ed63c"} Jan 22 06:07:41 crc kubenswrapper[4982]: I0122 06:07:41.854655 4982 generic.go:334] "Generic (PLEG): container finished" podID="2ed422a7-07eb-4414-bae7-fcb9aaa199d9" containerID="a45fbec35662b2a7367b06c8ca35842a7d9b4b669a0e31848fe6dbe6b77ed63c" exitCode=0 Jan 22 06:07:41 crc kubenswrapper[4982]: I0122 06:07:41.855717 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"46bc1b22-5f45-461f-b090-9c13dd126a6f","Type":"ContainerStarted","Data":"721732ee776a7fa45bae4350006c5ef137e04ff60dab3393e22f48cb252d8575"} Jan 22 06:07:41 crc kubenswrapper[4982]: I0122 06:07:41.855762 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 22 06:07:41 crc kubenswrapper[4982]: I0122 06:07:41.877205 4982 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="48f850ac-7f8e-43f2-8dae-6038d6bd0666" podUID="46bc1b22-5f45-461f-b090-9c13dd126a6f" Jan 22 06:07:41 crc kubenswrapper[4982]: I0122 06:07:41.878338 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=2.878328697 podStartE2EDuration="2.878328697s" podCreationTimestamp="2026-01-22 06:07:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:07:41.872923626 +0000 UTC m=+1322.711561629" watchObservedRunningTime="2026-01-22 06:07:41.878328697 +0000 UTC m=+1322.716966700" Jan 22 06:07:44 crc kubenswrapper[4982]: I0122 06:07:44.539840 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Jan 22 06:07:45 crc kubenswrapper[4982]: I0122 06:07:45.564834 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:07:45 crc kubenswrapper[4982]: I0122 06:07:45.565117 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b344d2d6-b86e-47d1-9e84-4e263485a947" containerName="ceilometer-central-agent" containerID="cri-o://0b77220f21d4b65683b2e29a26b6544629d478bbf63f17518b7687cd1c70b26c" gracePeriod=30 Jan 22 06:07:45 crc kubenswrapper[4982]: I0122 06:07:45.565170 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b344d2d6-b86e-47d1-9e84-4e263485a947" containerName="sg-core" containerID="cri-o://31b3739206efab9692a7d4ef2a59517898b27c556eef089353eca84e5d2194f1" gracePeriod=30 Jan 22 06:07:45 crc kubenswrapper[4982]: I0122 06:07:45.565222 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b344d2d6-b86e-47d1-9e84-4e263485a947" containerName="ceilometer-notification-agent" containerID="cri-o://9ad8537d18d405afe1f63e3afa6e20107eaa51440fd6f3cce64297ecaa11df44" gracePeriod=30 Jan 22 06:07:45 crc kubenswrapper[4982]: I0122 06:07:45.565209 4982 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/ceilometer-0" podUID="b344d2d6-b86e-47d1-9e84-4e263485a947" containerName="proxy-httpd" containerID="cri-o://4e439f7020132424a5b438314238b43906741194e87216ab4961b21aeb352c53" gracePeriod=30 Jan 22 06:07:45 crc kubenswrapper[4982]: I0122 06:07:45.669277 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="b344d2d6-b86e-47d1-9e84-4e263485a947" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.161:3000/\": read tcp 10.217.0.2:44784->10.217.0.161:3000: read: connection reset by peer" Jan 22 06:07:45 crc kubenswrapper[4982]: I0122 06:07:45.892376 4982 generic.go:334] "Generic (PLEG): container finished" podID="b344d2d6-b86e-47d1-9e84-4e263485a947" containerID="4e439f7020132424a5b438314238b43906741194e87216ab4961b21aeb352c53" exitCode=0 Jan 22 06:07:45 crc kubenswrapper[4982]: I0122 06:07:45.892601 4982 generic.go:334] "Generic (PLEG): container finished" podID="b344d2d6-b86e-47d1-9e84-4e263485a947" containerID="31b3739206efab9692a7d4ef2a59517898b27c556eef089353eca84e5d2194f1" exitCode=2 Jan 22 06:07:45 crc kubenswrapper[4982]: I0122 06:07:45.892610 4982 generic.go:334] "Generic (PLEG): container finished" podID="b344d2d6-b86e-47d1-9e84-4e263485a947" containerID="0b77220f21d4b65683b2e29a26b6544629d478bbf63f17518b7687cd1c70b26c" exitCode=0 Jan 22 06:07:45 crc kubenswrapper[4982]: I0122 06:07:45.892452 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b344d2d6-b86e-47d1-9e84-4e263485a947","Type":"ContainerDied","Data":"4e439f7020132424a5b438314238b43906741194e87216ab4961b21aeb352c53"} Jan 22 06:07:45 crc kubenswrapper[4982]: I0122 06:07:45.892662 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b344d2d6-b86e-47d1-9e84-4e263485a947","Type":"ContainerDied","Data":"31b3739206efab9692a7d4ef2a59517898b27c556eef089353eca84e5d2194f1"} Jan 22 06:07:45 crc kubenswrapper[4982]: I0122 06:07:45.892676 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b344d2d6-b86e-47d1-9e84-4e263485a947","Type":"ContainerDied","Data":"0b77220f21d4b65683b2e29a26b6544629d478bbf63f17518b7687cd1c70b26c"} Jan 22 06:07:45 crc kubenswrapper[4982]: I0122 06:07:45.895346 4982 generic.go:334] "Generic (PLEG): container finished" podID="2ed422a7-07eb-4414-bae7-fcb9aaa199d9" containerID="4d6ddff127942ffff8e65c5a3983df0e512a4c51c2e9ba3184f2583448796e60" exitCode=0 Jan 22 06:07:45 crc kubenswrapper[4982]: I0122 06:07:45.895387 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5566dc6564-s4gjt" event={"ID":"2ed422a7-07eb-4414-bae7-fcb9aaa199d9","Type":"ContainerDied","Data":"4d6ddff127942ffff8e65c5a3983df0e512a4c51c2e9ba3184f2583448796e60"} Jan 22 06:07:46 crc kubenswrapper[4982]: I0122 06:07:46.726106 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-6b4bc7b64f-sqvw9"] Jan 22 06:07:46 crc kubenswrapper[4982]: I0122 06:07:46.728152 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" Jan 22 06:07:46 crc kubenswrapper[4982]: I0122 06:07:46.732976 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Jan 22 06:07:46 crc kubenswrapper[4982]: I0122 06:07:46.751001 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Jan 22 06:07:46 crc kubenswrapper[4982]: I0122 06:07:46.751262 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Jan 22 06:07:46 crc kubenswrapper[4982]: I0122 06:07:46.757532 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-6b4bc7b64f-sqvw9"] Jan 22 06:07:46 crc kubenswrapper[4982]: I0122 06:07:46.807697 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd4f5182-1143-4df0-93c1-33e344a1a204-public-tls-certs\") pod \"swift-proxy-6b4bc7b64f-sqvw9\" (UID: \"bd4f5182-1143-4df0-93c1-33e344a1a204\") " pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" Jan 22 06:07:46 crc kubenswrapper[4982]: I0122 06:07:46.807794 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bd4f5182-1143-4df0-93c1-33e344a1a204-run-httpd\") pod \"swift-proxy-6b4bc7b64f-sqvw9\" (UID: \"bd4f5182-1143-4df0-93c1-33e344a1a204\") " pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" Jan 22 06:07:46 crc kubenswrapper[4982]: I0122 06:07:46.807815 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bd4f5182-1143-4df0-93c1-33e344a1a204-etc-swift\") pod \"swift-proxy-6b4bc7b64f-sqvw9\" (UID: \"bd4f5182-1143-4df0-93c1-33e344a1a204\") " pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" Jan 22 06:07:46 crc kubenswrapper[4982]: I0122 06:07:46.807888 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nx9xf\" (UniqueName: \"kubernetes.io/projected/bd4f5182-1143-4df0-93c1-33e344a1a204-kube-api-access-nx9xf\") pod \"swift-proxy-6b4bc7b64f-sqvw9\" (UID: \"bd4f5182-1143-4df0-93c1-33e344a1a204\") " pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" Jan 22 06:07:46 crc kubenswrapper[4982]: I0122 06:07:46.807918 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd4f5182-1143-4df0-93c1-33e344a1a204-combined-ca-bundle\") pod \"swift-proxy-6b4bc7b64f-sqvw9\" (UID: \"bd4f5182-1143-4df0-93c1-33e344a1a204\") " pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" Jan 22 06:07:46 crc kubenswrapper[4982]: I0122 06:07:46.807943 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bd4f5182-1143-4df0-93c1-33e344a1a204-log-httpd\") pod \"swift-proxy-6b4bc7b64f-sqvw9\" (UID: \"bd4f5182-1143-4df0-93c1-33e344a1a204\") " pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" Jan 22 06:07:46 crc kubenswrapper[4982]: I0122 06:07:46.807974 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd4f5182-1143-4df0-93c1-33e344a1a204-internal-tls-certs\") pod \"swift-proxy-6b4bc7b64f-sqvw9\" (UID: \"bd4f5182-1143-4df0-93c1-33e344a1a204\") " 
pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" Jan 22 06:07:46 crc kubenswrapper[4982]: I0122 06:07:46.808010 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd4f5182-1143-4df0-93c1-33e344a1a204-config-data\") pod \"swift-proxy-6b4bc7b64f-sqvw9\" (UID: \"bd4f5182-1143-4df0-93c1-33e344a1a204\") " pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" Jan 22 06:07:46 crc kubenswrapper[4982]: I0122 06:07:46.910275 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd4f5182-1143-4df0-93c1-33e344a1a204-combined-ca-bundle\") pod \"swift-proxy-6b4bc7b64f-sqvw9\" (UID: \"bd4f5182-1143-4df0-93c1-33e344a1a204\") " pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" Jan 22 06:07:46 crc kubenswrapper[4982]: I0122 06:07:46.910359 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bd4f5182-1143-4df0-93c1-33e344a1a204-log-httpd\") pod \"swift-proxy-6b4bc7b64f-sqvw9\" (UID: \"bd4f5182-1143-4df0-93c1-33e344a1a204\") " pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" Jan 22 06:07:46 crc kubenswrapper[4982]: I0122 06:07:46.910422 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd4f5182-1143-4df0-93c1-33e344a1a204-internal-tls-certs\") pod \"swift-proxy-6b4bc7b64f-sqvw9\" (UID: \"bd4f5182-1143-4df0-93c1-33e344a1a204\") " pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" Jan 22 06:07:46 crc kubenswrapper[4982]: I0122 06:07:46.910455 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd4f5182-1143-4df0-93c1-33e344a1a204-config-data\") pod \"swift-proxy-6b4bc7b64f-sqvw9\" (UID: \"bd4f5182-1143-4df0-93c1-33e344a1a204\") " pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" Jan 22 06:07:46 crc kubenswrapper[4982]: I0122 06:07:46.910532 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd4f5182-1143-4df0-93c1-33e344a1a204-public-tls-certs\") pod \"swift-proxy-6b4bc7b64f-sqvw9\" (UID: \"bd4f5182-1143-4df0-93c1-33e344a1a204\") " pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" Jan 22 06:07:46 crc kubenswrapper[4982]: I0122 06:07:46.910614 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bd4f5182-1143-4df0-93c1-33e344a1a204-run-httpd\") pod \"swift-proxy-6b4bc7b64f-sqvw9\" (UID: \"bd4f5182-1143-4df0-93c1-33e344a1a204\") " pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" Jan 22 06:07:46 crc kubenswrapper[4982]: I0122 06:07:46.910632 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bd4f5182-1143-4df0-93c1-33e344a1a204-etc-swift\") pod \"swift-proxy-6b4bc7b64f-sqvw9\" (UID: \"bd4f5182-1143-4df0-93c1-33e344a1a204\") " pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" Jan 22 06:07:46 crc kubenswrapper[4982]: I0122 06:07:46.910694 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nx9xf\" (UniqueName: \"kubernetes.io/projected/bd4f5182-1143-4df0-93c1-33e344a1a204-kube-api-access-nx9xf\") pod \"swift-proxy-6b4bc7b64f-sqvw9\" (UID: \"bd4f5182-1143-4df0-93c1-33e344a1a204\") " pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" Jan 22 
06:07:46 crc kubenswrapper[4982]: I0122 06:07:46.913404 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bd4f5182-1143-4df0-93c1-33e344a1a204-run-httpd\") pod \"swift-proxy-6b4bc7b64f-sqvw9\" (UID: \"bd4f5182-1143-4df0-93c1-33e344a1a204\") " pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" Jan 22 06:07:46 crc kubenswrapper[4982]: I0122 06:07:46.913525 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bd4f5182-1143-4df0-93c1-33e344a1a204-log-httpd\") pod \"swift-proxy-6b4bc7b64f-sqvw9\" (UID: \"bd4f5182-1143-4df0-93c1-33e344a1a204\") " pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" Jan 22 06:07:46 crc kubenswrapper[4982]: I0122 06:07:46.919920 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd4f5182-1143-4df0-93c1-33e344a1a204-combined-ca-bundle\") pod \"swift-proxy-6b4bc7b64f-sqvw9\" (UID: \"bd4f5182-1143-4df0-93c1-33e344a1a204\") " pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" Jan 22 06:07:46 crc kubenswrapper[4982]: I0122 06:07:46.920705 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd4f5182-1143-4df0-93c1-33e344a1a204-public-tls-certs\") pod \"swift-proxy-6b4bc7b64f-sqvw9\" (UID: \"bd4f5182-1143-4df0-93c1-33e344a1a204\") " pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" Jan 22 06:07:46 crc kubenswrapper[4982]: I0122 06:07:46.921389 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd4f5182-1143-4df0-93c1-33e344a1a204-config-data\") pod \"swift-proxy-6b4bc7b64f-sqvw9\" (UID: \"bd4f5182-1143-4df0-93c1-33e344a1a204\") " pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" Jan 22 06:07:46 crc kubenswrapper[4982]: I0122 06:07:46.923098 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bd4f5182-1143-4df0-93c1-33e344a1a204-etc-swift\") pod \"swift-proxy-6b4bc7b64f-sqvw9\" (UID: \"bd4f5182-1143-4df0-93c1-33e344a1a204\") " pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" Jan 22 06:07:46 crc kubenswrapper[4982]: I0122 06:07:46.942454 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd4f5182-1143-4df0-93c1-33e344a1a204-internal-tls-certs\") pod \"swift-proxy-6b4bc7b64f-sqvw9\" (UID: \"bd4f5182-1143-4df0-93c1-33e344a1a204\") " pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" Jan 22 06:07:46 crc kubenswrapper[4982]: I0122 06:07:46.947398 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nx9xf\" (UniqueName: \"kubernetes.io/projected/bd4f5182-1143-4df0-93c1-33e344a1a204-kube-api-access-nx9xf\") pod \"swift-proxy-6b4bc7b64f-sqvw9\" (UID: \"bd4f5182-1143-4df0-93c1-33e344a1a204\") " pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" Jan 22 06:07:47 crc kubenswrapper[4982]: I0122 06:07:47.047792 4982 util.go:30] "No sandbox for pod can be found. 
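The swift-proxy mount wall above also illustrates the volume plugin types in play: kubernetes.io/secret for certs and config data, projected for etc-swift and the service-account token, empty-dir for run-httpd/log-httpd (host-path appeared earlier for etc-machine-id). A sketch that counts volumes per plugin from the UniqueName fields (same quoting assumptions as before):

    # Minimal sketch: count volumes per kubelet plugin type seen in UniqueName fields.
    import re
    from collections import Counter

    UNIQ = re.compile(r'UniqueName: \\"(kubernetes\.io/[a-z-]+)/')
    plugins = Counter()
    with open("kubelet.log") as f:  # hypothetical path
        for line in f:
            plugins.update(UNIQ.findall(line))
    print(plugins.most_common())  # secret, projected, configmap, empty-dir, host-path
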
Need to start a new one" pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" Jan 22 06:07:49 crc kubenswrapper[4982]: I0122 06:07:49.745779 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.266104 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.267024 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="857e17c8-bcb3-4632-af28-27fe80d6fe2d" containerName="glance-httpd" containerID="cri-o://62770d9d28bf055b1d32a30e4344efe7af7236f32f97857d950427878d78db09" gracePeriod=30 Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.267249 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="857e17c8-bcb3-4632-af28-27fe80d6fe2d" containerName="glance-log" containerID="cri-o://afa461cfe224988c704dc7316c5e22bc1c484aa32d820eb8e724f5d3932f08a2" gracePeriod=30 Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.667421 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5566dc6564-s4gjt" Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.704459 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ed422a7-07eb-4414-bae7-fcb9aaa199d9-ovndb-tls-certs\") pod \"2ed422a7-07eb-4414-bae7-fcb9aaa199d9\" (UID: \"2ed422a7-07eb-4414-bae7-fcb9aaa199d9\") " Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.704580 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2ed422a7-07eb-4414-bae7-fcb9aaa199d9-httpd-config\") pod \"2ed422a7-07eb-4414-bae7-fcb9aaa199d9\" (UID: \"2ed422a7-07eb-4414-bae7-fcb9aaa199d9\") " Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.704613 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55zhm\" (UniqueName: \"kubernetes.io/projected/2ed422a7-07eb-4414-bae7-fcb9aaa199d9-kube-api-access-55zhm\") pod \"2ed422a7-07eb-4414-bae7-fcb9aaa199d9\" (UID: \"2ed422a7-07eb-4414-bae7-fcb9aaa199d9\") " Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.704718 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2ed422a7-07eb-4414-bae7-fcb9aaa199d9-config\") pod \"2ed422a7-07eb-4414-bae7-fcb9aaa199d9\" (UID: \"2ed422a7-07eb-4414-bae7-fcb9aaa199d9\") " Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.704738 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ed422a7-07eb-4414-bae7-fcb9aaa199d9-combined-ca-bundle\") pod \"2ed422a7-07eb-4414-bae7-fcb9aaa199d9\" (UID: \"2ed422a7-07eb-4414-bae7-fcb9aaa199d9\") " Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.710236 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ed422a7-07eb-4414-bae7-fcb9aaa199d9-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "2ed422a7-07eb-4414-bae7-fcb9aaa199d9" (UID: "2ed422a7-07eb-4414-bae7-fcb9aaa199d9"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.717485 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ed422a7-07eb-4414-bae7-fcb9aaa199d9-kube-api-access-55zhm" (OuterVolumeSpecName: "kube-api-access-55zhm") pod "2ed422a7-07eb-4414-bae7-fcb9aaa199d9" (UID: "2ed422a7-07eb-4414-bae7-fcb9aaa199d9"). InnerVolumeSpecName "kube-api-access-55zhm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.749447 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.790090 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ed422a7-07eb-4414-bae7-fcb9aaa199d9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2ed422a7-07eb-4414-bae7-fcb9aaa199d9" (UID: "2ed422a7-07eb-4414-bae7-fcb9aaa199d9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.790141 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ed422a7-07eb-4414-bae7-fcb9aaa199d9-config" (OuterVolumeSpecName: "config") pod "2ed422a7-07eb-4414-bae7-fcb9aaa199d9" (UID: "2ed422a7-07eb-4414-bae7-fcb9aaa199d9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.804977 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ed422a7-07eb-4414-bae7-fcb9aaa199d9-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "2ed422a7-07eb-4414-bae7-fcb9aaa199d9" (UID: "2ed422a7-07eb-4414-bae7-fcb9aaa199d9"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.806825 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b344d2d6-b86e-47d1-9e84-4e263485a947-scripts\") pod \"b344d2d6-b86e-47d1-9e84-4e263485a947\" (UID: \"b344d2d6-b86e-47d1-9e84-4e263485a947\") " Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.806902 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b344d2d6-b86e-47d1-9e84-4e263485a947-run-httpd\") pod \"b344d2d6-b86e-47d1-9e84-4e263485a947\" (UID: \"b344d2d6-b86e-47d1-9e84-4e263485a947\") " Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.807016 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dhbnd\" (UniqueName: \"kubernetes.io/projected/b344d2d6-b86e-47d1-9e84-4e263485a947-kube-api-access-dhbnd\") pod \"b344d2d6-b86e-47d1-9e84-4e263485a947\" (UID: \"b344d2d6-b86e-47d1-9e84-4e263485a947\") " Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.807090 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b344d2d6-b86e-47d1-9e84-4e263485a947-sg-core-conf-yaml\") pod \"b344d2d6-b86e-47d1-9e84-4e263485a947\" (UID: \"b344d2d6-b86e-47d1-9e84-4e263485a947\") " Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.807204 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b344d2d6-b86e-47d1-9e84-4e263485a947-combined-ca-bundle\") pod \"b344d2d6-b86e-47d1-9e84-4e263485a947\" (UID: \"b344d2d6-b86e-47d1-9e84-4e263485a947\") " Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.807231 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b344d2d6-b86e-47d1-9e84-4e263485a947-config-data\") pod \"b344d2d6-b86e-47d1-9e84-4e263485a947\" (UID: \"b344d2d6-b86e-47d1-9e84-4e263485a947\") " Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.807254 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b344d2d6-b86e-47d1-9e84-4e263485a947-log-httpd\") pod \"b344d2d6-b86e-47d1-9e84-4e263485a947\" (UID: \"b344d2d6-b86e-47d1-9e84-4e263485a947\") " Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.807725 4982 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ed422a7-07eb-4414-bae7-fcb9aaa199d9-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.807743 4982 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2ed422a7-07eb-4414-bae7-fcb9aaa199d9-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.807753 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55zhm\" (UniqueName: \"kubernetes.io/projected/2ed422a7-07eb-4414-bae7-fcb9aaa199d9-kube-api-access-55zhm\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.807767 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/2ed422a7-07eb-4414-bae7-fcb9aaa199d9-config\") on node \"crc\" 
DevicePath \"\"" Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.807779 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ed422a7-07eb-4414-bae7-fcb9aaa199d9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.808307 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b344d2d6-b86e-47d1-9e84-4e263485a947-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b344d2d6-b86e-47d1-9e84-4e263485a947" (UID: "b344d2d6-b86e-47d1-9e84-4e263485a947"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.811107 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b344d2d6-b86e-47d1-9e84-4e263485a947-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b344d2d6-b86e-47d1-9e84-4e263485a947" (UID: "b344d2d6-b86e-47d1-9e84-4e263485a947"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.814059 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b344d2d6-b86e-47d1-9e84-4e263485a947-scripts" (OuterVolumeSpecName: "scripts") pod "b344d2d6-b86e-47d1-9e84-4e263485a947" (UID: "b344d2d6-b86e-47d1-9e84-4e263485a947"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.816833 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b344d2d6-b86e-47d1-9e84-4e263485a947-kube-api-access-dhbnd" (OuterVolumeSpecName: "kube-api-access-dhbnd") pod "b344d2d6-b86e-47d1-9e84-4e263485a947" (UID: "b344d2d6-b86e-47d1-9e84-4e263485a947"). InnerVolumeSpecName "kube-api-access-dhbnd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.839300 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b344d2d6-b86e-47d1-9e84-4e263485a947-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b344d2d6-b86e-47d1-9e84-4e263485a947" (UID: "b344d2d6-b86e-47d1-9e84-4e263485a947"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.875118 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b344d2d6-b86e-47d1-9e84-4e263485a947-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b344d2d6-b86e-47d1-9e84-4e263485a947" (UID: "b344d2d6-b86e-47d1-9e84-4e263485a947"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.909341 4982 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b344d2d6-b86e-47d1-9e84-4e263485a947-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.909366 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b344d2d6-b86e-47d1-9e84-4e263485a947-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.909375 4982 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b344d2d6-b86e-47d1-9e84-4e263485a947-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.909383 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b344d2d6-b86e-47d1-9e84-4e263485a947-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.909391 4982 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b344d2d6-b86e-47d1-9e84-4e263485a947-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.909399 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dhbnd\" (UniqueName: \"kubernetes.io/projected/b344d2d6-b86e-47d1-9e84-4e263485a947-kube-api-access-dhbnd\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.916261 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b344d2d6-b86e-47d1-9e84-4e263485a947-config-data" (OuterVolumeSpecName: "config-data") pod "b344d2d6-b86e-47d1-9e84-4e263485a947" (UID: "b344d2d6-b86e-47d1-9e84-4e263485a947"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.972165 4982 generic.go:334] "Generic (PLEG): container finished" podID="b344d2d6-b86e-47d1-9e84-4e263485a947" containerID="9ad8537d18d405afe1f63e3afa6e20107eaa51440fd6f3cce64297ecaa11df44" exitCode=0 Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.972228 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b344d2d6-b86e-47d1-9e84-4e263485a947","Type":"ContainerDied","Data":"9ad8537d18d405afe1f63e3afa6e20107eaa51440fd6f3cce64297ecaa11df44"} Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.972258 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b344d2d6-b86e-47d1-9e84-4e263485a947","Type":"ContainerDied","Data":"50f993beac015d7d4c8d95f4c6871c56731d4f492a2c6c713a52a05c414a48f4"} Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.972276 4982 scope.go:117] "RemoveContainer" containerID="4e439f7020132424a5b438314238b43906741194e87216ab4961b21aeb352c53" Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.972400 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.982290 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5566dc6564-s4gjt" event={"ID":"2ed422a7-07eb-4414-bae7-fcb9aaa199d9","Type":"ContainerDied","Data":"e16fd424ea8881539b7234c25b057b3ae2742238566ac87eec31a1739d13f738"} Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.982303 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5566dc6564-s4gjt" Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.988949 4982 generic.go:334] "Generic (PLEG): container finished" podID="857e17c8-bcb3-4632-af28-27fe80d6fe2d" containerID="afa461cfe224988c704dc7316c5e22bc1c484aa32d820eb8e724f5d3932f08a2" exitCode=143 Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.989071 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"857e17c8-bcb3-4632-af28-27fe80d6fe2d","Type":"ContainerDied","Data":"afa461cfe224988c704dc7316c5e22bc1c484aa32d820eb8e724f5d3932f08a2"} Jan 22 06:07:51 crc kubenswrapper[4982]: I0122 06:07:51.991040 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"46bc1b22-5f45-461f-b090-9c13dd126a6f","Type":"ContainerStarted","Data":"c5436ddb9b05921d7b248ae7c45b009c65f9ad0e3d87b2a6e20de5649be61f53"} Jan 22 06:07:52 crc kubenswrapper[4982]: W0122 06:07:52.000678 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbd4f5182_1143_4df0_93c1_33e344a1a204.slice/crio-2c8e942d6ed813c8223d98ac6012f17f7a1ceea3c31bdaf488c2ad3651dbc7ad WatchSource:0}: Error finding container 2c8e942d6ed813c8223d98ac6012f17f7a1ceea3c31bdaf488c2ad3651dbc7ad: Status 404 returned error can't find the container with id 2c8e942d6ed813c8223d98ac6012f17f7a1ceea3c31bdaf488c2ad3651dbc7ad Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.000713 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-6b4bc7b64f-sqvw9"] Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.002456 4982 scope.go:117] "RemoveContainer" containerID="31b3739206efab9692a7d4ef2a59517898b27c556eef089353eca84e5d2194f1" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.010623 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.096793108 podStartE2EDuration="12.010607063s" podCreationTimestamp="2026-01-22 06:07:40 +0000 UTC" firstStartedPulling="2026-01-22 06:07:41.426029205 +0000 UTC m=+1322.264667208" lastFinishedPulling="2026-01-22 06:07:51.33984316 +0000 UTC m=+1332.178481163" observedRunningTime="2026-01-22 06:07:52.007940653 +0000 UTC m=+1332.846578666" watchObservedRunningTime="2026-01-22 06:07:52.010607063 +0000 UTC m=+1332.849245066" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.010998 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b344d2d6-b86e-47d1-9e84-4e263485a947-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.034183 4982 scope.go:117] "RemoveContainer" containerID="9ad8537d18d405afe1f63e3afa6e20107eaa51440fd6f3cce64297ecaa11df44" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.040446 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 
06:07:52.047075 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.067250 4982 scope.go:117] "RemoveContainer" containerID="0b77220f21d4b65683b2e29a26b6544629d478bbf63f17518b7687cd1c70b26c" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.070540 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5566dc6564-s4gjt"] Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.084057 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:07:52 crc kubenswrapper[4982]: E0122 06:07:52.084496 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ed422a7-07eb-4414-bae7-fcb9aaa199d9" containerName="neutron-api" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.084511 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ed422a7-07eb-4414-bae7-fcb9aaa199d9" containerName="neutron-api" Jan 22 06:07:52 crc kubenswrapper[4982]: E0122 06:07:52.084523 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b344d2d6-b86e-47d1-9e84-4e263485a947" containerName="ceilometer-notification-agent" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.084531 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b344d2d6-b86e-47d1-9e84-4e263485a947" containerName="ceilometer-notification-agent" Jan 22 06:07:52 crc kubenswrapper[4982]: E0122 06:07:52.084555 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b344d2d6-b86e-47d1-9e84-4e263485a947" containerName="proxy-httpd" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.084563 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b344d2d6-b86e-47d1-9e84-4e263485a947" containerName="proxy-httpd" Jan 22 06:07:52 crc kubenswrapper[4982]: E0122 06:07:52.084587 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ed422a7-07eb-4414-bae7-fcb9aaa199d9" containerName="neutron-httpd" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.084593 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ed422a7-07eb-4414-bae7-fcb9aaa199d9" containerName="neutron-httpd" Jan 22 06:07:52 crc kubenswrapper[4982]: E0122 06:07:52.084605 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b344d2d6-b86e-47d1-9e84-4e263485a947" containerName="sg-core" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.084611 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b344d2d6-b86e-47d1-9e84-4e263485a947" containerName="sg-core" Jan 22 06:07:52 crc kubenswrapper[4982]: E0122 06:07:52.084624 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b344d2d6-b86e-47d1-9e84-4e263485a947" containerName="ceilometer-central-agent" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.084631 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b344d2d6-b86e-47d1-9e84-4e263485a947" containerName="ceilometer-central-agent" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.086740 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ed422a7-07eb-4414-bae7-fcb9aaa199d9" containerName="neutron-api" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.086780 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ed422a7-07eb-4414-bae7-fcb9aaa199d9" containerName="neutron-httpd" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.086795 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b344d2d6-b86e-47d1-9e84-4e263485a947" 
containerName="ceilometer-notification-agent" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.086825 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b344d2d6-b86e-47d1-9e84-4e263485a947" containerName="proxy-httpd" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.088869 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b344d2d6-b86e-47d1-9e84-4e263485a947" containerName="sg-core" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.088933 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b344d2d6-b86e-47d1-9e84-4e263485a947" containerName="ceilometer-central-agent" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.103481 4982 scope.go:117] "RemoveContainer" containerID="4e439f7020132424a5b438314238b43906741194e87216ab4961b21aeb352c53" Jan 22 06:07:52 crc kubenswrapper[4982]: E0122 06:07:52.104065 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e439f7020132424a5b438314238b43906741194e87216ab4961b21aeb352c53\": container with ID starting with 4e439f7020132424a5b438314238b43906741194e87216ab4961b21aeb352c53 not found: ID does not exist" containerID="4e439f7020132424a5b438314238b43906741194e87216ab4961b21aeb352c53" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.104128 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e439f7020132424a5b438314238b43906741194e87216ab4961b21aeb352c53"} err="failed to get container status \"4e439f7020132424a5b438314238b43906741194e87216ab4961b21aeb352c53\": rpc error: code = NotFound desc = could not find container \"4e439f7020132424a5b438314238b43906741194e87216ab4961b21aeb352c53\": container with ID starting with 4e439f7020132424a5b438314238b43906741194e87216ab4961b21aeb352c53 not found: ID does not exist" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.104155 4982 scope.go:117] "RemoveContainer" containerID="31b3739206efab9692a7d4ef2a59517898b27c556eef089353eca84e5d2194f1" Jan 22 06:07:52 crc kubenswrapper[4982]: E0122 06:07:52.104463 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31b3739206efab9692a7d4ef2a59517898b27c556eef089353eca84e5d2194f1\": container with ID starting with 31b3739206efab9692a7d4ef2a59517898b27c556eef089353eca84e5d2194f1 not found: ID does not exist" containerID="31b3739206efab9692a7d4ef2a59517898b27c556eef089353eca84e5d2194f1" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.104478 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31b3739206efab9692a7d4ef2a59517898b27c556eef089353eca84e5d2194f1"} err="failed to get container status \"31b3739206efab9692a7d4ef2a59517898b27c556eef089353eca84e5d2194f1\": rpc error: code = NotFound desc = could not find container \"31b3739206efab9692a7d4ef2a59517898b27c556eef089353eca84e5d2194f1\": container with ID starting with 31b3739206efab9692a7d4ef2a59517898b27c556eef089353eca84e5d2194f1 not found: ID does not exist" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.104490 4982 scope.go:117] "RemoveContainer" containerID="9ad8537d18d405afe1f63e3afa6e20107eaa51440fd6f3cce64297ecaa11df44" Jan 22 06:07:52 crc kubenswrapper[4982]: E0122 06:07:52.104652 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"9ad8537d18d405afe1f63e3afa6e20107eaa51440fd6f3cce64297ecaa11df44\": container with ID starting with 9ad8537d18d405afe1f63e3afa6e20107eaa51440fd6f3cce64297ecaa11df44 not found: ID does not exist" containerID="9ad8537d18d405afe1f63e3afa6e20107eaa51440fd6f3cce64297ecaa11df44" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.104669 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ad8537d18d405afe1f63e3afa6e20107eaa51440fd6f3cce64297ecaa11df44"} err="failed to get container status \"9ad8537d18d405afe1f63e3afa6e20107eaa51440fd6f3cce64297ecaa11df44\": rpc error: code = NotFound desc = could not find container \"9ad8537d18d405afe1f63e3afa6e20107eaa51440fd6f3cce64297ecaa11df44\": container with ID starting with 9ad8537d18d405afe1f63e3afa6e20107eaa51440fd6f3cce64297ecaa11df44 not found: ID does not exist" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.104680 4982 scope.go:117] "RemoveContainer" containerID="0b77220f21d4b65683b2e29a26b6544629d478bbf63f17518b7687cd1c70b26c" Jan 22 06:07:52 crc kubenswrapper[4982]: E0122 06:07:52.107244 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b77220f21d4b65683b2e29a26b6544629d478bbf63f17518b7687cd1c70b26c\": container with ID starting with 0b77220f21d4b65683b2e29a26b6544629d478bbf63f17518b7687cd1c70b26c not found: ID does not exist" containerID="0b77220f21d4b65683b2e29a26b6544629d478bbf63f17518b7687cd1c70b26c" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.107283 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b77220f21d4b65683b2e29a26b6544629d478bbf63f17518b7687cd1c70b26c"} err="failed to get container status \"0b77220f21d4b65683b2e29a26b6544629d478bbf63f17518b7687cd1c70b26c\": rpc error: code = NotFound desc = could not find container \"0b77220f21d4b65683b2e29a26b6544629d478bbf63f17518b7687cd1c70b26c\": container with ID starting with 0b77220f21d4b65683b2e29a26b6544629d478bbf63f17518b7687cd1c70b26c not found: ID does not exist" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.107317 4982 scope.go:117] "RemoveContainer" containerID="a45fbec35662b2a7367b06c8ca35842a7d9b4b669a0e31848fe6dbe6b77ed63c" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.115370 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-5566dc6564-s4gjt"] Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.115508 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.118178 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.118348 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.130202 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.151091 4982 scope.go:117] "RemoveContainer" containerID="4d6ddff127942ffff8e65c5a3983df0e512a4c51c2e9ba3184f2583448796e60" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.219372 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abece5e4-1910-47c5-9bf6-90056697065b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"abece5e4-1910-47c5-9bf6-90056697065b\") " pod="openstack/ceilometer-0" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.219578 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6tr9d\" (UniqueName: \"kubernetes.io/projected/abece5e4-1910-47c5-9bf6-90056697065b-kube-api-access-6tr9d\") pod \"ceilometer-0\" (UID: \"abece5e4-1910-47c5-9bf6-90056697065b\") " pod="openstack/ceilometer-0" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.219613 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/abece5e4-1910-47c5-9bf6-90056697065b-scripts\") pod \"ceilometer-0\" (UID: \"abece5e4-1910-47c5-9bf6-90056697065b\") " pod="openstack/ceilometer-0" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.219677 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/abece5e4-1910-47c5-9bf6-90056697065b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"abece5e4-1910-47c5-9bf6-90056697065b\") " pod="openstack/ceilometer-0" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.219699 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/abece5e4-1910-47c5-9bf6-90056697065b-log-httpd\") pod \"ceilometer-0\" (UID: \"abece5e4-1910-47c5-9bf6-90056697065b\") " pod="openstack/ceilometer-0" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.219728 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abece5e4-1910-47c5-9bf6-90056697065b-config-data\") pod \"ceilometer-0\" (UID: \"abece5e4-1910-47c5-9bf6-90056697065b\") " pod="openstack/ceilometer-0" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.219746 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/abece5e4-1910-47c5-9bf6-90056697065b-run-httpd\") pod \"ceilometer-0\" (UID: \"abece5e4-1910-47c5-9bf6-90056697065b\") " pod="openstack/ceilometer-0" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.320904 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/abece5e4-1910-47c5-9bf6-90056697065b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"abece5e4-1910-47c5-9bf6-90056697065b\") " pod="openstack/ceilometer-0" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.320950 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/abece5e4-1910-47c5-9bf6-90056697065b-log-httpd\") pod \"ceilometer-0\" (UID: \"abece5e4-1910-47c5-9bf6-90056697065b\") " pod="openstack/ceilometer-0" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.320984 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abece5e4-1910-47c5-9bf6-90056697065b-config-data\") pod \"ceilometer-0\" (UID: \"abece5e4-1910-47c5-9bf6-90056697065b\") " pod="openstack/ceilometer-0" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.321005 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/abece5e4-1910-47c5-9bf6-90056697065b-run-httpd\") pod \"ceilometer-0\" (UID: \"abece5e4-1910-47c5-9bf6-90056697065b\") " pod="openstack/ceilometer-0" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.321033 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abece5e4-1910-47c5-9bf6-90056697065b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"abece5e4-1910-47c5-9bf6-90056697065b\") " pod="openstack/ceilometer-0" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.321602 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/abece5e4-1910-47c5-9bf6-90056697065b-log-httpd\") pod \"ceilometer-0\" (UID: \"abece5e4-1910-47c5-9bf6-90056697065b\") " pod="openstack/ceilometer-0" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.321659 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6tr9d\" (UniqueName: \"kubernetes.io/projected/abece5e4-1910-47c5-9bf6-90056697065b-kube-api-access-6tr9d\") pod \"ceilometer-0\" (UID: \"abece5e4-1910-47c5-9bf6-90056697065b\") " pod="openstack/ceilometer-0" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.321679 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/abece5e4-1910-47c5-9bf6-90056697065b-scripts\") pod \"ceilometer-0\" (UID: \"abece5e4-1910-47c5-9bf6-90056697065b\") " pod="openstack/ceilometer-0" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.321795 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/abece5e4-1910-47c5-9bf6-90056697065b-run-httpd\") pod \"ceilometer-0\" (UID: \"abece5e4-1910-47c5-9bf6-90056697065b\") " pod="openstack/ceilometer-0" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.326295 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/abece5e4-1910-47c5-9bf6-90056697065b-scripts\") pod \"ceilometer-0\" (UID: \"abece5e4-1910-47c5-9bf6-90056697065b\") " pod="openstack/ceilometer-0" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.326724 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/abece5e4-1910-47c5-9bf6-90056697065b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"abece5e4-1910-47c5-9bf6-90056697065b\") " pod="openstack/ceilometer-0" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.328842 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abece5e4-1910-47c5-9bf6-90056697065b-config-data\") pod \"ceilometer-0\" (UID: \"abece5e4-1910-47c5-9bf6-90056697065b\") " pod="openstack/ceilometer-0" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.341799 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/abece5e4-1910-47c5-9bf6-90056697065b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"abece5e4-1910-47c5-9bf6-90056697065b\") " pod="openstack/ceilometer-0" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.343674 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6tr9d\" (UniqueName: \"kubernetes.io/projected/abece5e4-1910-47c5-9bf6-90056697065b-kube-api-access-6tr9d\") pod \"ceilometer-0\" (UID: \"abece5e4-1910-47c5-9bf6-90056697065b\") " pod="openstack/ceilometer-0" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.344374 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.344605 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="dacae0a7-2b2d-44dc-a676-027055d56035" containerName="glance-log" containerID="cri-o://efc543e5393157011a31dd852ae15b06e5f944fc56ddcba577afd5e326c59667" gracePeriod=30 Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.344735 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="dacae0a7-2b2d-44dc-a676-027055d56035" containerName="glance-httpd" containerID="cri-o://f33bced3fdf444f850ce7aa8ab0dba78c9c52dc2edae2ac10bfb3499fc560375" gracePeriod=30 Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.439754 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.919882 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:07:52 crc kubenswrapper[4982]: I0122 06:07:52.933544 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:07:53 crc kubenswrapper[4982]: I0122 06:07:53.007040 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"abece5e4-1910-47c5-9bf6-90056697065b","Type":"ContainerStarted","Data":"63c61da0ff110741a7f3993da86cf0bc0f9feff99e042a15814a1e560ef111fc"} Jan 22 06:07:53 crc kubenswrapper[4982]: I0122 06:07:53.010935 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" event={"ID":"bd4f5182-1143-4df0-93c1-33e344a1a204","Type":"ContainerStarted","Data":"9bb36d2958a4e7af85598bed16920c7bf47db7bcc5efe40a9732232c4386465d"} Jan 22 06:07:53 crc kubenswrapper[4982]: I0122 06:07:53.010984 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" event={"ID":"bd4f5182-1143-4df0-93c1-33e344a1a204","Type":"ContainerStarted","Data":"0142fcabb077a8f09a4d3ee2f569822645109a54f78b12da578c3ec93916b8e4"} Jan 22 06:07:53 crc kubenswrapper[4982]: I0122 06:07:53.010999 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" event={"ID":"bd4f5182-1143-4df0-93c1-33e344a1a204","Type":"ContainerStarted","Data":"2c8e942d6ed813c8223d98ac6012f17f7a1ceea3c31bdaf488c2ad3651dbc7ad"} Jan 22 06:07:53 crc kubenswrapper[4982]: I0122 06:07:53.012417 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" Jan 22 06:07:53 crc kubenswrapper[4982]: I0122 06:07:53.012450 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" Jan 22 06:07:53 crc kubenswrapper[4982]: I0122 06:07:53.013969 4982 generic.go:334] "Generic (PLEG): container finished" podID="dacae0a7-2b2d-44dc-a676-027055d56035" containerID="efc543e5393157011a31dd852ae15b06e5f944fc56ddcba577afd5e326c59667" exitCode=143 Jan 22 06:07:53 crc kubenswrapper[4982]: I0122 06:07:53.014027 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"dacae0a7-2b2d-44dc-a676-027055d56035","Type":"ContainerDied","Data":"efc543e5393157011a31dd852ae15b06e5f944fc56ddcba577afd5e326c59667"} Jan 22 06:07:53 crc kubenswrapper[4982]: I0122 06:07:53.034749 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" podStartSLOduration=7.034727065 podStartE2EDuration="7.034727065s" podCreationTimestamp="2026-01-22 06:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:07:53.032552558 +0000 UTC m=+1333.871190571" watchObservedRunningTime="2026-01-22 06:07:53.034727065 +0000 UTC m=+1333.873365078" Jan 22 06:07:53 crc kubenswrapper[4982]: I0122 06:07:53.729786 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ed422a7-07eb-4414-bae7-fcb9aaa199d9" path="/var/lib/kubelet/pods/2ed422a7-07eb-4414-bae7-fcb9aaa199d9/volumes" Jan 22 06:07:53 crc kubenswrapper[4982]: I0122 06:07:53.730951 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b344d2d6-b86e-47d1-9e84-4e263485a947" 
path="/var/lib/kubelet/pods/b344d2d6-b86e-47d1-9e84-4e263485a947/volumes" Jan 22 06:07:54 crc kubenswrapper[4982]: I0122 06:07:54.025244 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"abece5e4-1910-47c5-9bf6-90056697065b","Type":"ContainerStarted","Data":"750ee879913e471b7ab424d342338378ff5c724bc45571cbb4013a670a051955"} Jan 22 06:07:54 crc kubenswrapper[4982]: I0122 06:07:54.025288 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"abece5e4-1910-47c5-9bf6-90056697065b","Type":"ContainerStarted","Data":"8d130305a964d24d975d7d00f2c576fff4f62206523ba9ed6b96535002d4e755"} Jan 22 06:07:54 crc kubenswrapper[4982]: I0122 06:07:54.901364 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.034081 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"abece5e4-1910-47c5-9bf6-90056697065b","Type":"ContainerStarted","Data":"8d6846310ff71b6d912d20ee69b25cd411b24275c0d591ca6bfe3f9a9363ca1f"} Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.036590 4982 generic.go:334] "Generic (PLEG): container finished" podID="857e17c8-bcb3-4632-af28-27fe80d6fe2d" containerID="62770d9d28bf055b1d32a30e4344efe7af7236f32f97857d950427878d78db09" exitCode=0 Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.036648 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"857e17c8-bcb3-4632-af28-27fe80d6fe2d","Type":"ContainerDied","Data":"62770d9d28bf055b1d32a30e4344efe7af7236f32f97857d950427878d78db09"} Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.036673 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"857e17c8-bcb3-4632-af28-27fe80d6fe2d","Type":"ContainerDied","Data":"b33e9a8b52662c00f5a92430e31798aef4291003dde477d2651f2f8b8caccebb"} Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.036691 4982 scope.go:117] "RemoveContainer" containerID="62770d9d28bf055b1d32a30e4344efe7af7236f32f97857d950427878d78db09" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.036637 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.057426 4982 scope.go:117] "RemoveContainer" containerID="afa461cfe224988c704dc7316c5e22bc1c484aa32d820eb8e724f5d3932f08a2" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.069300 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/857e17c8-bcb3-4632-af28-27fe80d6fe2d-logs\") pod \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") " Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.069350 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/857e17c8-bcb3-4632-af28-27fe80d6fe2d-public-tls-certs\") pod \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") " Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.069623 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/857e17c8-bcb3-4632-af28-27fe80d6fe2d-scripts\") pod \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") " Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.069727 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/857e17c8-bcb3-4632-af28-27fe80d6fe2d-httpd-run\") pod \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") " Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.069843 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6bhqk\" (UniqueName: \"kubernetes.io/projected/857e17c8-bcb3-4632-af28-27fe80d6fe2d-kube-api-access-6bhqk\") pod \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") " Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.069916 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/857e17c8-bcb3-4632-af28-27fe80d6fe2d-config-data\") pod \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") " Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.069951 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/857e17c8-bcb3-4632-af28-27fe80d6fe2d-logs" (OuterVolumeSpecName: "logs") pod "857e17c8-bcb3-4632-af28-27fe80d6fe2d" (UID: "857e17c8-bcb3-4632-af28-27fe80d6fe2d"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.070016 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") " Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.070041 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/857e17c8-bcb3-4632-af28-27fe80d6fe2d-combined-ca-bundle\") pod \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\" (UID: \"857e17c8-bcb3-4632-af28-27fe80d6fe2d\") " Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.070236 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/857e17c8-bcb3-4632-af28-27fe80d6fe2d-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "857e17c8-bcb3-4632-af28-27fe80d6fe2d" (UID: "857e17c8-bcb3-4632-af28-27fe80d6fe2d"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.070985 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/857e17c8-bcb3-4632-af28-27fe80d6fe2d-logs\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.071015 4982 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/857e17c8-bcb3-4632-af28-27fe80d6fe2d-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.077975 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/857e17c8-bcb3-4632-af28-27fe80d6fe2d-scripts" (OuterVolumeSpecName: "scripts") pod "857e17c8-bcb3-4632-af28-27fe80d6fe2d" (UID: "857e17c8-bcb3-4632-af28-27fe80d6fe2d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.078065 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/857e17c8-bcb3-4632-af28-27fe80d6fe2d-kube-api-access-6bhqk" (OuterVolumeSpecName: "kube-api-access-6bhqk") pod "857e17c8-bcb3-4632-af28-27fe80d6fe2d" (UID: "857e17c8-bcb3-4632-af28-27fe80d6fe2d"). InnerVolumeSpecName "kube-api-access-6bhqk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.078106 4982 scope.go:117] "RemoveContainer" containerID="62770d9d28bf055b1d32a30e4344efe7af7236f32f97857d950427878d78db09" Jan 22 06:07:55 crc kubenswrapper[4982]: E0122 06:07:55.079988 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62770d9d28bf055b1d32a30e4344efe7af7236f32f97857d950427878d78db09\": container with ID starting with 62770d9d28bf055b1d32a30e4344efe7af7236f32f97857d950427878d78db09 not found: ID does not exist" containerID="62770d9d28bf055b1d32a30e4344efe7af7236f32f97857d950427878d78db09" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.080021 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62770d9d28bf055b1d32a30e4344efe7af7236f32f97857d950427878d78db09"} err="failed to get container status \"62770d9d28bf055b1d32a30e4344efe7af7236f32f97857d950427878d78db09\": rpc error: code = NotFound desc = could not find container \"62770d9d28bf055b1d32a30e4344efe7af7236f32f97857d950427878d78db09\": container with ID starting with 62770d9d28bf055b1d32a30e4344efe7af7236f32f97857d950427878d78db09 not found: ID does not exist" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.080044 4982 scope.go:117] "RemoveContainer" containerID="afa461cfe224988c704dc7316c5e22bc1c484aa32d820eb8e724f5d3932f08a2" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.080987 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "glance") pod "857e17c8-bcb3-4632-af28-27fe80d6fe2d" (UID: "857e17c8-bcb3-4632-af28-27fe80d6fe2d"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 22 06:07:55 crc kubenswrapper[4982]: E0122 06:07:55.082010 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"afa461cfe224988c704dc7316c5e22bc1c484aa32d820eb8e724f5d3932f08a2\": container with ID starting with afa461cfe224988c704dc7316c5e22bc1c484aa32d820eb8e724f5d3932f08a2 not found: ID does not exist" containerID="afa461cfe224988c704dc7316c5e22bc1c484aa32d820eb8e724f5d3932f08a2" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.082044 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"afa461cfe224988c704dc7316c5e22bc1c484aa32d820eb8e724f5d3932f08a2"} err="failed to get container status \"afa461cfe224988c704dc7316c5e22bc1c484aa32d820eb8e724f5d3932f08a2\": rpc error: code = NotFound desc = could not find container \"afa461cfe224988c704dc7316c5e22bc1c484aa32d820eb8e724f5d3932f08a2\": container with ID starting with afa461cfe224988c704dc7316c5e22bc1c484aa32d820eb8e724f5d3932f08a2 not found: ID does not exist" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.119986 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/857e17c8-bcb3-4632-af28-27fe80d6fe2d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "857e17c8-bcb3-4632-af28-27fe80d6fe2d" (UID: "857e17c8-bcb3-4632-af28-27fe80d6fe2d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.135991 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/857e17c8-bcb3-4632-af28-27fe80d6fe2d-config-data" (OuterVolumeSpecName: "config-data") pod "857e17c8-bcb3-4632-af28-27fe80d6fe2d" (UID: "857e17c8-bcb3-4632-af28-27fe80d6fe2d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.143657 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/857e17c8-bcb3-4632-af28-27fe80d6fe2d-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "857e17c8-bcb3-4632-af28-27fe80d6fe2d" (UID: "857e17c8-bcb3-4632-af28-27fe80d6fe2d"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.172224 4982 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/857e17c8-bcb3-4632-af28-27fe80d6fe2d-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.172268 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/857e17c8-bcb3-4632-af28-27fe80d6fe2d-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.172300 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6bhqk\" (UniqueName: \"kubernetes.io/projected/857e17c8-bcb3-4632-af28-27fe80d6fe2d-kube-api-access-6bhqk\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.172314 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/857e17c8-bcb3-4632-af28-27fe80d6fe2d-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.172344 4982 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.172359 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/857e17c8-bcb3-4632-af28-27fe80d6fe2d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.194965 4982 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.274098 4982 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.388985 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.393961 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.404966 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 06:07:55 crc kubenswrapper[4982]: E0122 06:07:55.405354 4982 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="857e17c8-bcb3-4632-af28-27fe80d6fe2d" containerName="glance-log" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.405380 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="857e17c8-bcb3-4632-af28-27fe80d6fe2d" containerName="glance-log" Jan 22 06:07:55 crc kubenswrapper[4982]: E0122 06:07:55.405404 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="857e17c8-bcb3-4632-af28-27fe80d6fe2d" containerName="glance-httpd" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.405411 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="857e17c8-bcb3-4632-af28-27fe80d6fe2d" containerName="glance-httpd" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.405585 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="857e17c8-bcb3-4632-af28-27fe80d6fe2d" containerName="glance-log" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.405603 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="857e17c8-bcb3-4632-af28-27fe80d6fe2d" containerName="glance-httpd" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.406452 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.408483 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.410170 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.418977 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.581895 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac0dc6e4-35d9-4d06-93af-a8758648aa13-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.582267 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ac0dc6e4-35d9-4d06-93af-a8758648aa13-scripts\") pod \"glance-default-external-api-0\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.582296 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sj245\" (UniqueName: \"kubernetes.io/projected/ac0dc6e4-35d9-4d06-93af-a8758648aa13-kube-api-access-sj245\") pod \"glance-default-external-api-0\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.582368 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ac0dc6e4-35d9-4d06-93af-a8758648aa13-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.582425 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac0dc6e4-35d9-4d06-93af-a8758648aa13-config-data\") pod \"glance-default-external-api-0\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.582453 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.582488 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac0dc6e4-35d9-4d06-93af-a8758648aa13-logs\") pod \"glance-default-external-api-0\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.582556 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac0dc6e4-35d9-4d06-93af-a8758648aa13-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.684191 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.684259 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac0dc6e4-35d9-4d06-93af-a8758648aa13-logs\") pod \"glance-default-external-api-0\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.684307 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac0dc6e4-35d9-4d06-93af-a8758648aa13-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.684340 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac0dc6e4-35d9-4d06-93af-a8758648aa13-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.684374 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ac0dc6e4-35d9-4d06-93af-a8758648aa13-scripts\") pod \"glance-default-external-api-0\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.684391 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-sj245\" (UniqueName: \"kubernetes.io/projected/ac0dc6e4-35d9-4d06-93af-a8758648aa13-kube-api-access-sj245\") pod \"glance-default-external-api-0\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.684761 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac0dc6e4-35d9-4d06-93af-a8758648aa13-logs\") pod \"glance-default-external-api-0\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.685049 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ac0dc6e4-35d9-4d06-93af-a8758648aa13-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.685116 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac0dc6e4-35d9-4d06-93af-a8758648aa13-config-data\") pod \"glance-default-external-api-0\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.685807 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ac0dc6e4-35d9-4d06-93af-a8758648aa13-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.686166 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/glance-default-external-api-0" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.689966 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac0dc6e4-35d9-4d06-93af-a8758648aa13-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.690230 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac0dc6e4-35d9-4d06-93af-a8758648aa13-config-data\") pod \"glance-default-external-api-0\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.690382 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ac0dc6e4-35d9-4d06-93af-a8758648aa13-scripts\") pod \"glance-default-external-api-0\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.690868 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac0dc6e4-35d9-4d06-93af-a8758648aa13-public-tls-certs\") pod 
\"glance-default-external-api-0\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.716603 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.719893 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sj245\" (UniqueName: \"kubernetes.io/projected/ac0dc6e4-35d9-4d06-93af-a8758648aa13-kube-api-access-sj245\") pod \"glance-default-external-api-0\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") " pod="openstack/glance-default-external-api-0" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.730249 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="857e17c8-bcb3-4632-af28-27fe80d6fe2d" path="/var/lib/kubelet/pods/857e17c8-bcb3-4632-af28-27fe80d6fe2d/volumes" Jan 22 06:07:55 crc kubenswrapper[4982]: I0122 06:07:55.731374 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 06:07:56 crc kubenswrapper[4982]: I0122 06:07:56.047295 4982 generic.go:334] "Generic (PLEG): container finished" podID="dacae0a7-2b2d-44dc-a676-027055d56035" containerID="f33bced3fdf444f850ce7aa8ab0dba78c9c52dc2edae2ac10bfb3499fc560375" exitCode=0 Jan 22 06:07:56 crc kubenswrapper[4982]: I0122 06:07:56.047551 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"dacae0a7-2b2d-44dc-a676-027055d56035","Type":"ContainerDied","Data":"f33bced3fdf444f850ce7aa8ab0dba78c9c52dc2edae2ac10bfb3499fc560375"} Jan 22 06:07:56 crc kubenswrapper[4982]: I0122 06:07:56.297895 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 06:07:56 crc kubenswrapper[4982]: W0122 06:07:56.306964 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podac0dc6e4_35d9_4d06_93af_a8758648aa13.slice/crio-1f9576f987deb791cb03e0124573345022309bef7341248f8305569cc09baf0b WatchSource:0}: Error finding container 1f9576f987deb791cb03e0124573345022309bef7341248f8305569cc09baf0b: Status 404 returned error can't find the container with id 1f9576f987deb791cb03e0124573345022309bef7341248f8305569cc09baf0b Jan 22 06:07:57 crc kubenswrapper[4982]: I0122 06:07:57.072635 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" Jan 22 06:07:57 crc kubenswrapper[4982]: I0122 06:07:57.076652 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" Jan 22 06:07:57 crc kubenswrapper[4982]: I0122 06:07:57.082111 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ac0dc6e4-35d9-4d06-93af-a8758648aa13","Type":"ContainerStarted","Data":"c3a1f31f2494aee180667a0f03331fe892eadeaa196337253d864cc858d0d3f1"} Jan 22 06:07:57 crc kubenswrapper[4982]: I0122 06:07:57.082153 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"ac0dc6e4-35d9-4d06-93af-a8758648aa13","Type":"ContainerStarted","Data":"1f9576f987deb791cb03e0124573345022309bef7341248f8305569cc09baf0b"} Jan 22 06:07:57 crc kubenswrapper[4982]: I0122 06:07:57.537703 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 06:07:57 crc kubenswrapper[4982]: I0122 06:07:57.728297 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dacae0a7-2b2d-44dc-a676-027055d56035-internal-tls-certs\") pod \"dacae0a7-2b2d-44dc-a676-027055d56035\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") " Jan 22 06:07:57 crc kubenswrapper[4982]: I0122 06:07:57.728368 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dacae0a7-2b2d-44dc-a676-027055d56035-logs\") pod \"dacae0a7-2b2d-44dc-a676-027055d56035\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") " Jan 22 06:07:57 crc kubenswrapper[4982]: I0122 06:07:57.728410 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"dacae0a7-2b2d-44dc-a676-027055d56035\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") " Jan 22 06:07:57 crc kubenswrapper[4982]: I0122 06:07:57.728448 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-88hpw\" (UniqueName: \"kubernetes.io/projected/dacae0a7-2b2d-44dc-a676-027055d56035-kube-api-access-88hpw\") pod \"dacae0a7-2b2d-44dc-a676-027055d56035\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") " Jan 22 06:07:57 crc kubenswrapper[4982]: I0122 06:07:57.728501 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dacae0a7-2b2d-44dc-a676-027055d56035-combined-ca-bundle\") pod \"dacae0a7-2b2d-44dc-a676-027055d56035\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") " Jan 22 06:07:57 crc kubenswrapper[4982]: I0122 06:07:57.728538 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/dacae0a7-2b2d-44dc-a676-027055d56035-httpd-run\") pod \"dacae0a7-2b2d-44dc-a676-027055d56035\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") " Jan 22 06:07:57 crc kubenswrapper[4982]: I0122 06:07:57.728561 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dacae0a7-2b2d-44dc-a676-027055d56035-config-data\") pod \"dacae0a7-2b2d-44dc-a676-027055d56035\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") " Jan 22 06:07:57 crc kubenswrapper[4982]: I0122 06:07:57.728615 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dacae0a7-2b2d-44dc-a676-027055d56035-scripts\") pod \"dacae0a7-2b2d-44dc-a676-027055d56035\" (UID: \"dacae0a7-2b2d-44dc-a676-027055d56035\") " Jan 22 06:07:57 crc kubenswrapper[4982]: I0122 06:07:57.729379 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dacae0a7-2b2d-44dc-a676-027055d56035-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "dacae0a7-2b2d-44dc-a676-027055d56035" (UID: "dacae0a7-2b2d-44dc-a676-027055d56035"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:07:57 crc kubenswrapper[4982]: I0122 06:07:57.729398 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dacae0a7-2b2d-44dc-a676-027055d56035-logs" (OuterVolumeSpecName: "logs") pod "dacae0a7-2b2d-44dc-a676-027055d56035" (UID: "dacae0a7-2b2d-44dc-a676-027055d56035"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:07:57 crc kubenswrapper[4982]: I0122 06:07:57.736189 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dacae0a7-2b2d-44dc-a676-027055d56035-scripts" (OuterVolumeSpecName: "scripts") pod "dacae0a7-2b2d-44dc-a676-027055d56035" (UID: "dacae0a7-2b2d-44dc-a676-027055d56035"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:57 crc kubenswrapper[4982]: I0122 06:07:57.736283 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dacae0a7-2b2d-44dc-a676-027055d56035-kube-api-access-88hpw" (OuterVolumeSpecName: "kube-api-access-88hpw") pod "dacae0a7-2b2d-44dc-a676-027055d56035" (UID: "dacae0a7-2b2d-44dc-a676-027055d56035"). InnerVolumeSpecName "kube-api-access-88hpw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:07:57 crc kubenswrapper[4982]: I0122 06:07:57.743013 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "dacae0a7-2b2d-44dc-a676-027055d56035" (UID: "dacae0a7-2b2d-44dc-a676-027055d56035"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 22 06:07:57 crc kubenswrapper[4982]: I0122 06:07:57.786937 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dacae0a7-2b2d-44dc-a676-027055d56035-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dacae0a7-2b2d-44dc-a676-027055d56035" (UID: "dacae0a7-2b2d-44dc-a676-027055d56035"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:57 crc kubenswrapper[4982]: I0122 06:07:57.787423 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dacae0a7-2b2d-44dc-a676-027055d56035-config-data" (OuterVolumeSpecName: "config-data") pod "dacae0a7-2b2d-44dc-a676-027055d56035" (UID: "dacae0a7-2b2d-44dc-a676-027055d56035"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:57 crc kubenswrapper[4982]: I0122 06:07:57.804143 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dacae0a7-2b2d-44dc-a676-027055d56035-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "dacae0a7-2b2d-44dc-a676-027055d56035" (UID: "dacae0a7-2b2d-44dc-a676-027055d56035"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:57 crc kubenswrapper[4982]: I0122 06:07:57.840479 4982 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/dacae0a7-2b2d-44dc-a676-027055d56035-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:57 crc kubenswrapper[4982]: I0122 06:07:57.840533 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dacae0a7-2b2d-44dc-a676-027055d56035-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:57 crc kubenswrapper[4982]: I0122 06:07:57.840543 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dacae0a7-2b2d-44dc-a676-027055d56035-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:57 crc kubenswrapper[4982]: I0122 06:07:57.840553 4982 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dacae0a7-2b2d-44dc-a676-027055d56035-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:57 crc kubenswrapper[4982]: I0122 06:07:57.840563 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dacae0a7-2b2d-44dc-a676-027055d56035-logs\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:57 crc kubenswrapper[4982]: I0122 06:07:57.840588 4982 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Jan 22 06:07:57 crc kubenswrapper[4982]: I0122 06:07:57.840599 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-88hpw\" (UniqueName: \"kubernetes.io/projected/dacae0a7-2b2d-44dc-a676-027055d56035-kube-api-access-88hpw\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:57 crc kubenswrapper[4982]: I0122 06:07:57.840614 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dacae0a7-2b2d-44dc-a676-027055d56035-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:57 crc kubenswrapper[4982]: I0122 06:07:57.877229 4982 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Jan 22 06:07:57 crc kubenswrapper[4982]: I0122 06:07:57.947829 4982 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:57 crc kubenswrapper[4982]: I0122 06:07:57.996533 4982 util.go:48] "No ready sandbox for pod can be found. 
Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.098888 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"abece5e4-1910-47c5-9bf6-90056697065b","Type":"ContainerStarted","Data":"3cf135ca21c100f3ce0e6c40d8c188450a62c5c4cb5b43a3dacda33eddf3e37e"}
Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.099009 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="abece5e4-1910-47c5-9bf6-90056697065b" containerName="ceilometer-central-agent" containerID="cri-o://8d130305a964d24d975d7d00f2c576fff4f62206523ba9ed6b96535002d4e755" gracePeriod=30
Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.099066 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.099125 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="abece5e4-1910-47c5-9bf6-90056697065b" containerName="proxy-httpd" containerID="cri-o://3cf135ca21c100f3ce0e6c40d8c188450a62c5c4cb5b43a3dacda33eddf3e37e" gracePeriod=30
Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.099167 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="abece5e4-1910-47c5-9bf6-90056697065b" containerName="sg-core" containerID="cri-o://8d6846310ff71b6d912d20ee69b25cd411b24275c0d591ca6bfe3f9a9363ca1f" gracePeriod=30
Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.099221 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="abece5e4-1910-47c5-9bf6-90056697065b" containerName="ceilometer-notification-agent" containerID="cri-o://750ee879913e471b7ab424d342338378ff5c724bc45571cbb4013a670a051955" gracePeriod=30
Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.107056 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ac0dc6e4-35d9-4d06-93af-a8758648aa13","Type":"ContainerStarted","Data":"33fb07736ff02fa3a7d27e62002c81a800f65f43ed61da9bdbe44cbe612b4ebe"}
Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.114828 4982 generic.go:334] "Generic (PLEG): container finished" podID="ec319049-80f6-4114-b54e-085e69caf06a" containerID="d959af241d0e4a66952d4796f6c1d6cc4bf06a0e4b3a7967e2c465c1b67bab7b" exitCode=137
Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.114964 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ec319049-80f6-4114-b54e-085e69caf06a","Type":"ContainerDied","Data":"d959af241d0e4a66952d4796f6c1d6cc4bf06a0e4b3a7967e2c465c1b67bab7b"}
Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.115014 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ec319049-80f6-4114-b54e-085e69caf06a","Type":"ContainerDied","Data":"e87070446f03e3ad08fcd80460ecfb39ebd393de2c28fa398107039f6a29d729"}
Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.115032 4982 scope.go:117] "RemoveContainer" containerID="d959af241d0e4a66952d4796f6c1d6cc4bf06a0e4b3a7967e2c465c1b67bab7b"
Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.115149 4982 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/cinder-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.122931 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"dacae0a7-2b2d-44dc-a676-027055d56035","Type":"ContainerDied","Data":"0fbac9aadbb4d6d403e07227d9ebaad3515253d708b84366b38262fc6e69358c"} Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.123024 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.130439 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.048090178 podStartE2EDuration="6.130418537s" podCreationTimestamp="2026-01-22 06:07:52 +0000 UTC" firstStartedPulling="2026-01-22 06:07:52.916262065 +0000 UTC m=+1333.754900088" lastFinishedPulling="2026-01-22 06:07:56.998590444 +0000 UTC m=+1337.837228447" observedRunningTime="2026-01-22 06:07:58.123431144 +0000 UTC m=+1338.962069167" watchObservedRunningTime="2026-01-22 06:07:58.130418537 +0000 UTC m=+1338.969056540" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.160302 4982 scope.go:117] "RemoveContainer" containerID="92e031bbff0d801e63e3b6bf3a0ec01fa17bc957883d7d6794dfe076c023c6c3" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.167200 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.167167592 podStartE2EDuration="3.167167592s" podCreationTimestamp="2026-01-22 06:07:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:07:58.160641653 +0000 UTC m=+1338.999279666" watchObservedRunningTime="2026-01-22 06:07:58.167167592 +0000 UTC m=+1339.005805595" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.171397 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ec319049-80f6-4114-b54e-085e69caf06a-etc-machine-id\") pod \"ec319049-80f6-4114-b54e-085e69caf06a\" (UID: \"ec319049-80f6-4114-b54e-085e69caf06a\") " Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.171483 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tbbnh\" (UniqueName: \"kubernetes.io/projected/ec319049-80f6-4114-b54e-085e69caf06a-kube-api-access-tbbnh\") pod \"ec319049-80f6-4114-b54e-085e69caf06a\" (UID: \"ec319049-80f6-4114-b54e-085e69caf06a\") " Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.171515 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec319049-80f6-4114-b54e-085e69caf06a-scripts\") pod \"ec319049-80f6-4114-b54e-085e69caf06a\" (UID: \"ec319049-80f6-4114-b54e-085e69caf06a\") " Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.171570 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec319049-80f6-4114-b54e-085e69caf06a-combined-ca-bundle\") pod \"ec319049-80f6-4114-b54e-085e69caf06a\" (UID: \"ec319049-80f6-4114-b54e-085e69caf06a\") " Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.171609 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/ec319049-80f6-4114-b54e-085e69caf06a-logs\") pod \"ec319049-80f6-4114-b54e-085e69caf06a\" (UID: \"ec319049-80f6-4114-b54e-085e69caf06a\") " Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.171636 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ec319049-80f6-4114-b54e-085e69caf06a-config-data-custom\") pod \"ec319049-80f6-4114-b54e-085e69caf06a\" (UID: \"ec319049-80f6-4114-b54e-085e69caf06a\") " Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.171680 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec319049-80f6-4114-b54e-085e69caf06a-config-data\") pod \"ec319049-80f6-4114-b54e-085e69caf06a\" (UID: \"ec319049-80f6-4114-b54e-085e69caf06a\") " Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.173032 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ec319049-80f6-4114-b54e-085e69caf06a-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "ec319049-80f6-4114-b54e-085e69caf06a" (UID: "ec319049-80f6-4114-b54e-085e69caf06a"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.173511 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec319049-80f6-4114-b54e-085e69caf06a-logs" (OuterVolumeSpecName: "logs") pod "ec319049-80f6-4114-b54e-085e69caf06a" (UID: "ec319049-80f6-4114-b54e-085e69caf06a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.190952 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.191165 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec319049-80f6-4114-b54e-085e69caf06a-scripts" (OuterVolumeSpecName: "scripts") pod "ec319049-80f6-4114-b54e-085e69caf06a" (UID: "ec319049-80f6-4114-b54e-085e69caf06a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.193087 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec319049-80f6-4114-b54e-085e69caf06a-kube-api-access-tbbnh" (OuterVolumeSpecName: "kube-api-access-tbbnh") pod "ec319049-80f6-4114-b54e-085e69caf06a" (UID: "ec319049-80f6-4114-b54e-085e69caf06a"). InnerVolumeSpecName "kube-api-access-tbbnh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.196070 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec319049-80f6-4114-b54e-085e69caf06a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ec319049-80f6-4114-b54e-085e69caf06a" (UID: "ec319049-80f6-4114-b54e-085e69caf06a"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.199178 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.204114 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 06:07:58 crc kubenswrapper[4982]: E0122 06:07:58.204439 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec319049-80f6-4114-b54e-085e69caf06a" containerName="cinder-api-log" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.204450 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec319049-80f6-4114-b54e-085e69caf06a" containerName="cinder-api-log" Jan 22 06:07:58 crc kubenswrapper[4982]: E0122 06:07:58.204463 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dacae0a7-2b2d-44dc-a676-027055d56035" containerName="glance-httpd" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.204468 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="dacae0a7-2b2d-44dc-a676-027055d56035" containerName="glance-httpd" Jan 22 06:07:58 crc kubenswrapper[4982]: E0122 06:07:58.204489 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec319049-80f6-4114-b54e-085e69caf06a" containerName="cinder-api" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.204496 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec319049-80f6-4114-b54e-085e69caf06a" containerName="cinder-api" Jan 22 06:07:58 crc kubenswrapper[4982]: E0122 06:07:58.204505 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dacae0a7-2b2d-44dc-a676-027055d56035" containerName="glance-log" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.204511 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="dacae0a7-2b2d-44dc-a676-027055d56035" containerName="glance-log" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.204667 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="dacae0a7-2b2d-44dc-a676-027055d56035" containerName="glance-log" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.204680 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="dacae0a7-2b2d-44dc-a676-027055d56035" containerName="glance-httpd" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.204687 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec319049-80f6-4114-b54e-085e69caf06a" containerName="cinder-api-log" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.204701 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec319049-80f6-4114-b54e-085e69caf06a" containerName="cinder-api" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.208220 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.213130 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec319049-80f6-4114-b54e-085e69caf06a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ec319049-80f6-4114-b54e-085e69caf06a" (UID: "ec319049-80f6-4114-b54e-085e69caf06a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.230711 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.230961 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.264502 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.274051 4982 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ec319049-80f6-4114-b54e-085e69caf06a-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.274083 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tbbnh\" (UniqueName: \"kubernetes.io/projected/ec319049-80f6-4114-b54e-085e69caf06a-kube-api-access-tbbnh\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.274092 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec319049-80f6-4114-b54e-085e69caf06a-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.274102 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec319049-80f6-4114-b54e-085e69caf06a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.274114 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec319049-80f6-4114-b54e-085e69caf06a-logs\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.274123 4982 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ec319049-80f6-4114-b54e-085e69caf06a-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.291993 4982 scope.go:117] "RemoveContainer" containerID="d959af241d0e4a66952d4796f6c1d6cc4bf06a0e4b3a7967e2c465c1b67bab7b" Jan 22 06:07:58 crc kubenswrapper[4982]: E0122 06:07:58.294896 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d959af241d0e4a66952d4796f6c1d6cc4bf06a0e4b3a7967e2c465c1b67bab7b\": container with ID starting with d959af241d0e4a66952d4796f6c1d6cc4bf06a0e4b3a7967e2c465c1b67bab7b not found: ID does not exist" containerID="d959af241d0e4a66952d4796f6c1d6cc4bf06a0e4b3a7967e2c465c1b67bab7b" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.294929 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d959af241d0e4a66952d4796f6c1d6cc4bf06a0e4b3a7967e2c465c1b67bab7b"} err="failed to get container status \"d959af241d0e4a66952d4796f6c1d6cc4bf06a0e4b3a7967e2c465c1b67bab7b\": rpc error: code = NotFound desc = could not find container \"d959af241d0e4a66952d4796f6c1d6cc4bf06a0e4b3a7967e2c465c1b67bab7b\": container with ID starting with d959af241d0e4a66952d4796f6c1d6cc4bf06a0e4b3a7967e2c465c1b67bab7b not found: ID does not exist" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.294954 4982 scope.go:117] "RemoveContainer" 
containerID="92e031bbff0d801e63e3b6bf3a0ec01fa17bc957883d7d6794dfe076c023c6c3" Jan 22 06:07:58 crc kubenswrapper[4982]: E0122 06:07:58.299064 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92e031bbff0d801e63e3b6bf3a0ec01fa17bc957883d7d6794dfe076c023c6c3\": container with ID starting with 92e031bbff0d801e63e3b6bf3a0ec01fa17bc957883d7d6794dfe076c023c6c3 not found: ID does not exist" containerID="92e031bbff0d801e63e3b6bf3a0ec01fa17bc957883d7d6794dfe076c023c6c3" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.299116 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92e031bbff0d801e63e3b6bf3a0ec01fa17bc957883d7d6794dfe076c023c6c3"} err="failed to get container status \"92e031bbff0d801e63e3b6bf3a0ec01fa17bc957883d7d6794dfe076c023c6c3\": rpc error: code = NotFound desc = could not find container \"92e031bbff0d801e63e3b6bf3a0ec01fa17bc957883d7d6794dfe076c023c6c3\": container with ID starting with 92e031bbff0d801e63e3b6bf3a0ec01fa17bc957883d7d6794dfe076c023c6c3 not found: ID does not exist" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.299162 4982 scope.go:117] "RemoveContainer" containerID="f33bced3fdf444f850ce7aa8ab0dba78c9c52dc2edae2ac10bfb3499fc560375" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.334603 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec319049-80f6-4114-b54e-085e69caf06a-config-data" (OuterVolumeSpecName: "config-data") pod "ec319049-80f6-4114-b54e-085e69caf06a" (UID: "ec319049-80f6-4114-b54e-085e69caf06a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.376440 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-logs\") pod \"glance-default-internal-api-0\" (UID: \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.376531 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dz95l\" (UniqueName: \"kubernetes.io/projected/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-kube-api-access-dz95l\") pod \"glance-default-internal-api-0\" (UID: \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.376598 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.376635 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.376670 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.376998 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.377109 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.377152 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.377427 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec319049-80f6-4114-b54e-085e69caf06a-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.420409 4982 scope.go:117] "RemoveContainer" containerID="efc543e5393157011a31dd852ae15b06e5f944fc56ddcba577afd5e326c59667" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.454967 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.464228 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.472499 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.474691 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.478027 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.478244 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.478372 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.478555 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dz95l\" (UniqueName: \"kubernetes.io/projected/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-kube-api-access-dz95l\") pod \"glance-default-internal-api-0\" (UID: \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.478600 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.478629 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.478653 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.478709 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.478743 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.478763 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.478790 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-logs\") pod \"glance-default-internal-api-0\" (UID: 
\"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.481349 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-internal-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.483208 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.483836 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-logs\") pod \"glance-default-internal-api-0\" (UID: \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.484600 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.485173 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.485700 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.488012 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.488493 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.507299 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dz95l\" (UniqueName: \"kubernetes.io/projected/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-kube-api-access-dz95l\") pod \"glance-default-internal-api-0\" (UID: \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.515485 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod 
\"glance-default-internal-api-0\" (UID: \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") " pod="openstack/glance-default-internal-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.561554 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.580536 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tmt2g\" (UniqueName: \"kubernetes.io/projected/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-kube-api-access-tmt2g\") pod \"cinder-api-0\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " pod="openstack/cinder-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.580580 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " pod="openstack/cinder-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.580603 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " pod="openstack/cinder-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.580632 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " pod="openstack/cinder-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.580659 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-public-tls-certs\") pod \"cinder-api-0\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " pod="openstack/cinder-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.580726 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-scripts\") pod \"cinder-api-0\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " pod="openstack/cinder-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.580801 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-logs\") pod \"cinder-api-0\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " pod="openstack/cinder-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.580959 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-config-data-custom\") pod \"cinder-api-0\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " pod="openstack/cinder-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.580999 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-config-data\") pod \"cinder-api-0\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " pod="openstack/cinder-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.683209 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " pod="openstack/cinder-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.683256 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-public-tls-certs\") pod \"cinder-api-0\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " pod="openstack/cinder-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.683277 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-scripts\") pod \"cinder-api-0\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " pod="openstack/cinder-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.683299 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-logs\") pod \"cinder-api-0\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " pod="openstack/cinder-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.683345 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-config-data-custom\") pod \"cinder-api-0\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " pod="openstack/cinder-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.683369 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-config-data\") pod \"cinder-api-0\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " pod="openstack/cinder-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.683843 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-logs\") pod \"cinder-api-0\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " pod="openstack/cinder-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.684075 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tmt2g\" (UniqueName: \"kubernetes.io/projected/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-kube-api-access-tmt2g\") pod \"cinder-api-0\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " pod="openstack/cinder-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.684128 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " pod="openstack/cinder-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.684156 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " pod="openstack/cinder-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.684210 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " pod="openstack/cinder-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.687559 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-scripts\") pod \"cinder-api-0\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " pod="openstack/cinder-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.687715 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-config-data\") pod \"cinder-api-0\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " pod="openstack/cinder-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.689093 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-config-data-custom\") pod \"cinder-api-0\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " pod="openstack/cinder-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.689321 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " pod="openstack/cinder-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.703009 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-public-tls-certs\") pod \"cinder-api-0\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " pod="openstack/cinder-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.703027 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " pod="openstack/cinder-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.706501 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tmt2g\" (UniqueName: \"kubernetes.io/projected/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-kube-api-access-tmt2g\") pod \"cinder-api-0\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " pod="openstack/cinder-api-0" Jan 22 06:07:58 crc kubenswrapper[4982]: I0122 06:07:58.862962 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 22 06:07:59 crc kubenswrapper[4982]: I0122 06:07:59.072508 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 06:07:59 crc kubenswrapper[4982]: W0122 06:07:59.077079 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8d77f6f1_5c7d_45e5_92e5_8e333d91c020.slice/crio-4d79dd43059edfedd4ad44a90b967317104e74eee098c9202db37016f5f2ecb1 WatchSource:0}: Error finding container 4d79dd43059edfedd4ad44a90b967317104e74eee098c9202db37016f5f2ecb1: Status 404 returned error can't find the container with id 4d79dd43059edfedd4ad44a90b967317104e74eee098c9202db37016f5f2ecb1 Jan 22 06:07:59 crc kubenswrapper[4982]: I0122 06:07:59.150235 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8d77f6f1-5c7d-45e5-92e5-8e333d91c020","Type":"ContainerStarted","Data":"4d79dd43059edfedd4ad44a90b967317104e74eee098c9202db37016f5f2ecb1"} Jan 22 06:07:59 crc kubenswrapper[4982]: I0122 06:07:59.154666 4982 generic.go:334] "Generic (PLEG): container finished" podID="abece5e4-1910-47c5-9bf6-90056697065b" containerID="3cf135ca21c100f3ce0e6c40d8c188450a62c5c4cb5b43a3dacda33eddf3e37e" exitCode=0 Jan 22 06:07:59 crc kubenswrapper[4982]: I0122 06:07:59.154688 4982 generic.go:334] "Generic (PLEG): container finished" podID="abece5e4-1910-47c5-9bf6-90056697065b" containerID="8d6846310ff71b6d912d20ee69b25cd411b24275c0d591ca6bfe3f9a9363ca1f" exitCode=2 Jan 22 06:07:59 crc kubenswrapper[4982]: I0122 06:07:59.154697 4982 generic.go:334] "Generic (PLEG): container finished" podID="abece5e4-1910-47c5-9bf6-90056697065b" containerID="750ee879913e471b7ab424d342338378ff5c724bc45571cbb4013a670a051955" exitCode=0 Jan 22 06:07:59 crc kubenswrapper[4982]: I0122 06:07:59.154725 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"abece5e4-1910-47c5-9bf6-90056697065b","Type":"ContainerDied","Data":"3cf135ca21c100f3ce0e6c40d8c188450a62c5c4cb5b43a3dacda33eddf3e37e"} Jan 22 06:07:59 crc kubenswrapper[4982]: I0122 06:07:59.154742 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"abece5e4-1910-47c5-9bf6-90056697065b","Type":"ContainerDied","Data":"8d6846310ff71b6d912d20ee69b25cd411b24275c0d591ca6bfe3f9a9363ca1f"} Jan 22 06:07:59 crc kubenswrapper[4982]: I0122 06:07:59.154753 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"abece5e4-1910-47c5-9bf6-90056697065b","Type":"ContainerDied","Data":"750ee879913e471b7ab424d342338378ff5c724bc45571cbb4013a670a051955"} Jan 22 06:07:59 crc kubenswrapper[4982]: I0122 06:07:59.336820 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 22 06:07:59 crc kubenswrapper[4982]: I0122 06:07:59.745789 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dacae0a7-2b2d-44dc-a676-027055d56035" path="/var/lib/kubelet/pods/dacae0a7-2b2d-44dc-a676-027055d56035/volumes" Jan 22 06:07:59 crc kubenswrapper[4982]: I0122 06:07:59.749281 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec319049-80f6-4114-b54e-085e69caf06a" path="/var/lib/kubelet/pods/ec319049-80f6-4114-b54e-085e69caf06a/volumes" Jan 22 06:08:00 crc kubenswrapper[4982]: I0122 06:08:00.169745 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" 
event={"ID":"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e","Type":"ContainerStarted","Data":"08893a5df354c2f32c7b9ca210a4a1f261a5ebf3545f01c8f59952a61d346ecb"} Jan 22 06:08:00 crc kubenswrapper[4982]: I0122 06:08:00.170091 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e","Type":"ContainerStarted","Data":"e27d0ca0c3c80eae3ef5046e6841df64657571a68ff0e08f536c7206324926f0"} Jan 22 06:08:00 crc kubenswrapper[4982]: I0122 06:08:00.171246 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8d77f6f1-5c7d-45e5-92e5-8e333d91c020","Type":"ContainerStarted","Data":"2402516dd72b79b7045a0fea1c29ce7e8868f7e5288ff8c3baf2331b568a9fc7"} Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.007885 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.164194 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/abece5e4-1910-47c5-9bf6-90056697065b-run-httpd\") pod \"abece5e4-1910-47c5-9bf6-90056697065b\" (UID: \"abece5e4-1910-47c5-9bf6-90056697065b\") " Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.164572 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/abece5e4-1910-47c5-9bf6-90056697065b-log-httpd\") pod \"abece5e4-1910-47c5-9bf6-90056697065b\" (UID: \"abece5e4-1910-47c5-9bf6-90056697065b\") " Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.164607 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/abece5e4-1910-47c5-9bf6-90056697065b-sg-core-conf-yaml\") pod \"abece5e4-1910-47c5-9bf6-90056697065b\" (UID: \"abece5e4-1910-47c5-9bf6-90056697065b\") " Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.164638 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6tr9d\" (UniqueName: \"kubernetes.io/projected/abece5e4-1910-47c5-9bf6-90056697065b-kube-api-access-6tr9d\") pod \"abece5e4-1910-47c5-9bf6-90056697065b\" (UID: \"abece5e4-1910-47c5-9bf6-90056697065b\") " Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.164686 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/abece5e4-1910-47c5-9bf6-90056697065b-scripts\") pod \"abece5e4-1910-47c5-9bf6-90056697065b\" (UID: \"abece5e4-1910-47c5-9bf6-90056697065b\") " Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.164726 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abece5e4-1910-47c5-9bf6-90056697065b-combined-ca-bundle\") pod \"abece5e4-1910-47c5-9bf6-90056697065b\" (UID: \"abece5e4-1910-47c5-9bf6-90056697065b\") " Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.164796 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abece5e4-1910-47c5-9bf6-90056697065b-config-data\") pod \"abece5e4-1910-47c5-9bf6-90056697065b\" (UID: \"abece5e4-1910-47c5-9bf6-90056697065b\") " Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.165653 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/abece5e4-1910-47c5-9bf6-90056697065b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "abece5e4-1910-47c5-9bf6-90056697065b" (UID: "abece5e4-1910-47c5-9bf6-90056697065b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.165698 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abece5e4-1910-47c5-9bf6-90056697065b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "abece5e4-1910-47c5-9bf6-90056697065b" (UID: "abece5e4-1910-47c5-9bf6-90056697065b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.170457 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abece5e4-1910-47c5-9bf6-90056697065b-scripts" (OuterVolumeSpecName: "scripts") pod "abece5e4-1910-47c5-9bf6-90056697065b" (UID: "abece5e4-1910-47c5-9bf6-90056697065b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.172029 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abece5e4-1910-47c5-9bf6-90056697065b-kube-api-access-6tr9d" (OuterVolumeSpecName: "kube-api-access-6tr9d") pod "abece5e4-1910-47c5-9bf6-90056697065b" (UID: "abece5e4-1910-47c5-9bf6-90056697065b"). InnerVolumeSpecName "kube-api-access-6tr9d". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.181720 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e","Type":"ContainerStarted","Data":"9e43b1e70c6df504b6660b1a17c284b8c0388a6042530649a62f0a956e4e4d73"} Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.181823 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.183298 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8d77f6f1-5c7d-45e5-92e5-8e333d91c020","Type":"ContainerStarted","Data":"92aa96879b03afd4dc777ec25243c37e4ffc13dea924e62623d2262a8332c39f"} Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.189101 4982 generic.go:334] "Generic (PLEG): container finished" podID="abece5e4-1910-47c5-9bf6-90056697065b" containerID="8d130305a964d24d975d7d00f2c576fff4f62206523ba9ed6b96535002d4e755" exitCode=0 Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.189144 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"abece5e4-1910-47c5-9bf6-90056697065b","Type":"ContainerDied","Data":"8d130305a964d24d975d7d00f2c576fff4f62206523ba9ed6b96535002d4e755"} Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.189177 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"abece5e4-1910-47c5-9bf6-90056697065b","Type":"ContainerDied","Data":"63c61da0ff110741a7f3993da86cf0bc0f9feff99e042a15814a1e560ef111fc"} Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.189195 4982 scope.go:117] "RemoveContainer" containerID="3cf135ca21c100f3ce0e6c40d8c188450a62c5c4cb5b43a3dacda33eddf3e37e" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.189325 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0"
Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.219236 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.219212669 podStartE2EDuration="3.219212669s" podCreationTimestamp="2026-01-22 06:07:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:08:01.203210913 +0000 UTC m=+1342.041848916" watchObservedRunningTime="2026-01-22 06:08:01.219212669 +0000 UTC m=+1342.057850662"
Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.236931 4982 scope.go:117] "RemoveContainer" containerID="8d6846310ff71b6d912d20ee69b25cd411b24275c0d591ca6bfe3f9a9363ca1f"
Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.242083 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.242060984 podStartE2EDuration="3.242060984s" podCreationTimestamp="2026-01-22 06:07:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:08:01.23732476 +0000 UTC m=+1342.075962763" watchObservedRunningTime="2026-01-22 06:08:01.242060984 +0000 UTC m=+1342.080698987"
Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.250425 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abece5e4-1910-47c5-9bf6-90056697065b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "abece5e4-1910-47c5-9bf6-90056697065b" (UID: "abece5e4-1910-47c5-9bf6-90056697065b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.251649 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abece5e4-1910-47c5-9bf6-90056697065b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "abece5e4-1910-47c5-9bf6-90056697065b" (UID: "abece5e4-1910-47c5-9bf6-90056697065b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
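
The two startup-latency entries above report podStartSLOduration equal to podStartE2EDuration because firstStartedPulling/lastFinishedPulling are zero timestamps (nothing was pulled). The earlier ceilometer-0 entry (logged at 06:07:58.130418537) shows the interesting case: SLO duration 2.048090178s versus end-to-end 6.130418537s, and the gap is exactly the image-pull window given by the monotonic m= offsets. A quick check of that arithmetic, with the values copied from that log entry:

// Verifies, from the ceilometer-0 entry's own numbers, that the SLO
// startup duration equals the end-to-end duration minus the image-pull
// window. The m= offsets carry nine decimal places, i.e. nanoseconds.
package main

import (
	"fmt"
	"time"
)

func main() {
	e2e := time.Duration(6130418537)                    // podStartE2EDuration="6.130418537s"
	firstStartedPulling := time.Duration(1333754900088) // m=+1333.754900088, in ns
	lastFinishedPulling := time.Duration(1337837228447) // m=+1337.837228447, in ns

	pull := lastFinishedPulling - firstStartedPulling
	fmt.Println("image pull window:", pull) // 4.082328359s
	fmt.Println("SLO duration:", e2e-pull)  // 2.048090178s == podStartSLOduration
}
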
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.267153 4982 scope.go:117] "RemoveContainer" containerID="750ee879913e471b7ab424d342338378ff5c724bc45571cbb4013a670a051955" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.268287 4982 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/abece5e4-1910-47c5-9bf6-90056697065b-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.268325 4982 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/abece5e4-1910-47c5-9bf6-90056697065b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.268335 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6tr9d\" (UniqueName: \"kubernetes.io/projected/abece5e4-1910-47c5-9bf6-90056697065b-kube-api-access-6tr9d\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.268345 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/abece5e4-1910-47c5-9bf6-90056697065b-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.268354 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abece5e4-1910-47c5-9bf6-90056697065b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.268362 4982 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/abece5e4-1910-47c5-9bf6-90056697065b-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.286474 4982 scope.go:117] "RemoveContainer" containerID="8d130305a964d24d975d7d00f2c576fff4f62206523ba9ed6b96535002d4e755" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.294798 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abece5e4-1910-47c5-9bf6-90056697065b-config-data" (OuterVolumeSpecName: "config-data") pod "abece5e4-1910-47c5-9bf6-90056697065b" (UID: "abece5e4-1910-47c5-9bf6-90056697065b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.309685 4982 scope.go:117] "RemoveContainer" containerID="3cf135ca21c100f3ce0e6c40d8c188450a62c5c4cb5b43a3dacda33eddf3e37e" Jan 22 06:08:01 crc kubenswrapper[4982]: E0122 06:08:01.310131 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3cf135ca21c100f3ce0e6c40d8c188450a62c5c4cb5b43a3dacda33eddf3e37e\": container with ID starting with 3cf135ca21c100f3ce0e6c40d8c188450a62c5c4cb5b43a3dacda33eddf3e37e not found: ID does not exist" containerID="3cf135ca21c100f3ce0e6c40d8c188450a62c5c4cb5b43a3dacda33eddf3e37e" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.310181 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3cf135ca21c100f3ce0e6c40d8c188450a62c5c4cb5b43a3dacda33eddf3e37e"} err="failed to get container status \"3cf135ca21c100f3ce0e6c40d8c188450a62c5c4cb5b43a3dacda33eddf3e37e\": rpc error: code = NotFound desc = could not find container \"3cf135ca21c100f3ce0e6c40d8c188450a62c5c4cb5b43a3dacda33eddf3e37e\": container with ID starting with 3cf135ca21c100f3ce0e6c40d8c188450a62c5c4cb5b43a3dacda33eddf3e37e not found: ID does not exist" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.310211 4982 scope.go:117] "RemoveContainer" containerID="8d6846310ff71b6d912d20ee69b25cd411b24275c0d591ca6bfe3f9a9363ca1f" Jan 22 06:08:01 crc kubenswrapper[4982]: E0122 06:08:01.310651 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d6846310ff71b6d912d20ee69b25cd411b24275c0d591ca6bfe3f9a9363ca1f\": container with ID starting with 8d6846310ff71b6d912d20ee69b25cd411b24275c0d591ca6bfe3f9a9363ca1f not found: ID does not exist" containerID="8d6846310ff71b6d912d20ee69b25cd411b24275c0d591ca6bfe3f9a9363ca1f" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.310699 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d6846310ff71b6d912d20ee69b25cd411b24275c0d591ca6bfe3f9a9363ca1f"} err="failed to get container status \"8d6846310ff71b6d912d20ee69b25cd411b24275c0d591ca6bfe3f9a9363ca1f\": rpc error: code = NotFound desc = could not find container \"8d6846310ff71b6d912d20ee69b25cd411b24275c0d591ca6bfe3f9a9363ca1f\": container with ID starting with 8d6846310ff71b6d912d20ee69b25cd411b24275c0d591ca6bfe3f9a9363ca1f not found: ID does not exist" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.310730 4982 scope.go:117] "RemoveContainer" containerID="750ee879913e471b7ab424d342338378ff5c724bc45571cbb4013a670a051955" Jan 22 06:08:01 crc kubenswrapper[4982]: E0122 06:08:01.311059 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"750ee879913e471b7ab424d342338378ff5c724bc45571cbb4013a670a051955\": container with ID starting with 750ee879913e471b7ab424d342338378ff5c724bc45571cbb4013a670a051955 not found: ID does not exist" containerID="750ee879913e471b7ab424d342338378ff5c724bc45571cbb4013a670a051955" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.311084 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"750ee879913e471b7ab424d342338378ff5c724bc45571cbb4013a670a051955"} err="failed to get container status \"750ee879913e471b7ab424d342338378ff5c724bc45571cbb4013a670a051955\": rpc error: code = NotFound desc = could not 
find container \"750ee879913e471b7ab424d342338378ff5c724bc45571cbb4013a670a051955\": container with ID starting with 750ee879913e471b7ab424d342338378ff5c724bc45571cbb4013a670a051955 not found: ID does not exist" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.311108 4982 scope.go:117] "RemoveContainer" containerID="8d130305a964d24d975d7d00f2c576fff4f62206523ba9ed6b96535002d4e755" Jan 22 06:08:01 crc kubenswrapper[4982]: E0122 06:08:01.311365 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d130305a964d24d975d7d00f2c576fff4f62206523ba9ed6b96535002d4e755\": container with ID starting with 8d130305a964d24d975d7d00f2c576fff4f62206523ba9ed6b96535002d4e755 not found: ID does not exist" containerID="8d130305a964d24d975d7d00f2c576fff4f62206523ba9ed6b96535002d4e755" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.311391 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d130305a964d24d975d7d00f2c576fff4f62206523ba9ed6b96535002d4e755"} err="failed to get container status \"8d130305a964d24d975d7d00f2c576fff4f62206523ba9ed6b96535002d4e755\": rpc error: code = NotFound desc = could not find container \"8d130305a964d24d975d7d00f2c576fff4f62206523ba9ed6b96535002d4e755\": container with ID starting with 8d130305a964d24d975d7d00f2c576fff4f62206523ba9ed6b96535002d4e755 not found: ID does not exist" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.369434 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abece5e4-1910-47c5-9bf6-90056697065b-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.523045 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.529554 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.549938 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:08:01 crc kubenswrapper[4982]: E0122 06:08:01.550295 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abece5e4-1910-47c5-9bf6-90056697065b" containerName="proxy-httpd" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.550310 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="abece5e4-1910-47c5-9bf6-90056697065b" containerName="proxy-httpd" Jan 22 06:08:01 crc kubenswrapper[4982]: E0122 06:08:01.550328 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abece5e4-1910-47c5-9bf6-90056697065b" containerName="sg-core" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.550333 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="abece5e4-1910-47c5-9bf6-90056697065b" containerName="sg-core" Jan 22 06:08:01 crc kubenswrapper[4982]: E0122 06:08:01.550356 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abece5e4-1910-47c5-9bf6-90056697065b" containerName="ceilometer-central-agent" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.550362 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="abece5e4-1910-47c5-9bf6-90056697065b" containerName="ceilometer-central-agent" Jan 22 06:08:01 crc kubenswrapper[4982]: E0122 06:08:01.550382 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abece5e4-1910-47c5-9bf6-90056697065b" containerName="ceilometer-notification-agent" 
Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.550388 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="abece5e4-1910-47c5-9bf6-90056697065b" containerName="ceilometer-notification-agent"
Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.550574 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="abece5e4-1910-47c5-9bf6-90056697065b" containerName="ceilometer-central-agent"
Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.550587 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="abece5e4-1910-47c5-9bf6-90056697065b" containerName="ceilometer-notification-agent"
Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.550595 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="abece5e4-1910-47c5-9bf6-90056697065b" containerName="proxy-httpd"
Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.550607 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="abece5e4-1910-47c5-9bf6-90056697065b" containerName="sg-core"
Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.552283 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.557099 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.557242 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.569729 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.674916 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08c83842-d8b5-4f79-98d5-8822a6716046-log-httpd\") pod \"ceilometer-0\" (UID: \"08c83842-d8b5-4f79-98d5-8822a6716046\") " pod="openstack/ceilometer-0"
Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.674975 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08c83842-d8b5-4f79-98d5-8822a6716046-scripts\") pod \"ceilometer-0\" (UID: \"08c83842-d8b5-4f79-98d5-8822a6716046\") " pod="openstack/ceilometer-0"
Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.675002 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08c83842-d8b5-4f79-98d5-8822a6716046-run-httpd\") pod \"ceilometer-0\" (UID: \"08c83842-d8b5-4f79-98d5-8822a6716046\") " pod="openstack/ceilometer-0"
Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.675029 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxrcp\" (UniqueName: \"kubernetes.io/projected/08c83842-d8b5-4f79-98d5-8822a6716046-kube-api-access-cxrcp\") pod \"ceilometer-0\" (UID: \"08c83842-d8b5-4f79-98d5-8822a6716046\") " pod="openstack/ceilometer-0"
Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.675053 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08c83842-d8b5-4f79-98d5-8822a6716046-config-data\") pod \"ceilometer-0\" (UID: \"08c83842-d8b5-4f79-98d5-8822a6716046\") " pod="openstack/ceilometer-0"
Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.675068 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08c83842-d8b5-4f79-98d5-8822a6716046-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"08c83842-d8b5-4f79-98d5-8822a6716046\") " pod="openstack/ceilometer-0"
Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.675178 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08c83842-d8b5-4f79-98d5-8822a6716046-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"08c83842-d8b5-4f79-98d5-8822a6716046\") " pod="openstack/ceilometer-0"
Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.733818 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="abece5e4-1910-47c5-9bf6-90056697065b" path="/var/lib/kubelet/pods/abece5e4-1910-47c5-9bf6-90056697065b/volumes"
Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.785277 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08c83842-d8b5-4f79-98d5-8822a6716046-log-httpd\") pod \"ceilometer-0\" (UID: \"08c83842-d8b5-4f79-98d5-8822a6716046\") " pod="openstack/ceilometer-0"
Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.785352 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08c83842-d8b5-4f79-98d5-8822a6716046-scripts\") pod \"ceilometer-0\" (UID: \"08c83842-d8b5-4f79-98d5-8822a6716046\") " pod="openstack/ceilometer-0"
Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.785396 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08c83842-d8b5-4f79-98d5-8822a6716046-run-httpd\") pod \"ceilometer-0\" (UID: \"08c83842-d8b5-4f79-98d5-8822a6716046\") " pod="openstack/ceilometer-0"
Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.785435 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cxrcp\" (UniqueName: \"kubernetes.io/projected/08c83842-d8b5-4f79-98d5-8822a6716046-kube-api-access-cxrcp\") pod \"ceilometer-0\" (UID: \"08c83842-d8b5-4f79-98d5-8822a6716046\") " pod="openstack/ceilometer-0"
Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.785476 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08c83842-d8b5-4f79-98d5-8822a6716046-config-data\") pod \"ceilometer-0\" (UID: \"08c83842-d8b5-4f79-98d5-8822a6716046\") " pod="openstack/ceilometer-0"
Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.785501 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08c83842-d8b5-4f79-98d5-8822a6716046-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"08c83842-d8b5-4f79-98d5-8822a6716046\") " pod="openstack/ceilometer-0"
Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.785571 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08c83842-d8b5-4f79-98d5-8822a6716046-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"08c83842-d8b5-4f79-98d5-8822a6716046\") " pod="openstack/ceilometer-0"
Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.792371 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08c83842-d8b5-4f79-98d5-8822a6716046-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"08c83842-d8b5-4f79-98d5-8822a6716046\") " pod="openstack/ceilometer-0"
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08c83842-d8b5-4f79-98d5-8822a6716046-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"08c83842-d8b5-4f79-98d5-8822a6716046\") " pod="openstack/ceilometer-0" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.792768 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08c83842-d8b5-4f79-98d5-8822a6716046-log-httpd\") pod \"ceilometer-0\" (UID: \"08c83842-d8b5-4f79-98d5-8822a6716046\") " pod="openstack/ceilometer-0" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.796480 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08c83842-d8b5-4f79-98d5-8822a6716046-run-httpd\") pod \"ceilometer-0\" (UID: \"08c83842-d8b5-4f79-98d5-8822a6716046\") " pod="openstack/ceilometer-0" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.799087 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08c83842-d8b5-4f79-98d5-8822a6716046-scripts\") pod \"ceilometer-0\" (UID: \"08c83842-d8b5-4f79-98d5-8822a6716046\") " pod="openstack/ceilometer-0" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.800691 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08c83842-d8b5-4f79-98d5-8822a6716046-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"08c83842-d8b5-4f79-98d5-8822a6716046\") " pod="openstack/ceilometer-0" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.800833 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08c83842-d8b5-4f79-98d5-8822a6716046-config-data\") pod \"ceilometer-0\" (UID: \"08c83842-d8b5-4f79-98d5-8822a6716046\") " pod="openstack/ceilometer-0" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.817664 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cxrcp\" (UniqueName: \"kubernetes.io/projected/08c83842-d8b5-4f79-98d5-8822a6716046-kube-api-access-cxrcp\") pod \"ceilometer-0\" (UID: \"08c83842-d8b5-4f79-98d5-8822a6716046\") " pod="openstack/ceilometer-0" Jan 22 06:08:01 crc kubenswrapper[4982]: I0122 06:08:01.870353 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 22 06:08:02 crc kubenswrapper[4982]: W0122 06:08:02.350260 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod08c83842_d8b5_4f79_98d5_8822a6716046.slice/crio-afdaedf754dc020941f1936e4ca11bc7a797e51f9b29803e51c029fd03393a76 WatchSource:0}: Error finding container afdaedf754dc020941f1936e4ca11bc7a797e51f9b29803e51c029fd03393a76: Status 404 returned error can't find the container with id afdaedf754dc020941f1936e4ca11bc7a797e51f9b29803e51c029fd03393a76 Jan 22 06:08:02 crc kubenswrapper[4982]: I0122 06:08:02.351375 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:08:03 crc kubenswrapper[4982]: I0122 06:08:03.219610 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08c83842-d8b5-4f79-98d5-8822a6716046","Type":"ContainerStarted","Data":"afdaedf754dc020941f1936e4ca11bc7a797e51f9b29803e51c029fd03393a76"} Jan 22 06:08:04 crc kubenswrapper[4982]: I0122 06:08:04.232415 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08c83842-d8b5-4f79-98d5-8822a6716046","Type":"ContainerStarted","Data":"2228fe15d0c41108ea21cdef57d85a06e53c58784daf02c9d65e24de860de72f"} Jan 22 06:08:04 crc kubenswrapper[4982]: I0122 06:08:04.232762 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08c83842-d8b5-4f79-98d5-8822a6716046","Type":"ContainerStarted","Data":"4dbe8daf61d54d91fb3d1fadec30f7969810f10549b1154a5884920ceecd9425"} Jan 22 06:08:05 crc kubenswrapper[4982]: I0122 06:08:05.250489 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08c83842-d8b5-4f79-98d5-8822a6716046","Type":"ContainerStarted","Data":"27905b82ecca95fdc8666b6e06b7d9ad4b83698b954a35ee4cff76daae176428"} Jan 22 06:08:05 crc kubenswrapper[4982]: I0122 06:08:05.735844 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 22 06:08:05 crc kubenswrapper[4982]: I0122 06:08:05.736120 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 22 06:08:05 crc kubenswrapper[4982]: I0122 06:08:05.791890 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 22 06:08:05 crc kubenswrapper[4982]: I0122 06:08:05.800147 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 22 06:08:06 crc kubenswrapper[4982]: I0122 06:08:06.263122 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08c83842-d8b5-4f79-98d5-8822a6716046","Type":"ContainerStarted","Data":"cfa50ade837d368deabf7da090c6876df6855a9c350204462403f2149d091c61"} Jan 22 06:08:06 crc kubenswrapper[4982]: I0122 06:08:06.263415 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 22 06:08:06 crc kubenswrapper[4982]: I0122 06:08:06.263448 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 22 06:08:06 crc kubenswrapper[4982]: I0122 06:08:06.263458 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 22 06:08:06 crc kubenswrapper[4982]: I0122 06:08:06.297970 
Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.349894 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-7pgw9"]
Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.351694 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-7pgw9"
Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.360125 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-7pgw9"]
Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.500989 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-z4p9q"]
Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.502477 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-z4p9q"
Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.508569 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-a886-account-create-update-5tlsd"]
Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.509589 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-a886-account-create-update-5tlsd"
Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.512496 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9qkf\" (UniqueName: \"kubernetes.io/projected/56abf6c5-1b18-45ab-ab46-8359b05d5a19-kube-api-access-q9qkf\") pod \"nova-api-db-create-7pgw9\" (UID: \"56abf6c5-1b18-45ab-ab46-8359b05d5a19\") " pod="openstack/nova-api-db-create-7pgw9"
Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.512581 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/56abf6c5-1b18-45ab-ab46-8359b05d5a19-operator-scripts\") pod \"nova-api-db-create-7pgw9\" (UID: \"56abf6c5-1b18-45ab-ab46-8359b05d5a19\") " pod="openstack/nova-api-db-create-7pgw9"
Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.519107 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret"
Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.536108 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-z4p9q"]
Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.562576 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-a886-account-create-update-5tlsd"]
Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.614735 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrlld\" (UniqueName: \"kubernetes.io/projected/2f867733-84f1-4d24-8b1d-a46f5f9a6ff9-kube-api-access-lrlld\") pod \"nova-api-a886-account-create-update-5tlsd\" (UID: \"2f867733-84f1-4d24-8b1d-a46f5f9a6ff9\") " pod="openstack/nova-api-a886-account-create-update-5tlsd"
Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.614793 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f867733-84f1-4d24-8b1d-a46f5f9a6ff9-operator-scripts\") pod \"nova-api-a886-account-create-update-5tlsd\" (UID: \"2f867733-84f1-4d24-8b1d-a46f5f9a6ff9\") " pod="openstack/nova-api-a886-account-create-update-5tlsd"
Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.614848 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aff6a5d2-6643-4aa8-8427-c01cfc232232-operator-scripts\") pod \"nova-cell0-db-create-z4p9q\" (UID: \"aff6a5d2-6643-4aa8-8427-c01cfc232232\") " pod="openstack/nova-cell0-db-create-z4p9q"
Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.614892 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fl8s4\" (UniqueName: \"kubernetes.io/projected/aff6a5d2-6643-4aa8-8427-c01cfc232232-kube-api-access-fl8s4\") pod \"nova-cell0-db-create-z4p9q\" (UID: \"aff6a5d2-6643-4aa8-8427-c01cfc232232\") " pod="openstack/nova-cell0-db-create-z4p9q"
Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.614919 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9qkf\" (UniqueName: \"kubernetes.io/projected/56abf6c5-1b18-45ab-ab46-8359b05d5a19-kube-api-access-q9qkf\") pod \"nova-api-db-create-7pgw9\" (UID: \"56abf6c5-1b18-45ab-ab46-8359b05d5a19\") " pod="openstack/nova-api-db-create-7pgw9"
Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.614985 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/56abf6c5-1b18-45ab-ab46-8359b05d5a19-operator-scripts\") pod \"nova-api-db-create-7pgw9\" (UID: \"56abf6c5-1b18-45ab-ab46-8359b05d5a19\") " pod="openstack/nova-api-db-create-7pgw9"
Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.615729 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/56abf6c5-1b18-45ab-ab46-8359b05d5a19-operator-scripts\") pod \"nova-api-db-create-7pgw9\" (UID: \"56abf6c5-1b18-45ab-ab46-8359b05d5a19\") " pod="openstack/nova-api-db-create-7pgw9"
Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.624918 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-kx85p"]
Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.626065 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-kx85p"
Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.632637 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9qkf\" (UniqueName: \"kubernetes.io/projected/56abf6c5-1b18-45ab-ab46-8359b05d5a19-kube-api-access-q9qkf\") pod \"nova-api-db-create-7pgw9\" (UID: \"56abf6c5-1b18-45ab-ab46-8359b05d5a19\") " pod="openstack/nova-api-db-create-7pgw9"
Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.652932 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-kx85p"]
Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.688163 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-7pgw9"
Need to start a new one" pod="openstack/nova-api-db-create-7pgw9" Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.707064 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-777f-account-create-update-r5m24"] Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.708590 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-777f-account-create-update-r5m24" Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.712540 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.714209 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-777f-account-create-update-r5m24"] Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.716286 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29a36a50-28da-44d3-934a-89bcf37ac576-operator-scripts\") pod \"nova-cell1-db-create-kx85p\" (UID: \"29a36a50-28da-44d3-934a-89bcf37ac576\") " pod="openstack/nova-cell1-db-create-kx85p" Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.716376 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrlld\" (UniqueName: \"kubernetes.io/projected/2f867733-84f1-4d24-8b1d-a46f5f9a6ff9-kube-api-access-lrlld\") pod \"nova-api-a886-account-create-update-5tlsd\" (UID: \"2f867733-84f1-4d24-8b1d-a46f5f9a6ff9\") " pod="openstack/nova-api-a886-account-create-update-5tlsd" Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.716395 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4nwnq\" (UniqueName: \"kubernetes.io/projected/29a36a50-28da-44d3-934a-89bcf37ac576-kube-api-access-4nwnq\") pod \"nova-cell1-db-create-kx85p\" (UID: \"29a36a50-28da-44d3-934a-89bcf37ac576\") " pod="openstack/nova-cell1-db-create-kx85p" Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.716428 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f867733-84f1-4d24-8b1d-a46f5f9a6ff9-operator-scripts\") pod \"nova-api-a886-account-create-update-5tlsd\" (UID: \"2f867733-84f1-4d24-8b1d-a46f5f9a6ff9\") " pod="openstack/nova-api-a886-account-create-update-5tlsd" Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.716479 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aff6a5d2-6643-4aa8-8427-c01cfc232232-operator-scripts\") pod \"nova-cell0-db-create-z4p9q\" (UID: \"aff6a5d2-6643-4aa8-8427-c01cfc232232\") " pod="openstack/nova-cell0-db-create-z4p9q" Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.716508 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fl8s4\" (UniqueName: \"kubernetes.io/projected/aff6a5d2-6643-4aa8-8427-c01cfc232232-kube-api-access-fl8s4\") pod \"nova-cell0-db-create-z4p9q\" (UID: \"aff6a5d2-6643-4aa8-8427-c01cfc232232\") " pod="openstack/nova-cell0-db-create-z4p9q" Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.717437 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f867733-84f1-4d24-8b1d-a46f5f9a6ff9-operator-scripts\") pod \"nova-api-a886-account-create-update-5tlsd\" 
(UID: \"2f867733-84f1-4d24-8b1d-a46f5f9a6ff9\") " pod="openstack/nova-api-a886-account-create-update-5tlsd" Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.717883 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aff6a5d2-6643-4aa8-8427-c01cfc232232-operator-scripts\") pod \"nova-cell0-db-create-z4p9q\" (UID: \"aff6a5d2-6643-4aa8-8427-c01cfc232232\") " pod="openstack/nova-cell0-db-create-z4p9q" Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.761373 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrlld\" (UniqueName: \"kubernetes.io/projected/2f867733-84f1-4d24-8b1d-a46f5f9a6ff9-kube-api-access-lrlld\") pod \"nova-api-a886-account-create-update-5tlsd\" (UID: \"2f867733-84f1-4d24-8b1d-a46f5f9a6ff9\") " pod="openstack/nova-api-a886-account-create-update-5tlsd" Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.761513 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fl8s4\" (UniqueName: \"kubernetes.io/projected/aff6a5d2-6643-4aa8-8427-c01cfc232232-kube-api-access-fl8s4\") pod \"nova-cell0-db-create-z4p9q\" (UID: \"aff6a5d2-6643-4aa8-8427-c01cfc232232\") " pod="openstack/nova-cell0-db-create-z4p9q" Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.822117 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4nwnq\" (UniqueName: \"kubernetes.io/projected/29a36a50-28da-44d3-934a-89bcf37ac576-kube-api-access-4nwnq\") pod \"nova-cell1-db-create-kx85p\" (UID: \"29a36a50-28da-44d3-934a-89bcf37ac576\") " pod="openstack/nova-cell1-db-create-kx85p" Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.822161 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/889acc8d-10d5-490f-b9b7-d0514b16e62f-operator-scripts\") pod \"nova-cell0-777f-account-create-update-r5m24\" (UID: \"889acc8d-10d5-490f-b9b7-d0514b16e62f\") " pod="openstack/nova-cell0-777f-account-create-update-r5m24" Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.822190 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bkhg\" (UniqueName: \"kubernetes.io/projected/889acc8d-10d5-490f-b9b7-d0514b16e62f-kube-api-access-2bkhg\") pod \"nova-cell0-777f-account-create-update-r5m24\" (UID: \"889acc8d-10d5-490f-b9b7-d0514b16e62f\") " pod="openstack/nova-cell0-777f-account-create-update-r5m24" Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.822299 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29a36a50-28da-44d3-934a-89bcf37ac576-operator-scripts\") pod \"nova-cell1-db-create-kx85p\" (UID: \"29a36a50-28da-44d3-934a-89bcf37ac576\") " pod="openstack/nova-cell1-db-create-kx85p" Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.826092 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29a36a50-28da-44d3-934a-89bcf37ac576-operator-scripts\") pod \"nova-cell1-db-create-kx85p\" (UID: \"29a36a50-28da-44d3-934a-89bcf37ac576\") " pod="openstack/nova-cell1-db-create-kx85p" Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.833990 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-z4p9q" Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.847687 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4nwnq\" (UniqueName: \"kubernetes.io/projected/29a36a50-28da-44d3-934a-89bcf37ac576-kube-api-access-4nwnq\") pod \"nova-cell1-db-create-kx85p\" (UID: \"29a36a50-28da-44d3-934a-89bcf37ac576\") " pod="openstack/nova-cell1-db-create-kx85p" Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.857866 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-a886-account-create-update-5tlsd" Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.920921 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-bda6-account-create-update-p5xg6"] Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.922086 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-bda6-account-create-update-p5xg6" Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.924337 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/889acc8d-10d5-490f-b9b7-d0514b16e62f-operator-scripts\") pod \"nova-cell0-777f-account-create-update-r5m24\" (UID: \"889acc8d-10d5-490f-b9b7-d0514b16e62f\") " pod="openstack/nova-cell0-777f-account-create-update-r5m24" Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.924401 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bkhg\" (UniqueName: \"kubernetes.io/projected/889acc8d-10d5-490f-b9b7-d0514b16e62f-kube-api-access-2bkhg\") pod \"nova-cell0-777f-account-create-update-r5m24\" (UID: \"889acc8d-10d5-490f-b9b7-d0514b16e62f\") " pod="openstack/nova-cell0-777f-account-create-update-r5m24" Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.925410 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/889acc8d-10d5-490f-b9b7-d0514b16e62f-operator-scripts\") pod \"nova-cell0-777f-account-create-update-r5m24\" (UID: \"889acc8d-10d5-490f-b9b7-d0514b16e62f\") " pod="openstack/nova-cell0-777f-account-create-update-r5m24" Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.954463 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bkhg\" (UniqueName: \"kubernetes.io/projected/889acc8d-10d5-490f-b9b7-d0514b16e62f-kube-api-access-2bkhg\") pod \"nova-cell0-777f-account-create-update-r5m24\" (UID: \"889acc8d-10d5-490f-b9b7-d0514b16e62f\") " pod="openstack/nova-cell0-777f-account-create-update-r5m24" Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.950739 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Jan 22 06:08:07 crc kubenswrapper[4982]: I0122 06:08:07.955169 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-bda6-account-create-update-p5xg6"] Jan 22 06:08:08 crc kubenswrapper[4982]: I0122 06:08:08.016443 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-kx85p" Jan 22 06:08:08 crc kubenswrapper[4982]: I0122 06:08:08.027196 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcqx5\" (UniqueName: \"kubernetes.io/projected/ae2b03b5-a7a9-4da1-bf23-8b589c508d26-kube-api-access-kcqx5\") pod \"nova-cell1-bda6-account-create-update-p5xg6\" (UID: \"ae2b03b5-a7a9-4da1-bf23-8b589c508d26\") " pod="openstack/nova-cell1-bda6-account-create-update-p5xg6" Jan 22 06:08:08 crc kubenswrapper[4982]: I0122 06:08:08.027794 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ae2b03b5-a7a9-4da1-bf23-8b589c508d26-operator-scripts\") pod \"nova-cell1-bda6-account-create-update-p5xg6\" (UID: \"ae2b03b5-a7a9-4da1-bf23-8b589c508d26\") " pod="openstack/nova-cell1-bda6-account-create-update-p5xg6" Jan 22 06:08:08 crc kubenswrapper[4982]: I0122 06:08:08.107544 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-777f-account-create-update-r5m24" Jan 22 06:08:08 crc kubenswrapper[4982]: I0122 06:08:08.129515 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ae2b03b5-a7a9-4da1-bf23-8b589c508d26-operator-scripts\") pod \"nova-cell1-bda6-account-create-update-p5xg6\" (UID: \"ae2b03b5-a7a9-4da1-bf23-8b589c508d26\") " pod="openstack/nova-cell1-bda6-account-create-update-p5xg6" Jan 22 06:08:08 crc kubenswrapper[4982]: I0122 06:08:08.131127 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcqx5\" (UniqueName: \"kubernetes.io/projected/ae2b03b5-a7a9-4da1-bf23-8b589c508d26-kube-api-access-kcqx5\") pod \"nova-cell1-bda6-account-create-update-p5xg6\" (UID: \"ae2b03b5-a7a9-4da1-bf23-8b589c508d26\") " pod="openstack/nova-cell1-bda6-account-create-update-p5xg6" Jan 22 06:08:08 crc kubenswrapper[4982]: I0122 06:08:08.131048 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ae2b03b5-a7a9-4da1-bf23-8b589c508d26-operator-scripts\") pod \"nova-cell1-bda6-account-create-update-p5xg6\" (UID: \"ae2b03b5-a7a9-4da1-bf23-8b589c508d26\") " pod="openstack/nova-cell1-bda6-account-create-update-p5xg6" Jan 22 06:08:08 crc kubenswrapper[4982]: I0122 06:08:08.148622 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcqx5\" (UniqueName: \"kubernetes.io/projected/ae2b03b5-a7a9-4da1-bf23-8b589c508d26-kube-api-access-kcqx5\") pod \"nova-cell1-bda6-account-create-update-p5xg6\" (UID: \"ae2b03b5-a7a9-4da1-bf23-8b589c508d26\") " pod="openstack/nova-cell1-bda6-account-create-update-p5xg6" Jan 22 06:08:08 crc kubenswrapper[4982]: I0122 06:08:08.283606 4982 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 06:08:08 crc kubenswrapper[4982]: I0122 06:08:08.283630 4982 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 06:08:08 crc kubenswrapper[4982]: I0122 06:08:08.288948 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-bda6-account-create-update-p5xg6" Jan 22 06:08:08 crc kubenswrapper[4982]: I0122 06:08:08.303127 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 22 06:08:08 crc kubenswrapper[4982]: I0122 06:08:08.334016 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-7pgw9"] Jan 22 06:08:08 crc kubenswrapper[4982]: I0122 06:08:08.432432 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-z4p9q"] Jan 22 06:08:08 crc kubenswrapper[4982]: W0122 06:08:08.457921 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaff6a5d2_6643_4aa8_8427_c01cfc232232.slice/crio-52d64e8f8be40fdfe78cdec07084473b0f2e17d271c16c8f8393681cfb205254 WatchSource:0}: Error finding container 52d64e8f8be40fdfe78cdec07084473b0f2e17d271c16c8f8393681cfb205254: Status 404 returned error can't find the container with id 52d64e8f8be40fdfe78cdec07084473b0f2e17d271c16c8f8393681cfb205254 Jan 22 06:08:08 crc kubenswrapper[4982]: I0122 06:08:08.545666 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-777f-account-create-update-r5m24"] Jan 22 06:08:08 crc kubenswrapper[4982]: I0122 06:08:08.562137 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 22 06:08:08 crc kubenswrapper[4982]: I0122 06:08:08.562196 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 22 06:08:08 crc kubenswrapper[4982]: I0122 06:08:08.573567 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-a886-account-create-update-5tlsd"] Jan 22 06:08:08 crc kubenswrapper[4982]: W0122 06:08:08.586703 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2f867733_84f1_4d24_8b1d_a46f5f9a6ff9.slice/crio-372744438a6481b9d52e03a4f0b6426def70271867d4ec141942b628649c94a2 WatchSource:0}: Error finding container 372744438a6481b9d52e03a4f0b6426def70271867d4ec141942b628649c94a2: Status 404 returned error can't find the container with id 372744438a6481b9d52e03a4f0b6426def70271867d4ec141942b628649c94a2 Jan 22 06:08:08 crc kubenswrapper[4982]: I0122 06:08:08.603182 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 22 06:08:08 crc kubenswrapper[4982]: I0122 06:08:08.622633 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-kx85p"] Jan 22 06:08:08 crc kubenswrapper[4982]: I0122 06:08:08.673512 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 22 06:08:08 crc kubenswrapper[4982]: I0122 06:08:08.709466 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 22 06:08:08 crc kubenswrapper[4982]: I0122 06:08:08.890791 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-bda6-account-create-update-p5xg6"] Jan 22 06:08:08 crc kubenswrapper[4982]: W0122 06:08:08.922788 4982 manager.go:1169] Failed to process watch event {EventType:0 
Jan 22 06:08:09 crc kubenswrapper[4982]: I0122 06:08:09.290281 4982 generic.go:334] "Generic (PLEG): container finished" podID="56abf6c5-1b18-45ab-ab46-8359b05d5a19" containerID="eb0c40ec25acf7dd254ab07ffed2db26083e872714d82529004cbcb0eb6f9efc" exitCode=0
Jan 22 06:08:09 crc kubenswrapper[4982]: I0122 06:08:09.290342 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-7pgw9" event={"ID":"56abf6c5-1b18-45ab-ab46-8359b05d5a19","Type":"ContainerDied","Data":"eb0c40ec25acf7dd254ab07ffed2db26083e872714d82529004cbcb0eb6f9efc"}
Jan 22 06:08:09 crc kubenswrapper[4982]: I0122 06:08:09.290368 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-7pgw9" event={"ID":"56abf6c5-1b18-45ab-ab46-8359b05d5a19","Type":"ContainerStarted","Data":"336ee4628fe407f22cd65a70a74f21912633f858e60cc878400b300c50a0780f"}
Jan 22 06:08:09 crc kubenswrapper[4982]: I0122 06:08:09.291754 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-777f-account-create-update-r5m24" event={"ID":"889acc8d-10d5-490f-b9b7-d0514b16e62f","Type":"ContainerStarted","Data":"194a13dd0cc2fbf71ccad2e58ec061d6e3b65c7e8864d90cde9282b9aa6f28ee"}
Jan 22 06:08:09 crc kubenswrapper[4982]: I0122 06:08:09.291785 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-777f-account-create-update-r5m24" event={"ID":"889acc8d-10d5-490f-b9b7-d0514b16e62f","Type":"ContainerStarted","Data":"5740809c2c42f559bc2caa45e83baa4280b8163d7aa36f15df65b2711ec12ba8"}
Jan 22 06:08:09 crc kubenswrapper[4982]: I0122 06:08:09.293778 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-kx85p" event={"ID":"29a36a50-28da-44d3-934a-89bcf37ac576","Type":"ContainerStarted","Data":"726972a3f7460ea2bdb05ff75bb24b29dea55fb5c63cd73a84097c5546b25300"}
Jan 22 06:08:09 crc kubenswrapper[4982]: I0122 06:08:09.293806 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-kx85p" event={"ID":"29a36a50-28da-44d3-934a-89bcf37ac576","Type":"ContainerStarted","Data":"a12062de8b7a017b8c12cc8d0f03cdade4ec516008f6055ab3139141a3f403e1"}
Jan 22 06:08:09 crc kubenswrapper[4982]: I0122 06:08:09.296840 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-a886-account-create-update-5tlsd" event={"ID":"2f867733-84f1-4d24-8b1d-a46f5f9a6ff9","Type":"ContainerStarted","Data":"8da7b8a952ce6725bd6a1fdde48ae9f4e266b5c3da9591407a37d95476e23e5c"}
Jan 22 06:08:09 crc kubenswrapper[4982]: I0122 06:08:09.296879 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-a886-account-create-update-5tlsd" event={"ID":"2f867733-84f1-4d24-8b1d-a46f5f9a6ff9","Type":"ContainerStarted","Data":"372744438a6481b9d52e03a4f0b6426def70271867d4ec141942b628649c94a2"}
Jan 22 06:08:09 crc kubenswrapper[4982]: I0122 06:08:09.298680 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-bda6-account-create-update-p5xg6" event={"ID":"ae2b03b5-a7a9-4da1-bf23-8b589c508d26","Type":"ContainerStarted","Data":"ea9f4629d193aa5031d52f0fba91720a98d8fd32d9a98e1f798ded1fb349ec0d"}
event={"ID":"ae2b03b5-a7a9-4da1-bf23-8b589c508d26","Type":"ContainerStarted","Data":"ea9f4629d193aa5031d52f0fba91720a98d8fd32d9a98e1f798ded1fb349ec0d"} Jan 22 06:08:09 crc kubenswrapper[4982]: I0122 06:08:09.298703 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-bda6-account-create-update-p5xg6" event={"ID":"ae2b03b5-a7a9-4da1-bf23-8b589c508d26","Type":"ContainerStarted","Data":"55f219578a2f9a9700cb2c9559c0d56112c10ff6e84fbce8b6cd25e3df8a9145"} Jan 22 06:08:09 crc kubenswrapper[4982]: I0122 06:08:09.300660 4982 generic.go:334] "Generic (PLEG): container finished" podID="aff6a5d2-6643-4aa8-8427-c01cfc232232" containerID="e10d5991711ac7163d6ba6df5bcfcc6f76d125dba4e9363a2de9e4bb3ba354b1" exitCode=0 Jan 22 06:08:09 crc kubenswrapper[4982]: I0122 06:08:09.300968 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-z4p9q" event={"ID":"aff6a5d2-6643-4aa8-8427-c01cfc232232","Type":"ContainerDied","Data":"e10d5991711ac7163d6ba6df5bcfcc6f76d125dba4e9363a2de9e4bb3ba354b1"} Jan 22 06:08:09 crc kubenswrapper[4982]: I0122 06:08:09.301006 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-z4p9q" event={"ID":"aff6a5d2-6643-4aa8-8427-c01cfc232232","Type":"ContainerStarted","Data":"52d64e8f8be40fdfe78cdec07084473b0f2e17d271c16c8f8393681cfb205254"} Jan 22 06:08:09 crc kubenswrapper[4982]: I0122 06:08:09.301568 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 22 06:08:09 crc kubenswrapper[4982]: I0122 06:08:09.301735 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 22 06:08:09 crc kubenswrapper[4982]: I0122 06:08:09.346054 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-777f-account-create-update-r5m24" podStartSLOduration=2.346028604 podStartE2EDuration="2.346028604s" podCreationTimestamp="2026-01-22 06:08:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:08:09.3370566 +0000 UTC m=+1350.175694603" watchObservedRunningTime="2026-01-22 06:08:09.346028604 +0000 UTC m=+1350.184666607" Jan 22 06:08:09 crc kubenswrapper[4982]: I0122 06:08:09.350612 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-a886-account-create-update-5tlsd" podStartSLOduration=2.350597003 podStartE2EDuration="2.350597003s" podCreationTimestamp="2026-01-22 06:08:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:08:09.323096297 +0000 UTC m=+1350.161734300" watchObservedRunningTime="2026-01-22 06:08:09.350597003 +0000 UTC m=+1350.189235016" Jan 22 06:08:09 crc kubenswrapper[4982]: I0122 06:08:09.373805 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-bda6-account-create-update-p5xg6" podStartSLOduration=2.373698183 podStartE2EDuration="2.373698183s" podCreationTimestamp="2026-01-22 06:08:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:08:09.373112758 +0000 UTC m=+1350.211750761" watchObservedRunningTime="2026-01-22 06:08:09.373698183 +0000 UTC m=+1350.212336186" Jan 22 06:08:09 crc kubenswrapper[4982]: I0122 06:08:09.390903 4982 
Jan 22 06:08:10 crc kubenswrapper[4982]: I0122 06:08:10.314389 4982 generic.go:334] "Generic (PLEG): container finished" podID="889acc8d-10d5-490f-b9b7-d0514b16e62f" containerID="194a13dd0cc2fbf71ccad2e58ec061d6e3b65c7e8864d90cde9282b9aa6f28ee" exitCode=0
Jan 22 06:08:10 crc kubenswrapper[4982]: I0122 06:08:10.314593 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-777f-account-create-update-r5m24" event={"ID":"889acc8d-10d5-490f-b9b7-d0514b16e62f","Type":"ContainerDied","Data":"194a13dd0cc2fbf71ccad2e58ec061d6e3b65c7e8864d90cde9282b9aa6f28ee"}
Jan 22 06:08:10 crc kubenswrapper[4982]: I0122 06:08:10.320740 4982 generic.go:334] "Generic (PLEG): container finished" podID="29a36a50-28da-44d3-934a-89bcf37ac576" containerID="726972a3f7460ea2bdb05ff75bb24b29dea55fb5c63cd73a84097c5546b25300" exitCode=0
Jan 22 06:08:10 crc kubenswrapper[4982]: I0122 06:08:10.320924 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-kx85p" event={"ID":"29a36a50-28da-44d3-934a-89bcf37ac576","Type":"ContainerDied","Data":"726972a3f7460ea2bdb05ff75bb24b29dea55fb5c63cd73a84097c5546b25300"}
Jan 22 06:08:10 crc kubenswrapper[4982]: I0122 06:08:10.323296 4982 generic.go:334] "Generic (PLEG): container finished" podID="2f867733-84f1-4d24-8b1d-a46f5f9a6ff9" containerID="8da7b8a952ce6725bd6a1fdde48ae9f4e266b5c3da9591407a37d95476e23e5c" exitCode=0
Jan 22 06:08:10 crc kubenswrapper[4982]: I0122 06:08:10.323422 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-a886-account-create-update-5tlsd" event={"ID":"2f867733-84f1-4d24-8b1d-a46f5f9a6ff9","Type":"ContainerDied","Data":"8da7b8a952ce6725bd6a1fdde48ae9f4e266b5c3da9591407a37d95476e23e5c"}
Jan 22 06:08:10 crc kubenswrapper[4982]: I0122 06:08:10.327596 4982 generic.go:334] "Generic (PLEG): container finished" podID="ae2b03b5-a7a9-4da1-bf23-8b589c508d26" containerID="ea9f4629d193aa5031d52f0fba91720a98d8fd32d9a98e1f798ded1fb349ec0d" exitCode=0
Jan 22 06:08:10 crc kubenswrapper[4982]: I0122 06:08:10.327689 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-bda6-account-create-update-p5xg6" event={"ID":"ae2b03b5-a7a9-4da1-bf23-8b589c508d26","Type":"ContainerDied","Data":"ea9f4629d193aa5031d52f0fba91720a98d8fd32d9a98e1f798ded1fb349ec0d"}
Jan 22 06:08:10 crc kubenswrapper[4982]: I0122 06:08:10.815191 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-7pgw9"
Jan 22 06:08:10 crc kubenswrapper[4982]: I0122 06:08:10.823200 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-z4p9q"
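
Note the two sandbox messages in this excerpt: util.go:30 logs "No sandbox for pod can be found" when a pod is first synced and has no sandbox yet, while util.go:48 logs "No ready sandbox for pod can be found" once an existing sandbox is no longer ready, as with these completed job pods whose sandboxes have exited (the exact call sites are inferred from the file:line tags in the log). The distinction matters when skimming: the second variant for a finished job is routine teardown, not a restart problem. Counting both variants per pod separates the cases; the script and filename are illustrative assumptions:

import re
from collections import Counter

sandbox_re = re.compile(
    r'util\.go:\d+\] "No (ready )?sandbox for pod can be found\. '
    r'Need to start a new one" pod="([^"]+)"')

counts = Counter()
with open("kubelet.log") as f:   # assumed path
    for line in f:
        m = sandbox_re.search(line)
        if m:
            kind = "no-ready-sandbox" if m.group(1) else "no-sandbox"
            counts[(kind, m.group(2))] += 1

for (kind, pod), n in sorted(counts.items()):
    print(f"{kind:17} {pod}: {n}")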
Need to start a new one" pod="openstack/nova-cell0-db-create-z4p9q" Jan 22 06:08:10 crc kubenswrapper[4982]: I0122 06:08:10.927834 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q9qkf\" (UniqueName: \"kubernetes.io/projected/56abf6c5-1b18-45ab-ab46-8359b05d5a19-kube-api-access-q9qkf\") pod \"56abf6c5-1b18-45ab-ab46-8359b05d5a19\" (UID: \"56abf6c5-1b18-45ab-ab46-8359b05d5a19\") " Jan 22 06:08:10 crc kubenswrapper[4982]: I0122 06:08:10.927914 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fl8s4\" (UniqueName: \"kubernetes.io/projected/aff6a5d2-6643-4aa8-8427-c01cfc232232-kube-api-access-fl8s4\") pod \"aff6a5d2-6643-4aa8-8427-c01cfc232232\" (UID: \"aff6a5d2-6643-4aa8-8427-c01cfc232232\") " Jan 22 06:08:10 crc kubenswrapper[4982]: I0122 06:08:10.927945 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aff6a5d2-6643-4aa8-8427-c01cfc232232-operator-scripts\") pod \"aff6a5d2-6643-4aa8-8427-c01cfc232232\" (UID: \"aff6a5d2-6643-4aa8-8427-c01cfc232232\") " Jan 22 06:08:10 crc kubenswrapper[4982]: I0122 06:08:10.928590 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/56abf6c5-1b18-45ab-ab46-8359b05d5a19-operator-scripts\") pod \"56abf6c5-1b18-45ab-ab46-8359b05d5a19\" (UID: \"56abf6c5-1b18-45ab-ab46-8359b05d5a19\") " Jan 22 06:08:10 crc kubenswrapper[4982]: I0122 06:08:10.929674 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56abf6c5-1b18-45ab-ab46-8359b05d5a19-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "56abf6c5-1b18-45ab-ab46-8359b05d5a19" (UID: "56abf6c5-1b18-45ab-ab46-8359b05d5a19"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:08:10 crc kubenswrapper[4982]: I0122 06:08:10.930083 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aff6a5d2-6643-4aa8-8427-c01cfc232232-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "aff6a5d2-6643-4aa8-8427-c01cfc232232" (UID: "aff6a5d2-6643-4aa8-8427-c01cfc232232"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:08:10 crc kubenswrapper[4982]: I0122 06:08:10.934269 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56abf6c5-1b18-45ab-ab46-8359b05d5a19-kube-api-access-q9qkf" (OuterVolumeSpecName: "kube-api-access-q9qkf") pod "56abf6c5-1b18-45ab-ab46-8359b05d5a19" (UID: "56abf6c5-1b18-45ab-ab46-8359b05d5a19"). InnerVolumeSpecName "kube-api-access-q9qkf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:08:10 crc kubenswrapper[4982]: I0122 06:08:10.936988 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aff6a5d2-6643-4aa8-8427-c01cfc232232-kube-api-access-fl8s4" (OuterVolumeSpecName: "kube-api-access-fl8s4") pod "aff6a5d2-6643-4aa8-8427-c01cfc232232" (UID: "aff6a5d2-6643-4aa8-8427-c01cfc232232"). InnerVolumeSpecName "kube-api-access-fl8s4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:08:11 crc kubenswrapper[4982]: I0122 06:08:11.030738 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q9qkf\" (UniqueName: \"kubernetes.io/projected/56abf6c5-1b18-45ab-ab46-8359b05d5a19-kube-api-access-q9qkf\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:11 crc kubenswrapper[4982]: I0122 06:08:11.030763 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fl8s4\" (UniqueName: \"kubernetes.io/projected/aff6a5d2-6643-4aa8-8427-c01cfc232232-kube-api-access-fl8s4\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:11 crc kubenswrapper[4982]: I0122 06:08:11.030772 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aff6a5d2-6643-4aa8-8427-c01cfc232232-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:11 crc kubenswrapper[4982]: I0122 06:08:11.030780 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/56abf6c5-1b18-45ab-ab46-8359b05d5a19-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:11 crc kubenswrapper[4982]: I0122 06:08:11.171639 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Jan 22 06:08:11 crc kubenswrapper[4982]: I0122 06:08:11.336218 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-z4p9q" event={"ID":"aff6a5d2-6643-4aa8-8427-c01cfc232232","Type":"ContainerDied","Data":"52d64e8f8be40fdfe78cdec07084473b0f2e17d271c16c8f8393681cfb205254"} Jan 22 06:08:11 crc kubenswrapper[4982]: I0122 06:08:11.336426 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="52d64e8f8be40fdfe78cdec07084473b0f2e17d271c16c8f8393681cfb205254" Jan 22 06:08:11 crc kubenswrapper[4982]: I0122 06:08:11.336278 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-z4p9q" Jan 22 06:08:11 crc kubenswrapper[4982]: I0122 06:08:11.338243 4982 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 06:08:11 crc kubenswrapper[4982]: I0122 06:08:11.338255 4982 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 06:08:11 crc kubenswrapper[4982]: I0122 06:08:11.338680 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-7pgw9" Jan 22 06:08:11 crc kubenswrapper[4982]: I0122 06:08:11.347459 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-7pgw9" event={"ID":"56abf6c5-1b18-45ab-ab46-8359b05d5a19","Type":"ContainerDied","Data":"336ee4628fe407f22cd65a70a74f21912633f858e60cc878400b300c50a0780f"} Jan 22 06:08:11 crc kubenswrapper[4982]: I0122 06:08:11.348162 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="336ee4628fe407f22cd65a70a74f21912633f858e60cc878400b300c50a0780f" Jan 22 06:08:11 crc kubenswrapper[4982]: I0122 06:08:11.975578 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-bda6-account-create-update-p5xg6" Jan 22 06:08:11 crc kubenswrapper[4982]: I0122 06:08:11.999099 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-a886-account-create-update-5tlsd" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.018221 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-777f-account-create-update-r5m24" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.098417 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.098612 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.105410 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-kx85p" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.147950 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/889acc8d-10d5-490f-b9b7-d0514b16e62f-operator-scripts\") pod \"889acc8d-10d5-490f-b9b7-d0514b16e62f\" (UID: \"889acc8d-10d5-490f-b9b7-d0514b16e62f\") " Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.148034 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kcqx5\" (UniqueName: \"kubernetes.io/projected/ae2b03b5-a7a9-4da1-bf23-8b589c508d26-kube-api-access-kcqx5\") pod \"ae2b03b5-a7a9-4da1-bf23-8b589c508d26\" (UID: \"ae2b03b5-a7a9-4da1-bf23-8b589c508d26\") " Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.148085 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ae2b03b5-a7a9-4da1-bf23-8b589c508d26-operator-scripts\") pod \"ae2b03b5-a7a9-4da1-bf23-8b589c508d26\" (UID: \"ae2b03b5-a7a9-4da1-bf23-8b589c508d26\") " Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.148147 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2bkhg\" (UniqueName: \"kubernetes.io/projected/889acc8d-10d5-490f-b9b7-d0514b16e62f-kube-api-access-2bkhg\") pod \"889acc8d-10d5-490f-b9b7-d0514b16e62f\" (UID: \"889acc8d-10d5-490f-b9b7-d0514b16e62f\") " Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.148219 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lrlld\" (UniqueName: \"kubernetes.io/projected/2f867733-84f1-4d24-8b1d-a46f5f9a6ff9-kube-api-access-lrlld\") pod \"2f867733-84f1-4d24-8b1d-a46f5f9a6ff9\" (UID: \"2f867733-84f1-4d24-8b1d-a46f5f9a6ff9\") " Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.148260 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f867733-84f1-4d24-8b1d-a46f5f9a6ff9-operator-scripts\") pod \"2f867733-84f1-4d24-8b1d-a46f5f9a6ff9\" (UID: \"2f867733-84f1-4d24-8b1d-a46f5f9a6ff9\") " Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.161604 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f867733-84f1-4d24-8b1d-a46f5f9a6ff9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2f867733-84f1-4d24-8b1d-a46f5f9a6ff9" (UID: "2f867733-84f1-4d24-8b1d-a46f5f9a6ff9"). InnerVolumeSpecName "operator-scripts". 
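
The "SyncLoop (probe)" lines above record readiness transitions: a prober worker re-runs the container's readiness probe and only reports when the result changes, which then wakes the sync loop; the "Failed to trigger a manual run" lines are the probe manager's non-blocking manual trigger being dropped, apparently because a run is already pending for that worker. A minimal sketch of the result-change gate, with invented types:

package main

import "fmt"

type probeResult bool

// worker caches the last readiness result and only reports transitions,
// which is why status="ready" appears once per change rather than per probe.
type worker struct {
	pod  string
	last *probeResult
}

func (w *worker) observe(ready probeResult, updates chan<- string) {
	if w.last != nil && *w.last == ready {
		return // unchanged; nothing for the sync loop to do
	}
	w.last = &ready
	status := "not ready"
	if ready {
		status = "ready"
	}
	updates <- fmt.Sprintf("SyncLoop (probe) probe=readiness status=%q pod=%s", status, w.pod)
}

func main() {
	updates := make(chan string, 2)
	w := &worker{pod: "openstack/cinder-api-0"}
	w.observe(true, updates)
	w.observe(true, updates) // duplicate result: suppressed
	close(updates)
	for u := range updates {
		fmt.Println(u)
	}
}
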
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.163831 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/889acc8d-10d5-490f-b9b7-d0514b16e62f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "889acc8d-10d5-490f-b9b7-d0514b16e62f" (UID: "889acc8d-10d5-490f-b9b7-d0514b16e62f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.178135 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae2b03b5-a7a9-4da1-bf23-8b589c508d26-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ae2b03b5-a7a9-4da1-bf23-8b589c508d26" (UID: "ae2b03b5-a7a9-4da1-bf23-8b589c508d26"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.186212 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/889acc8d-10d5-490f-b9b7-d0514b16e62f-kube-api-access-2bkhg" (OuterVolumeSpecName: "kube-api-access-2bkhg") pod "889acc8d-10d5-490f-b9b7-d0514b16e62f" (UID: "889acc8d-10d5-490f-b9b7-d0514b16e62f"). InnerVolumeSpecName "kube-api-access-2bkhg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.187203 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae2b03b5-a7a9-4da1-bf23-8b589c508d26-kube-api-access-kcqx5" (OuterVolumeSpecName: "kube-api-access-kcqx5") pod "ae2b03b5-a7a9-4da1-bf23-8b589c508d26" (UID: "ae2b03b5-a7a9-4da1-bf23-8b589c508d26"). InnerVolumeSpecName "kube-api-access-kcqx5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.187290 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f867733-84f1-4d24-8b1d-a46f5f9a6ff9-kube-api-access-lrlld" (OuterVolumeSpecName: "kube-api-access-lrlld") pod "2f867733-84f1-4d24-8b1d-a46f5f9a6ff9" (UID: "2f867733-84f1-4d24-8b1d-a46f5f9a6ff9"). InnerVolumeSpecName "kube-api-access-lrlld". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.258403 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29a36a50-28da-44d3-934a-89bcf37ac576-operator-scripts\") pod \"29a36a50-28da-44d3-934a-89bcf37ac576\" (UID: \"29a36a50-28da-44d3-934a-89bcf37ac576\") " Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.258554 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4nwnq\" (UniqueName: \"kubernetes.io/projected/29a36a50-28da-44d3-934a-89bcf37ac576-kube-api-access-4nwnq\") pod \"29a36a50-28da-44d3-934a-89bcf37ac576\" (UID: \"29a36a50-28da-44d3-934a-89bcf37ac576\") " Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.259033 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29a36a50-28da-44d3-934a-89bcf37ac576-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "29a36a50-28da-44d3-934a-89bcf37ac576" (UID: "29a36a50-28da-44d3-934a-89bcf37ac576"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.259638 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/889acc8d-10d5-490f-b9b7-d0514b16e62f-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.259712 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kcqx5\" (UniqueName: \"kubernetes.io/projected/ae2b03b5-a7a9-4da1-bf23-8b589c508d26-kube-api-access-kcqx5\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.259792 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ae2b03b5-a7a9-4da1-bf23-8b589c508d26-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.259845 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2bkhg\" (UniqueName: \"kubernetes.io/projected/889acc8d-10d5-490f-b9b7-d0514b16e62f-kube-api-access-2bkhg\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.259914 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29a36a50-28da-44d3-934a-89bcf37ac576-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.259977 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lrlld\" (UniqueName: \"kubernetes.io/projected/2f867733-84f1-4d24-8b1d-a46f5f9a6ff9-kube-api-access-lrlld\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.260030 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f867733-84f1-4d24-8b1d-a46f5f9a6ff9-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.266917 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29a36a50-28da-44d3-934a-89bcf37ac576-kube-api-access-4nwnq" (OuterVolumeSpecName: "kube-api-access-4nwnq") pod "29a36a50-28da-44d3-934a-89bcf37ac576" (UID: "29a36a50-28da-44d3-934a-89bcf37ac576"). InnerVolumeSpecName "kube-api-access-4nwnq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.346721 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-bda6-account-create-update-p5xg6" event={"ID":"ae2b03b5-a7a9-4da1-bf23-8b589c508d26","Type":"ContainerDied","Data":"55f219578a2f9a9700cb2c9559c0d56112c10ff6e84fbce8b6cd25e3df8a9145"} Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.346765 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="55f219578a2f9a9700cb2c9559c0d56112c10ff6e84fbce8b6cd25e3df8a9145" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.346764 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-bda6-account-create-update-p5xg6" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.348466 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-777f-account-create-update-r5m24" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.348482 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-777f-account-create-update-r5m24" event={"ID":"889acc8d-10d5-490f-b9b7-d0514b16e62f","Type":"ContainerDied","Data":"5740809c2c42f559bc2caa45e83baa4280b8163d7aa36f15df65b2711ec12ba8"} Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.348538 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5740809c2c42f559bc2caa45e83baa4280b8163d7aa36f15df65b2711ec12ba8" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.351700 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-kx85p" event={"ID":"29a36a50-28da-44d3-934a-89bcf37ac576","Type":"ContainerDied","Data":"a12062de8b7a017b8c12cc8d0f03cdade4ec516008f6055ab3139141a3f403e1"} Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.351744 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a12062de8b7a017b8c12cc8d0f03cdade4ec516008f6055ab3139141a3f403e1" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.351708 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-kx85p" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.354205 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-a886-account-create-update-5tlsd" event={"ID":"2f867733-84f1-4d24-8b1d-a46f5f9a6ff9","Type":"ContainerDied","Data":"372744438a6481b9d52e03a4f0b6426def70271867d4ec141942b628649c94a2"} Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.354264 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="372744438a6481b9d52e03a4f0b6426def70271867d4ec141942b628649c94a2" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.354228 4982 util.go:48] "No ready sandbox for pod can be found. 
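
The recurring util.go:48 message "No ready sandbox for pod can be found. Need to start a new one" is the runtime manager concluding that a pod has no usable sandbox left; here the one-shot database/account jobs have exited and their sandboxes were torn down, so any further sync would have to create a fresh one. A minimal sketch of that decision, assuming a simplified SandboxStatus instead of the real CRI types:

package main

import "fmt"

type SandboxState int

const (
	SandboxReady SandboxState = iota
	SandboxNotReady
)

type SandboxStatus struct {
	ID    string
	State SandboxState
}

// needNewSandbox reports whether a fresh sandbox must be created: there is
// none at all, or none of the existing ones is in the ready state.
func needNewSandbox(sandboxes []SandboxStatus) bool {
	for _, s := range sandboxes {
		if s.State == SandboxReady {
			return false
		}
	}
	return true
}

func main() {
	if needNewSandbox(nil) {
		fmt.Println(`"No ready sandbox for pod can be found. Need to start a new one"`)
	}
}
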
Need to start a new one" pod="openstack/nova-api-a886-account-create-update-5tlsd" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.362002 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4nwnq\" (UniqueName: \"kubernetes.io/projected/29a36a50-28da-44d3-934a-89bcf37ac576-kube-api-access-4nwnq\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.956635 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-2jrzv"] Jan 22 06:08:12 crc kubenswrapper[4982]: E0122 06:08:12.957227 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29a36a50-28da-44d3-934a-89bcf37ac576" containerName="mariadb-database-create" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.957244 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="29a36a50-28da-44d3-934a-89bcf37ac576" containerName="mariadb-database-create" Jan 22 06:08:12 crc kubenswrapper[4982]: E0122 06:08:12.957262 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f867733-84f1-4d24-8b1d-a46f5f9a6ff9" containerName="mariadb-account-create-update" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.957268 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f867733-84f1-4d24-8b1d-a46f5f9a6ff9" containerName="mariadb-account-create-update" Jan 22 06:08:12 crc kubenswrapper[4982]: E0122 06:08:12.957281 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56abf6c5-1b18-45ab-ab46-8359b05d5a19" containerName="mariadb-database-create" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.957287 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="56abf6c5-1b18-45ab-ab46-8359b05d5a19" containerName="mariadb-database-create" Jan 22 06:08:12 crc kubenswrapper[4982]: E0122 06:08:12.957304 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae2b03b5-a7a9-4da1-bf23-8b589c508d26" containerName="mariadb-account-create-update" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.957309 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae2b03b5-a7a9-4da1-bf23-8b589c508d26" containerName="mariadb-account-create-update" Jan 22 06:08:12 crc kubenswrapper[4982]: E0122 06:08:12.957320 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aff6a5d2-6643-4aa8-8427-c01cfc232232" containerName="mariadb-database-create" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.957325 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="aff6a5d2-6643-4aa8-8427-c01cfc232232" containerName="mariadb-database-create" Jan 22 06:08:12 crc kubenswrapper[4982]: E0122 06:08:12.957338 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="889acc8d-10d5-490f-b9b7-d0514b16e62f" containerName="mariadb-account-create-update" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.957345 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="889acc8d-10d5-490f-b9b7-d0514b16e62f" containerName="mariadb-account-create-update" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.957489 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="889acc8d-10d5-490f-b9b7-d0514b16e62f" containerName="mariadb-account-create-update" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.957508 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="aff6a5d2-6643-4aa8-8427-c01cfc232232" containerName="mariadb-database-create" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.957517 4982 
memory_manager.go:354] "RemoveStaleState removing state" podUID="ae2b03b5-a7a9-4da1-bf23-8b589c508d26" containerName="mariadb-account-create-update" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.957527 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="56abf6c5-1b18-45ab-ab46-8359b05d5a19" containerName="mariadb-database-create" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.957537 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f867733-84f1-4d24-8b1d-a46f5f9a6ff9" containerName="mariadb-account-create-update" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.957546 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="29a36a50-28da-44d3-934a-89bcf37ac576" containerName="mariadb-database-create" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.958166 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-2jrzv" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.962393 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.962643 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-jqvj4" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.969429 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Jan 22 06:08:12 crc kubenswrapper[4982]: I0122 06:08:12.973738 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-2jrzv"] Jan 22 06:08:13 crc kubenswrapper[4982]: I0122 06:08:13.074204 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1bbcbea-8862-4397-a672-5083ba34c23e-config-data\") pod \"nova-cell0-conductor-db-sync-2jrzv\" (UID: \"b1bbcbea-8862-4397-a672-5083ba34c23e\") " pod="openstack/nova-cell0-conductor-db-sync-2jrzv" Jan 22 06:08:13 crc kubenswrapper[4982]: I0122 06:08:13.074272 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1bbcbea-8862-4397-a672-5083ba34c23e-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-2jrzv\" (UID: \"b1bbcbea-8862-4397-a672-5083ba34c23e\") " pod="openstack/nova-cell0-conductor-db-sync-2jrzv" Jan 22 06:08:13 crc kubenswrapper[4982]: I0122 06:08:13.074309 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ft7jl\" (UniqueName: \"kubernetes.io/projected/b1bbcbea-8862-4397-a672-5083ba34c23e-kube-api-access-ft7jl\") pod \"nova-cell0-conductor-db-sync-2jrzv\" (UID: \"b1bbcbea-8862-4397-a672-5083ba34c23e\") " pod="openstack/nova-cell0-conductor-db-sync-2jrzv" Jan 22 06:08:13 crc kubenswrapper[4982]: I0122 06:08:13.074399 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1bbcbea-8862-4397-a672-5083ba34c23e-scripts\") pod \"nova-cell0-conductor-db-sync-2jrzv\" (UID: \"b1bbcbea-8862-4397-a672-5083ba34c23e\") " pod="openstack/nova-cell0-conductor-db-sync-2jrzv" Jan 22 06:08:13 crc kubenswrapper[4982]: I0122 06:08:13.175596 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/b1bbcbea-8862-4397-a672-5083ba34c23e-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-2jrzv\" (UID: \"b1bbcbea-8862-4397-a672-5083ba34c23e\") " pod="openstack/nova-cell0-conductor-db-sync-2jrzv" Jan 22 06:08:13 crc kubenswrapper[4982]: I0122 06:08:13.175641 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ft7jl\" (UniqueName: \"kubernetes.io/projected/b1bbcbea-8862-4397-a672-5083ba34c23e-kube-api-access-ft7jl\") pod \"nova-cell0-conductor-db-sync-2jrzv\" (UID: \"b1bbcbea-8862-4397-a672-5083ba34c23e\") " pod="openstack/nova-cell0-conductor-db-sync-2jrzv" Jan 22 06:08:13 crc kubenswrapper[4982]: I0122 06:08:13.175670 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1bbcbea-8862-4397-a672-5083ba34c23e-scripts\") pod \"nova-cell0-conductor-db-sync-2jrzv\" (UID: \"b1bbcbea-8862-4397-a672-5083ba34c23e\") " pod="openstack/nova-cell0-conductor-db-sync-2jrzv" Jan 22 06:08:13 crc kubenswrapper[4982]: I0122 06:08:13.175803 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1bbcbea-8862-4397-a672-5083ba34c23e-config-data\") pod \"nova-cell0-conductor-db-sync-2jrzv\" (UID: \"b1bbcbea-8862-4397-a672-5083ba34c23e\") " pod="openstack/nova-cell0-conductor-db-sync-2jrzv" Jan 22 06:08:13 crc kubenswrapper[4982]: I0122 06:08:13.180689 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1bbcbea-8862-4397-a672-5083ba34c23e-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-2jrzv\" (UID: \"b1bbcbea-8862-4397-a672-5083ba34c23e\") " pod="openstack/nova-cell0-conductor-db-sync-2jrzv" Jan 22 06:08:13 crc kubenswrapper[4982]: I0122 06:08:13.181347 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1bbcbea-8862-4397-a672-5083ba34c23e-scripts\") pod \"nova-cell0-conductor-db-sync-2jrzv\" (UID: \"b1bbcbea-8862-4397-a672-5083ba34c23e\") " pod="openstack/nova-cell0-conductor-db-sync-2jrzv" Jan 22 06:08:13 crc kubenswrapper[4982]: I0122 06:08:13.182072 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1bbcbea-8862-4397-a672-5083ba34c23e-config-data\") pod \"nova-cell0-conductor-db-sync-2jrzv\" (UID: \"b1bbcbea-8862-4397-a672-5083ba34c23e\") " pod="openstack/nova-cell0-conductor-db-sync-2jrzv" Jan 22 06:08:13 crc kubenswrapper[4982]: I0122 06:08:13.194581 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ft7jl\" (UniqueName: \"kubernetes.io/projected/b1bbcbea-8862-4397-a672-5083ba34c23e-kube-api-access-ft7jl\") pod \"nova-cell0-conductor-db-sync-2jrzv\" (UID: \"b1bbcbea-8862-4397-a672-5083ba34c23e\") " pod="openstack/nova-cell0-conductor-db-sync-2jrzv" Jan 22 06:08:13 crc kubenswrapper[4982]: I0122 06:08:13.313891 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:08:13 crc kubenswrapper[4982]: I0122 06:08:13.314161 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="08c83842-d8b5-4f79-98d5-8822a6716046" containerName="ceilometer-central-agent" containerID="cri-o://4dbe8daf61d54d91fb3d1fadec30f7969810f10549b1154a5884920ceecd9425" gracePeriod=30 Jan 22 06:08:13 crc kubenswrapper[4982]: I0122 06:08:13.314247 4982 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="08c83842-d8b5-4f79-98d5-8822a6716046" containerName="sg-core" containerID="cri-o://27905b82ecca95fdc8666b6e06b7d9ad4b83698b954a35ee4cff76daae176428" gracePeriod=30 Jan 22 06:08:13 crc kubenswrapper[4982]: I0122 06:08:13.314336 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="08c83842-d8b5-4f79-98d5-8822a6716046" containerName="proxy-httpd" containerID="cri-o://cfa50ade837d368deabf7da090c6876df6855a9c350204462403f2149d091c61" gracePeriod=30 Jan 22 06:08:13 crc kubenswrapper[4982]: I0122 06:08:13.314263 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="08c83842-d8b5-4f79-98d5-8822a6716046" containerName="ceilometer-notification-agent" containerID="cri-o://2228fe15d0c41108ea21cdef57d85a06e53c58784daf02c9d65e24de860de72f" gracePeriod=30 Jan 22 06:08:13 crc kubenswrapper[4982]: I0122 06:08:13.328163 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-2jrzv" Jan 22 06:08:13 crc kubenswrapper[4982]: I0122 06:08:13.786527 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-2jrzv"] Jan 22 06:08:14 crc kubenswrapper[4982]: I0122 06:08:14.375546 4982 generic.go:334] "Generic (PLEG): container finished" podID="08c83842-d8b5-4f79-98d5-8822a6716046" containerID="cfa50ade837d368deabf7da090c6876df6855a9c350204462403f2149d091c61" exitCode=0 Jan 22 06:08:14 crc kubenswrapper[4982]: I0122 06:08:14.375867 4982 generic.go:334] "Generic (PLEG): container finished" podID="08c83842-d8b5-4f79-98d5-8822a6716046" containerID="27905b82ecca95fdc8666b6e06b7d9ad4b83698b954a35ee4cff76daae176428" exitCode=2 Jan 22 06:08:14 crc kubenswrapper[4982]: I0122 06:08:14.375879 4982 generic.go:334] "Generic (PLEG): container finished" podID="08c83842-d8b5-4f79-98d5-8822a6716046" containerID="4dbe8daf61d54d91fb3d1fadec30f7969810f10549b1154a5884920ceecd9425" exitCode=0 Jan 22 06:08:14 crc kubenswrapper[4982]: I0122 06:08:14.375676 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08c83842-d8b5-4f79-98d5-8822a6716046","Type":"ContainerDied","Data":"cfa50ade837d368deabf7da090c6876df6855a9c350204462403f2149d091c61"} Jan 22 06:08:14 crc kubenswrapper[4982]: I0122 06:08:14.375954 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08c83842-d8b5-4f79-98d5-8822a6716046","Type":"ContainerDied","Data":"27905b82ecca95fdc8666b6e06b7d9ad4b83698b954a35ee4cff76daae176428"} Jan 22 06:08:14 crc kubenswrapper[4982]: I0122 06:08:14.375976 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08c83842-d8b5-4f79-98d5-8822a6716046","Type":"ContainerDied","Data":"4dbe8daf61d54d91fb3d1fadec30f7969810f10549b1154a5884920ceecd9425"} Jan 22 06:08:14 crc kubenswrapper[4982]: I0122 06:08:14.377638 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-2jrzv" event={"ID":"b1bbcbea-8862-4397-a672-5083ba34c23e","Type":"ContainerStarted","Data":"5f9083a2496fa00ab8c3ad2951dfadfdfa59a9770ae5713ebd53f475d09f5ee9"} Jan 22 06:08:14 crc kubenswrapper[4982]: I0122 06:08:14.771407 4982 util.go:48] "No ready sandbox for pod can be found. 
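
The kuberuntime_container.go:808 "Killing container with a grace period" entries above stop each ceilometer-0 container with gracePeriod=30: the runtime first requests a polite stop and escalates to a forced kill only if the container outlives the grace period. The subsequent "container finished" lines with exitCode=0 (and exitCode=2 for sg-core) show the containers exiting within it. A minimal sketch of that pattern against a hypothetical runtime interface, not the real CRI client:

package main

import (
	"fmt"
	"time"
)

// containerRuntime abstracts the two signals the kill path needs.
type containerRuntime interface {
	Term(id string) error                     // polite stop request (SIGTERM/STOPSIGNAL)
	Kill(id string) error                     // forced stop (SIGKILL)
	WaitExit(id string, d time.Duration) bool // true if exited within d
}

func killWithGracePeriod(r containerRuntime, id string, grace time.Duration) error {
	fmt.Printf("Killing container %s with a grace period of %s\n", id, grace)
	if err := r.Term(id); err != nil {
		return err
	}
	if r.WaitExit(id, grace) {
		return nil // exited cleanly within the grace period
	}
	return r.Kill(id) // grace period elapsed; force it
}

type fakeRuntime struct{}

func (fakeRuntime) Term(string) error                   { return nil }
func (fakeRuntime) Kill(string) error                   { return nil }
func (fakeRuntime) WaitExit(string, time.Duration) bool { return true }

func main() {
	_ = killWithGracePeriod(fakeRuntime{}, "cri-o://4dbe8daf61d5...", 30*time.Second)
}
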
Need to start a new one" pod="openstack/ceilometer-0" Jan 22 06:08:14 crc kubenswrapper[4982]: I0122 06:08:14.921076 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cxrcp\" (UniqueName: \"kubernetes.io/projected/08c83842-d8b5-4f79-98d5-8822a6716046-kube-api-access-cxrcp\") pod \"08c83842-d8b5-4f79-98d5-8822a6716046\" (UID: \"08c83842-d8b5-4f79-98d5-8822a6716046\") " Jan 22 06:08:14 crc kubenswrapper[4982]: I0122 06:08:14.921118 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08c83842-d8b5-4f79-98d5-8822a6716046-scripts\") pod \"08c83842-d8b5-4f79-98d5-8822a6716046\" (UID: \"08c83842-d8b5-4f79-98d5-8822a6716046\") " Jan 22 06:08:14 crc kubenswrapper[4982]: I0122 06:08:14.921191 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08c83842-d8b5-4f79-98d5-8822a6716046-log-httpd\") pod \"08c83842-d8b5-4f79-98d5-8822a6716046\" (UID: \"08c83842-d8b5-4f79-98d5-8822a6716046\") " Jan 22 06:08:14 crc kubenswrapper[4982]: I0122 06:08:14.921276 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08c83842-d8b5-4f79-98d5-8822a6716046-sg-core-conf-yaml\") pod \"08c83842-d8b5-4f79-98d5-8822a6716046\" (UID: \"08c83842-d8b5-4f79-98d5-8822a6716046\") " Jan 22 06:08:14 crc kubenswrapper[4982]: I0122 06:08:14.921297 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08c83842-d8b5-4f79-98d5-8822a6716046-config-data\") pod \"08c83842-d8b5-4f79-98d5-8822a6716046\" (UID: \"08c83842-d8b5-4f79-98d5-8822a6716046\") " Jan 22 06:08:14 crc kubenswrapper[4982]: I0122 06:08:14.921312 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08c83842-d8b5-4f79-98d5-8822a6716046-run-httpd\") pod \"08c83842-d8b5-4f79-98d5-8822a6716046\" (UID: \"08c83842-d8b5-4f79-98d5-8822a6716046\") " Jan 22 06:08:14 crc kubenswrapper[4982]: I0122 06:08:14.921363 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08c83842-d8b5-4f79-98d5-8822a6716046-combined-ca-bundle\") pod \"08c83842-d8b5-4f79-98d5-8822a6716046\" (UID: \"08c83842-d8b5-4f79-98d5-8822a6716046\") " Jan 22 06:08:14 crc kubenswrapper[4982]: I0122 06:08:14.922839 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08c83842-d8b5-4f79-98d5-8822a6716046-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "08c83842-d8b5-4f79-98d5-8822a6716046" (UID: "08c83842-d8b5-4f79-98d5-8822a6716046"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:08:14 crc kubenswrapper[4982]: I0122 06:08:14.922877 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08c83842-d8b5-4f79-98d5-8822a6716046-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "08c83842-d8b5-4f79-98d5-8822a6716046" (UID: "08c83842-d8b5-4f79-98d5-8822a6716046"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:08:14 crc kubenswrapper[4982]: I0122 06:08:14.928012 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08c83842-d8b5-4f79-98d5-8822a6716046-scripts" (OuterVolumeSpecName: "scripts") pod "08c83842-d8b5-4f79-98d5-8822a6716046" (UID: "08c83842-d8b5-4f79-98d5-8822a6716046"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:08:14 crc kubenswrapper[4982]: I0122 06:08:14.930031 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08c83842-d8b5-4f79-98d5-8822a6716046-kube-api-access-cxrcp" (OuterVolumeSpecName: "kube-api-access-cxrcp") pod "08c83842-d8b5-4f79-98d5-8822a6716046" (UID: "08c83842-d8b5-4f79-98d5-8822a6716046"). InnerVolumeSpecName "kube-api-access-cxrcp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:08:14 crc kubenswrapper[4982]: I0122 06:08:14.958205 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08c83842-d8b5-4f79-98d5-8822a6716046-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "08c83842-d8b5-4f79-98d5-8822a6716046" (UID: "08c83842-d8b5-4f79-98d5-8822a6716046"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.004946 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08c83842-d8b5-4f79-98d5-8822a6716046-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "08c83842-d8b5-4f79-98d5-8822a6716046" (UID: "08c83842-d8b5-4f79-98d5-8822a6716046"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.023400 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08c83842-d8b5-4f79-98d5-8822a6716046-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.023432 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cxrcp\" (UniqueName: \"kubernetes.io/projected/08c83842-d8b5-4f79-98d5-8822a6716046-kube-api-access-cxrcp\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.023445 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08c83842-d8b5-4f79-98d5-8822a6716046-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.023454 4982 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08c83842-d8b5-4f79-98d5-8822a6716046-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.023462 4982 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08c83842-d8b5-4f79-98d5-8822a6716046-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.023471 4982 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08c83842-d8b5-4f79-98d5-8822a6716046-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.037530 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/secret/08c83842-d8b5-4f79-98d5-8822a6716046-config-data" (OuterVolumeSpecName: "config-data") pod "08c83842-d8b5-4f79-98d5-8822a6716046" (UID: "08c83842-d8b5-4f79-98d5-8822a6716046"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.147397 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08c83842-d8b5-4f79-98d5-8822a6716046-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.392156 4982 generic.go:334] "Generic (PLEG): container finished" podID="08c83842-d8b5-4f79-98d5-8822a6716046" containerID="2228fe15d0c41108ea21cdef57d85a06e53c58784daf02c9d65e24de860de72f" exitCode=0 Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.392201 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08c83842-d8b5-4f79-98d5-8822a6716046","Type":"ContainerDied","Data":"2228fe15d0c41108ea21cdef57d85a06e53c58784daf02c9d65e24de860de72f"} Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.392243 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.392265 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08c83842-d8b5-4f79-98d5-8822a6716046","Type":"ContainerDied","Data":"afdaedf754dc020941f1936e4ca11bc7a797e51f9b29803e51c029fd03393a76"} Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.392293 4982 scope.go:117] "RemoveContainer" containerID="cfa50ade837d368deabf7da090c6876df6855a9c350204462403f2149d091c61" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.481907 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.483439 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.483400 4982 scope.go:117] "RemoveContainer" containerID="27905b82ecca95fdc8666b6e06b7d9ad4b83698b954a35ee4cff76daae176428" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.494548 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:08:15 crc kubenswrapper[4982]: E0122 06:08:15.494933 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08c83842-d8b5-4f79-98d5-8822a6716046" containerName="proxy-httpd" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.494954 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="08c83842-d8b5-4f79-98d5-8822a6716046" containerName="proxy-httpd" Jan 22 06:08:15 crc kubenswrapper[4982]: E0122 06:08:15.494981 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08c83842-d8b5-4f79-98d5-8822a6716046" containerName="sg-core" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.494987 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="08c83842-d8b5-4f79-98d5-8822a6716046" containerName="sg-core" Jan 22 06:08:15 crc kubenswrapper[4982]: E0122 06:08:15.495004 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08c83842-d8b5-4f79-98d5-8822a6716046" containerName="ceilometer-notification-agent" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.495010 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="08c83842-d8b5-4f79-98d5-8822a6716046" 
containerName="ceilometer-notification-agent" Jan 22 06:08:15 crc kubenswrapper[4982]: E0122 06:08:15.495020 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08c83842-d8b5-4f79-98d5-8822a6716046" containerName="ceilometer-central-agent" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.495026 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="08c83842-d8b5-4f79-98d5-8822a6716046" containerName="ceilometer-central-agent" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.495177 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="08c83842-d8b5-4f79-98d5-8822a6716046" containerName="proxy-httpd" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.495192 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="08c83842-d8b5-4f79-98d5-8822a6716046" containerName="ceilometer-central-agent" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.495204 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="08c83842-d8b5-4f79-98d5-8822a6716046" containerName="sg-core" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.495219 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="08c83842-d8b5-4f79-98d5-8822a6716046" containerName="ceilometer-notification-agent" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.496906 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.502589 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.503487 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.503840 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.542834 4982 scope.go:117] "RemoveContainer" containerID="2228fe15d0c41108ea21cdef57d85a06e53c58784daf02c9d65e24de860de72f" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.563777 4982 scope.go:117] "RemoveContainer" containerID="4dbe8daf61d54d91fb3d1fadec30f7969810f10549b1154a5884920ceecd9425" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.573832 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tmqnf\" (UniqueName: \"kubernetes.io/projected/7a5ec07c-1966-4c80-99e0-1548b8bca655-kube-api-access-tmqnf\") pod \"ceilometer-0\" (UID: \"7a5ec07c-1966-4c80-99e0-1548b8bca655\") " pod="openstack/ceilometer-0" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.573929 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7a5ec07c-1966-4c80-99e0-1548b8bca655-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7a5ec07c-1966-4c80-99e0-1548b8bca655\") " pod="openstack/ceilometer-0" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.573957 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7a5ec07c-1966-4c80-99e0-1548b8bca655-log-httpd\") pod \"ceilometer-0\" (UID: \"7a5ec07c-1966-4c80-99e0-1548b8bca655\") " pod="openstack/ceilometer-0" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.573985 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7a5ec07c-1966-4c80-99e0-1548b8bca655-run-httpd\") pod \"ceilometer-0\" (UID: \"7a5ec07c-1966-4c80-99e0-1548b8bca655\") " pod="openstack/ceilometer-0" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.574264 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a5ec07c-1966-4c80-99e0-1548b8bca655-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7a5ec07c-1966-4c80-99e0-1548b8bca655\") " pod="openstack/ceilometer-0" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.574444 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a5ec07c-1966-4c80-99e0-1548b8bca655-config-data\") pod \"ceilometer-0\" (UID: \"7a5ec07c-1966-4c80-99e0-1548b8bca655\") " pod="openstack/ceilometer-0" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.574472 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a5ec07c-1966-4c80-99e0-1548b8bca655-scripts\") pod \"ceilometer-0\" (UID: \"7a5ec07c-1966-4c80-99e0-1548b8bca655\") " pod="openstack/ceilometer-0" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.582239 4982 scope.go:117] "RemoveContainer" containerID="cfa50ade837d368deabf7da090c6876df6855a9c350204462403f2149d091c61" Jan 22 06:08:15 crc kubenswrapper[4982]: E0122 06:08:15.583021 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cfa50ade837d368deabf7da090c6876df6855a9c350204462403f2149d091c61\": container with ID starting with cfa50ade837d368deabf7da090c6876df6855a9c350204462403f2149d091c61 not found: ID does not exist" containerID="cfa50ade837d368deabf7da090c6876df6855a9c350204462403f2149d091c61" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.583066 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cfa50ade837d368deabf7da090c6876df6855a9c350204462403f2149d091c61"} err="failed to get container status \"cfa50ade837d368deabf7da090c6876df6855a9c350204462403f2149d091c61\": rpc error: code = NotFound desc = could not find container \"cfa50ade837d368deabf7da090c6876df6855a9c350204462403f2149d091c61\": container with ID starting with cfa50ade837d368deabf7da090c6876df6855a9c350204462403f2149d091c61 not found: ID does not exist" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.583095 4982 scope.go:117] "RemoveContainer" containerID="27905b82ecca95fdc8666b6e06b7d9ad4b83698b954a35ee4cff76daae176428" Jan 22 06:08:15 crc kubenswrapper[4982]: E0122 06:08:15.583466 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27905b82ecca95fdc8666b6e06b7d9ad4b83698b954a35ee4cff76daae176428\": container with ID starting with 27905b82ecca95fdc8666b6e06b7d9ad4b83698b954a35ee4cff76daae176428 not found: ID does not exist" containerID="27905b82ecca95fdc8666b6e06b7d9ad4b83698b954a35ee4cff76daae176428" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.583534 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27905b82ecca95fdc8666b6e06b7d9ad4b83698b954a35ee4cff76daae176428"} err="failed to get container status 
\"27905b82ecca95fdc8666b6e06b7d9ad4b83698b954a35ee4cff76daae176428\": rpc error: code = NotFound desc = could not find container \"27905b82ecca95fdc8666b6e06b7d9ad4b83698b954a35ee4cff76daae176428\": container with ID starting with 27905b82ecca95fdc8666b6e06b7d9ad4b83698b954a35ee4cff76daae176428 not found: ID does not exist" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.583590 4982 scope.go:117] "RemoveContainer" containerID="2228fe15d0c41108ea21cdef57d85a06e53c58784daf02c9d65e24de860de72f" Jan 22 06:08:15 crc kubenswrapper[4982]: E0122 06:08:15.584958 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2228fe15d0c41108ea21cdef57d85a06e53c58784daf02c9d65e24de860de72f\": container with ID starting with 2228fe15d0c41108ea21cdef57d85a06e53c58784daf02c9d65e24de860de72f not found: ID does not exist" containerID="2228fe15d0c41108ea21cdef57d85a06e53c58784daf02c9d65e24de860de72f" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.584990 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2228fe15d0c41108ea21cdef57d85a06e53c58784daf02c9d65e24de860de72f"} err="failed to get container status \"2228fe15d0c41108ea21cdef57d85a06e53c58784daf02c9d65e24de860de72f\": rpc error: code = NotFound desc = could not find container \"2228fe15d0c41108ea21cdef57d85a06e53c58784daf02c9d65e24de860de72f\": container with ID starting with 2228fe15d0c41108ea21cdef57d85a06e53c58784daf02c9d65e24de860de72f not found: ID does not exist" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.585014 4982 scope.go:117] "RemoveContainer" containerID="4dbe8daf61d54d91fb3d1fadec30f7969810f10549b1154a5884920ceecd9425" Jan 22 06:08:15 crc kubenswrapper[4982]: E0122 06:08:15.585321 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4dbe8daf61d54d91fb3d1fadec30f7969810f10549b1154a5884920ceecd9425\": container with ID starting with 4dbe8daf61d54d91fb3d1fadec30f7969810f10549b1154a5884920ceecd9425 not found: ID does not exist" containerID="4dbe8daf61d54d91fb3d1fadec30f7969810f10549b1154a5884920ceecd9425" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.585345 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4dbe8daf61d54d91fb3d1fadec30f7969810f10549b1154a5884920ceecd9425"} err="failed to get container status \"4dbe8daf61d54d91fb3d1fadec30f7969810f10549b1154a5884920ceecd9425\": rpc error: code = NotFound desc = could not find container \"4dbe8daf61d54d91fb3d1fadec30f7969810f10549b1154a5884920ceecd9425\": container with ID starting with 4dbe8daf61d54d91fb3d1fadec30f7969810f10549b1154a5884920ceecd9425 not found: ID does not exist" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.676319 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tmqnf\" (UniqueName: \"kubernetes.io/projected/7a5ec07c-1966-4c80-99e0-1548b8bca655-kube-api-access-tmqnf\") pod \"ceilometer-0\" (UID: \"7a5ec07c-1966-4c80-99e0-1548b8bca655\") " pod="openstack/ceilometer-0" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.676375 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7a5ec07c-1966-4c80-99e0-1548b8bca655-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7a5ec07c-1966-4c80-99e0-1548b8bca655\") " pod="openstack/ceilometer-0" Jan 22 06:08:15 crc 
kubenswrapper[4982]: I0122 06:08:15.676394 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7a5ec07c-1966-4c80-99e0-1548b8bca655-log-httpd\") pod \"ceilometer-0\" (UID: \"7a5ec07c-1966-4c80-99e0-1548b8bca655\") " pod="openstack/ceilometer-0" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.676416 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7a5ec07c-1966-4c80-99e0-1548b8bca655-run-httpd\") pod \"ceilometer-0\" (UID: \"7a5ec07c-1966-4c80-99e0-1548b8bca655\") " pod="openstack/ceilometer-0" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.676462 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a5ec07c-1966-4c80-99e0-1548b8bca655-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7a5ec07c-1966-4c80-99e0-1548b8bca655\") " pod="openstack/ceilometer-0" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.676489 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a5ec07c-1966-4c80-99e0-1548b8bca655-scripts\") pod \"ceilometer-0\" (UID: \"7a5ec07c-1966-4c80-99e0-1548b8bca655\") " pod="openstack/ceilometer-0" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.676501 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a5ec07c-1966-4c80-99e0-1548b8bca655-config-data\") pod \"ceilometer-0\" (UID: \"7a5ec07c-1966-4c80-99e0-1548b8bca655\") " pod="openstack/ceilometer-0" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.677250 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7a5ec07c-1966-4c80-99e0-1548b8bca655-run-httpd\") pod \"ceilometer-0\" (UID: \"7a5ec07c-1966-4c80-99e0-1548b8bca655\") " pod="openstack/ceilometer-0" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.678164 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7a5ec07c-1966-4c80-99e0-1548b8bca655-log-httpd\") pod \"ceilometer-0\" (UID: \"7a5ec07c-1966-4c80-99e0-1548b8bca655\") " pod="openstack/ceilometer-0" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.681591 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a5ec07c-1966-4c80-99e0-1548b8bca655-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7a5ec07c-1966-4c80-99e0-1548b8bca655\") " pod="openstack/ceilometer-0" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.682222 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a5ec07c-1966-4c80-99e0-1548b8bca655-config-data\") pod \"ceilometer-0\" (UID: \"7a5ec07c-1966-4c80-99e0-1548b8bca655\") " pod="openstack/ceilometer-0" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.682821 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a5ec07c-1966-4c80-99e0-1548b8bca655-scripts\") pod \"ceilometer-0\" (UID: \"7a5ec07c-1966-4c80-99e0-1548b8bca655\") " pod="openstack/ceilometer-0" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.683217 4982 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7a5ec07c-1966-4c80-99e0-1548b8bca655-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7a5ec07c-1966-4c80-99e0-1548b8bca655\") " pod="openstack/ceilometer-0" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.703414 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tmqnf\" (UniqueName: \"kubernetes.io/projected/7a5ec07c-1966-4c80-99e0-1548b8bca655-kube-api-access-tmqnf\") pod \"ceilometer-0\" (UID: \"7a5ec07c-1966-4c80-99e0-1548b8bca655\") " pod="openstack/ceilometer-0" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.735096 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08c83842-d8b5-4f79-98d5-8822a6716046" path="/var/lib/kubelet/pods/08c83842-d8b5-4f79-98d5-8822a6716046/volumes" Jan 22 06:08:15 crc kubenswrapper[4982]: I0122 06:08:15.816328 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 06:08:16 crc kubenswrapper[4982]: I0122 06:08:16.242734 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:08:16 crc kubenswrapper[4982]: I0122 06:08:16.402301 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7a5ec07c-1966-4c80-99e0-1548b8bca655","Type":"ContainerStarted","Data":"0fc08ba503bca111f0fefad22f2d1e540e380ac57d063d68d7a5ee051d482a34"} Jan 22 06:08:21 crc kubenswrapper[4982]: I0122 06:08:21.443493 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-2jrzv" event={"ID":"b1bbcbea-8862-4397-a672-5083ba34c23e","Type":"ContainerStarted","Data":"52150f5fa6e18d846be10060dab5d01e9c21ff0a57b517612e0f3a5ddd979624"} Jan 22 06:08:21 crc kubenswrapper[4982]: I0122 06:08:21.446005 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7a5ec07c-1966-4c80-99e0-1548b8bca655","Type":"ContainerStarted","Data":"55b0245e07cf8cb31350f9d66abd1714f68af6fd090228bf50b020aa57ab0711"} Jan 22 06:08:21 crc kubenswrapper[4982]: I0122 06:08:21.446046 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7a5ec07c-1966-4c80-99e0-1548b8bca655","Type":"ContainerStarted","Data":"0b41d8441bd9cd826ebbca7cfd337cc4cfc9cc83becc636feb731d49e2530e39"} Jan 22 06:08:21 crc kubenswrapper[4982]: I0122 06:08:21.708256 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="b344d2d6-b86e-47d1-9e84-4e263485a947" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.161:3000/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 22 06:08:22 crc kubenswrapper[4982]: I0122 06:08:22.464532 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7a5ec07c-1966-4c80-99e0-1548b8bca655","Type":"ContainerStarted","Data":"56e7df9ff163811b79dece3b1ee8bdc5bf354275dad2ff16bc8bdaac05b3bc84"} Jan 22 06:08:24 crc kubenswrapper[4982]: I0122 06:08:24.487579 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7a5ec07c-1966-4c80-99e0-1548b8bca655","Type":"ContainerStarted","Data":"83a984c8b8b3c4f50eb4cfabb11d3a43592a53dec3d9647ef9c0499bdc7c6747"} Jan 22 06:08:24 crc kubenswrapper[4982]: I0122 06:08:24.488001 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 22 06:08:24 crc kubenswrapper[4982]: I0122 
06:08:24.517394 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.375811766 podStartE2EDuration="9.517365869s" podCreationTimestamp="2026-01-22 06:08:15 +0000 UTC" firstStartedPulling="2026-01-22 06:08:16.257645979 +0000 UTC m=+1357.096283982" lastFinishedPulling="2026-01-22 06:08:23.399200042 +0000 UTC m=+1364.237838085" observedRunningTime="2026-01-22 06:08:24.505772218 +0000 UTC m=+1365.344410221" watchObservedRunningTime="2026-01-22 06:08:24.517365869 +0000 UTC m=+1365.356003912" Jan 22 06:08:24 crc kubenswrapper[4982]: I0122 06:08:24.522027 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-2jrzv" podStartSLOduration=5.809055063 podStartE2EDuration="12.52200599s" podCreationTimestamp="2026-01-22 06:08:12 +0000 UTC" firstStartedPulling="2026-01-22 06:08:13.801804595 +0000 UTC m=+1354.640442598" lastFinishedPulling="2026-01-22 06:08:20.514755492 +0000 UTC m=+1361.353393525" observedRunningTime="2026-01-22 06:08:21.465905207 +0000 UTC m=+1362.304543210" watchObservedRunningTime="2026-01-22 06:08:24.52200599 +0000 UTC m=+1365.360644023" Jan 22 06:08:26 crc kubenswrapper[4982]: I0122 06:08:26.491609 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:08:26 crc kubenswrapper[4982]: I0122 06:08:26.506575 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7a5ec07c-1966-4c80-99e0-1548b8bca655" containerName="ceilometer-central-agent" containerID="cri-o://0b41d8441bd9cd826ebbca7cfd337cc4cfc9cc83becc636feb731d49e2530e39" gracePeriod=30 Jan 22 06:08:26 crc kubenswrapper[4982]: I0122 06:08:26.506659 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7a5ec07c-1966-4c80-99e0-1548b8bca655" containerName="proxy-httpd" containerID="cri-o://83a984c8b8b3c4f50eb4cfabb11d3a43592a53dec3d9647ef9c0499bdc7c6747" gracePeriod=30 Jan 22 06:08:26 crc kubenswrapper[4982]: I0122 06:08:26.506712 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7a5ec07c-1966-4c80-99e0-1548b8bca655" containerName="ceilometer-notification-agent" containerID="cri-o://55b0245e07cf8cb31350f9d66abd1714f68af6fd090228bf50b020aa57ab0711" gracePeriod=30 Jan 22 06:08:26 crc kubenswrapper[4982]: I0122 06:08:26.506685 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7a5ec07c-1966-4c80-99e0-1548b8bca655" containerName="sg-core" containerID="cri-o://56e7df9ff163811b79dece3b1ee8bdc5bf354275dad2ff16bc8bdaac05b3bc84" gracePeriod=30 Jan 22 06:08:27 crc kubenswrapper[4982]: I0122 06:08:27.524406 4982 generic.go:334] "Generic (PLEG): container finished" podID="7a5ec07c-1966-4c80-99e0-1548b8bca655" containerID="83a984c8b8b3c4f50eb4cfabb11d3a43592a53dec3d9647ef9c0499bdc7c6747" exitCode=0 Jan 22 06:08:27 crc kubenswrapper[4982]: I0122 06:08:27.524686 4982 generic.go:334] "Generic (PLEG): container finished" podID="7a5ec07c-1966-4c80-99e0-1548b8bca655" containerID="56e7df9ff163811b79dece3b1ee8bdc5bf354275dad2ff16bc8bdaac05b3bc84" exitCode=2 Jan 22 06:08:27 crc kubenswrapper[4982]: I0122 06:08:27.524698 4982 generic.go:334] "Generic (PLEG): container finished" podID="7a5ec07c-1966-4c80-99e0-1548b8bca655" containerID="55b0245e07cf8cb31350f9d66abd1714f68af6fd090228bf50b020aa57ab0711" exitCode=0 Jan 22 06:08:27 crc kubenswrapper[4982]: 
I0122 06:08:27.524723 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7a5ec07c-1966-4c80-99e0-1548b8bca655","Type":"ContainerDied","Data":"83a984c8b8b3c4f50eb4cfabb11d3a43592a53dec3d9647ef9c0499bdc7c6747"} Jan 22 06:08:27 crc kubenswrapper[4982]: I0122 06:08:27.524755 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7a5ec07c-1966-4c80-99e0-1548b8bca655","Type":"ContainerDied","Data":"56e7df9ff163811b79dece3b1ee8bdc5bf354275dad2ff16bc8bdaac05b3bc84"} Jan 22 06:08:27 crc kubenswrapper[4982]: I0122 06:08:27.524769 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7a5ec07c-1966-4c80-99e0-1548b8bca655","Type":"ContainerDied","Data":"55b0245e07cf8cb31350f9d66abd1714f68af6fd090228bf50b020aa57ab0711"} Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.439217 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.564130 4982 generic.go:334] "Generic (PLEG): container finished" podID="7a5ec07c-1966-4c80-99e0-1548b8bca655" containerID="0b41d8441bd9cd826ebbca7cfd337cc4cfc9cc83becc636feb731d49e2530e39" exitCode=0 Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.564163 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7a5ec07c-1966-4c80-99e0-1548b8bca655","Type":"ContainerDied","Data":"0b41d8441bd9cd826ebbca7cfd337cc4cfc9cc83becc636feb731d49e2530e39"} Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.564210 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7a5ec07c-1966-4c80-99e0-1548b8bca655","Type":"ContainerDied","Data":"0fc08ba503bca111f0fefad22f2d1e540e380ac57d063d68d7a5ee051d482a34"} Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.564233 4982 scope.go:117] "RemoveContainer" containerID="83a984c8b8b3c4f50eb4cfabb11d3a43592a53dec3d9647ef9c0499bdc7c6747" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.564273 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.571927 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7a5ec07c-1966-4c80-99e0-1548b8bca655-log-httpd\") pod \"7a5ec07c-1966-4c80-99e0-1548b8bca655\" (UID: \"7a5ec07c-1966-4c80-99e0-1548b8bca655\") " Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.571981 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tmqnf\" (UniqueName: \"kubernetes.io/projected/7a5ec07c-1966-4c80-99e0-1548b8bca655-kube-api-access-tmqnf\") pod \"7a5ec07c-1966-4c80-99e0-1548b8bca655\" (UID: \"7a5ec07c-1966-4c80-99e0-1548b8bca655\") " Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.572021 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a5ec07c-1966-4c80-99e0-1548b8bca655-config-data\") pod \"7a5ec07c-1966-4c80-99e0-1548b8bca655\" (UID: \"7a5ec07c-1966-4c80-99e0-1548b8bca655\") " Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.572088 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7a5ec07c-1966-4c80-99e0-1548b8bca655-run-httpd\") pod \"7a5ec07c-1966-4c80-99e0-1548b8bca655\" (UID: \"7a5ec07c-1966-4c80-99e0-1548b8bca655\") " Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.572314 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7a5ec07c-1966-4c80-99e0-1548b8bca655-sg-core-conf-yaml\") pod \"7a5ec07c-1966-4c80-99e0-1548b8bca655\" (UID: \"7a5ec07c-1966-4c80-99e0-1548b8bca655\") " Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.572414 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a5ec07c-1966-4c80-99e0-1548b8bca655-combined-ca-bundle\") pod \"7a5ec07c-1966-4c80-99e0-1548b8bca655\" (UID: \"7a5ec07c-1966-4c80-99e0-1548b8bca655\") " Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.572516 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a5ec07c-1966-4c80-99e0-1548b8bca655-scripts\") pod \"7a5ec07c-1966-4c80-99e0-1548b8bca655\" (UID: \"7a5ec07c-1966-4c80-99e0-1548b8bca655\") " Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.572541 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a5ec07c-1966-4c80-99e0-1548b8bca655-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7a5ec07c-1966-4c80-99e0-1548b8bca655" (UID: "7a5ec07c-1966-4c80-99e0-1548b8bca655"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.572830 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a5ec07c-1966-4c80-99e0-1548b8bca655-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7a5ec07c-1966-4c80-99e0-1548b8bca655" (UID: "7a5ec07c-1966-4c80-99e0-1548b8bca655"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.573392 4982 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7a5ec07c-1966-4c80-99e0-1548b8bca655-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.573425 4982 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7a5ec07c-1966-4c80-99e0-1548b8bca655-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.580351 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a5ec07c-1966-4c80-99e0-1548b8bca655-scripts" (OuterVolumeSpecName: "scripts") pod "7a5ec07c-1966-4c80-99e0-1548b8bca655" (UID: "7a5ec07c-1966-4c80-99e0-1548b8bca655"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.583584 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a5ec07c-1966-4c80-99e0-1548b8bca655-kube-api-access-tmqnf" (OuterVolumeSpecName: "kube-api-access-tmqnf") pod "7a5ec07c-1966-4c80-99e0-1548b8bca655" (UID: "7a5ec07c-1966-4c80-99e0-1548b8bca655"). InnerVolumeSpecName "kube-api-access-tmqnf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.595036 4982 scope.go:117] "RemoveContainer" containerID="56e7df9ff163811b79dece3b1ee8bdc5bf354275dad2ff16bc8bdaac05b3bc84" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.606779 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a5ec07c-1966-4c80-99e0-1548b8bca655-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7a5ec07c-1966-4c80-99e0-1548b8bca655" (UID: "7a5ec07c-1966-4c80-99e0-1548b8bca655"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.675187 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a5ec07c-1966-4c80-99e0-1548b8bca655-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.675239 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tmqnf\" (UniqueName: \"kubernetes.io/projected/7a5ec07c-1966-4c80-99e0-1548b8bca655-kube-api-access-tmqnf\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.675259 4982 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7a5ec07c-1966-4c80-99e0-1548b8bca655-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.691654 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a5ec07c-1966-4c80-99e0-1548b8bca655-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7a5ec07c-1966-4c80-99e0-1548b8bca655" (UID: "7a5ec07c-1966-4c80-99e0-1548b8bca655"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.693224 4982 scope.go:117] "RemoveContainer" containerID="55b0245e07cf8cb31350f9d66abd1714f68af6fd090228bf50b020aa57ab0711" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.741410 4982 scope.go:117] "RemoveContainer" containerID="0b41d8441bd9cd826ebbca7cfd337cc4cfc9cc83becc636feb731d49e2530e39" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.745444 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a5ec07c-1966-4c80-99e0-1548b8bca655-config-data" (OuterVolumeSpecName: "config-data") pod "7a5ec07c-1966-4c80-99e0-1548b8bca655" (UID: "7a5ec07c-1966-4c80-99e0-1548b8bca655"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.764333 4982 scope.go:117] "RemoveContainer" containerID="83a984c8b8b3c4f50eb4cfabb11d3a43592a53dec3d9647ef9c0499bdc7c6747" Jan 22 06:08:30 crc kubenswrapper[4982]: E0122 06:08:30.764828 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"83a984c8b8b3c4f50eb4cfabb11d3a43592a53dec3d9647ef9c0499bdc7c6747\": container with ID starting with 83a984c8b8b3c4f50eb4cfabb11d3a43592a53dec3d9647ef9c0499bdc7c6747 not found: ID does not exist" containerID="83a984c8b8b3c4f50eb4cfabb11d3a43592a53dec3d9647ef9c0499bdc7c6747" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.764899 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83a984c8b8b3c4f50eb4cfabb11d3a43592a53dec3d9647ef9c0499bdc7c6747"} err="failed to get container status \"83a984c8b8b3c4f50eb4cfabb11d3a43592a53dec3d9647ef9c0499bdc7c6747\": rpc error: code = NotFound desc = could not find container \"83a984c8b8b3c4f50eb4cfabb11d3a43592a53dec3d9647ef9c0499bdc7c6747\": container with ID starting with 83a984c8b8b3c4f50eb4cfabb11d3a43592a53dec3d9647ef9c0499bdc7c6747 not found: ID does not exist" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.764930 4982 scope.go:117] "RemoveContainer" containerID="56e7df9ff163811b79dece3b1ee8bdc5bf354275dad2ff16bc8bdaac05b3bc84" Jan 22 06:08:30 crc kubenswrapper[4982]: E0122 06:08:30.765373 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56e7df9ff163811b79dece3b1ee8bdc5bf354275dad2ff16bc8bdaac05b3bc84\": container with ID starting with 56e7df9ff163811b79dece3b1ee8bdc5bf354275dad2ff16bc8bdaac05b3bc84 not found: ID does not exist" containerID="56e7df9ff163811b79dece3b1ee8bdc5bf354275dad2ff16bc8bdaac05b3bc84" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.765410 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56e7df9ff163811b79dece3b1ee8bdc5bf354275dad2ff16bc8bdaac05b3bc84"} err="failed to get container status \"56e7df9ff163811b79dece3b1ee8bdc5bf354275dad2ff16bc8bdaac05b3bc84\": rpc error: code = NotFound desc = could not find container \"56e7df9ff163811b79dece3b1ee8bdc5bf354275dad2ff16bc8bdaac05b3bc84\": container with ID starting with 56e7df9ff163811b79dece3b1ee8bdc5bf354275dad2ff16bc8bdaac05b3bc84 not found: ID does not exist" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.765454 4982 scope.go:117] "RemoveContainer" containerID="55b0245e07cf8cb31350f9d66abd1714f68af6fd090228bf50b020aa57ab0711" Jan 22 06:08:30 crc kubenswrapper[4982]: E0122 06:08:30.765829 
4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55b0245e07cf8cb31350f9d66abd1714f68af6fd090228bf50b020aa57ab0711\": container with ID starting with 55b0245e07cf8cb31350f9d66abd1714f68af6fd090228bf50b020aa57ab0711 not found: ID does not exist" containerID="55b0245e07cf8cb31350f9d66abd1714f68af6fd090228bf50b020aa57ab0711" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.765914 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55b0245e07cf8cb31350f9d66abd1714f68af6fd090228bf50b020aa57ab0711"} err="failed to get container status \"55b0245e07cf8cb31350f9d66abd1714f68af6fd090228bf50b020aa57ab0711\": rpc error: code = NotFound desc = could not find container \"55b0245e07cf8cb31350f9d66abd1714f68af6fd090228bf50b020aa57ab0711\": container with ID starting with 55b0245e07cf8cb31350f9d66abd1714f68af6fd090228bf50b020aa57ab0711 not found: ID does not exist" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.765974 4982 scope.go:117] "RemoveContainer" containerID="0b41d8441bd9cd826ebbca7cfd337cc4cfc9cc83becc636feb731d49e2530e39" Jan 22 06:08:30 crc kubenswrapper[4982]: E0122 06:08:30.766330 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b41d8441bd9cd826ebbca7cfd337cc4cfc9cc83becc636feb731d49e2530e39\": container with ID starting with 0b41d8441bd9cd826ebbca7cfd337cc4cfc9cc83becc636feb731d49e2530e39 not found: ID does not exist" containerID="0b41d8441bd9cd826ebbca7cfd337cc4cfc9cc83becc636feb731d49e2530e39" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.766370 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b41d8441bd9cd826ebbca7cfd337cc4cfc9cc83becc636feb731d49e2530e39"} err="failed to get container status \"0b41d8441bd9cd826ebbca7cfd337cc4cfc9cc83becc636feb731d49e2530e39\": rpc error: code = NotFound desc = could not find container \"0b41d8441bd9cd826ebbca7cfd337cc4cfc9cc83becc636feb731d49e2530e39\": container with ID starting with 0b41d8441bd9cd826ebbca7cfd337cc4cfc9cc83becc636feb731d49e2530e39 not found: ID does not exist" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.776977 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a5ec07c-1966-4c80-99e0-1548b8bca655-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.777009 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a5ec07c-1966-4c80-99e0-1548b8bca655-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.934914 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.949475 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.959353 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:08:30 crc kubenswrapper[4982]: E0122 06:08:30.961247 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a5ec07c-1966-4c80-99e0-1548b8bca655" containerName="sg-core" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.961310 4982 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="7a5ec07c-1966-4c80-99e0-1548b8bca655" containerName="sg-core" Jan 22 06:08:30 crc kubenswrapper[4982]: E0122 06:08:30.961333 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a5ec07c-1966-4c80-99e0-1548b8bca655" containerName="ceilometer-central-agent" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.961340 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a5ec07c-1966-4c80-99e0-1548b8bca655" containerName="ceilometer-central-agent" Jan 22 06:08:30 crc kubenswrapper[4982]: E0122 06:08:30.961372 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a5ec07c-1966-4c80-99e0-1548b8bca655" containerName="ceilometer-notification-agent" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.961378 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a5ec07c-1966-4c80-99e0-1548b8bca655" containerName="ceilometer-notification-agent" Jan 22 06:08:30 crc kubenswrapper[4982]: E0122 06:08:30.961386 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a5ec07c-1966-4c80-99e0-1548b8bca655" containerName="proxy-httpd" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.961393 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a5ec07c-1966-4c80-99e0-1548b8bca655" containerName="proxy-httpd" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.961546 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a5ec07c-1966-4c80-99e0-1548b8bca655" containerName="ceilometer-central-agent" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.961559 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a5ec07c-1966-4c80-99e0-1548b8bca655" containerName="sg-core" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.961604 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a5ec07c-1966-4c80-99e0-1548b8bca655" containerName="ceilometer-notification-agent" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.961612 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a5ec07c-1966-4c80-99e0-1548b8bca655" containerName="proxy-httpd" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.964317 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.967806 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.967956 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 22 06:08:30 crc kubenswrapper[4982]: I0122 06:08:30.989203 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:08:31 crc kubenswrapper[4982]: I0122 06:08:31.086441 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-scripts\") pod \"ceilometer-0\" (UID: \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\") " pod="openstack/ceilometer-0" Jan 22 06:08:31 crc kubenswrapper[4982]: I0122 06:08:31.086770 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\") " pod="openstack/ceilometer-0" Jan 22 06:08:31 crc kubenswrapper[4982]: I0122 06:08:31.086818 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-run-httpd\") pod \"ceilometer-0\" (UID: \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\") " pod="openstack/ceilometer-0" Jan 22 06:08:31 crc kubenswrapper[4982]: I0122 06:08:31.086907 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-log-httpd\") pod \"ceilometer-0\" (UID: \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\") " pod="openstack/ceilometer-0" Jan 22 06:08:31 crc kubenswrapper[4982]: I0122 06:08:31.086935 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-config-data\") pod \"ceilometer-0\" (UID: \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\") " pod="openstack/ceilometer-0" Jan 22 06:08:31 crc kubenswrapper[4982]: I0122 06:08:31.087009 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\") " pod="openstack/ceilometer-0" Jan 22 06:08:31 crc kubenswrapper[4982]: I0122 06:08:31.087044 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jsprz\" (UniqueName: \"kubernetes.io/projected/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-kube-api-access-jsprz\") pod \"ceilometer-0\" (UID: \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\") " pod="openstack/ceilometer-0" Jan 22 06:08:31 crc kubenswrapper[4982]: I0122 06:08:31.189250 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-config-data\") pod \"ceilometer-0\" (UID: \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\") " pod="openstack/ceilometer-0" Jan 22 06:08:31 crc kubenswrapper[4982]: I0122 06:08:31.189445 
4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\") " pod="openstack/ceilometer-0" Jan 22 06:08:31 crc kubenswrapper[4982]: I0122 06:08:31.189540 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jsprz\" (UniqueName: \"kubernetes.io/projected/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-kube-api-access-jsprz\") pod \"ceilometer-0\" (UID: \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\") " pod="openstack/ceilometer-0" Jan 22 06:08:31 crc kubenswrapper[4982]: I0122 06:08:31.189653 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-scripts\") pod \"ceilometer-0\" (UID: \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\") " pod="openstack/ceilometer-0" Jan 22 06:08:31 crc kubenswrapper[4982]: I0122 06:08:31.189764 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\") " pod="openstack/ceilometer-0" Jan 22 06:08:31 crc kubenswrapper[4982]: I0122 06:08:31.189913 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-run-httpd\") pod \"ceilometer-0\" (UID: \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\") " pod="openstack/ceilometer-0" Jan 22 06:08:31 crc kubenswrapper[4982]: I0122 06:08:31.190015 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-log-httpd\") pod \"ceilometer-0\" (UID: \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\") " pod="openstack/ceilometer-0" Jan 22 06:08:31 crc kubenswrapper[4982]: I0122 06:08:31.190640 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-run-httpd\") pod \"ceilometer-0\" (UID: \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\") " pod="openstack/ceilometer-0" Jan 22 06:08:31 crc kubenswrapper[4982]: I0122 06:08:31.190770 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-log-httpd\") pod \"ceilometer-0\" (UID: \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\") " pod="openstack/ceilometer-0" Jan 22 06:08:31 crc kubenswrapper[4982]: I0122 06:08:31.194261 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-scripts\") pod \"ceilometer-0\" (UID: \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\") " pod="openstack/ceilometer-0" Jan 22 06:08:31 crc kubenswrapper[4982]: I0122 06:08:31.195381 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-config-data\") pod \"ceilometer-0\" (UID: \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\") " pod="openstack/ceilometer-0" Jan 22 06:08:31 crc kubenswrapper[4982]: I0122 06:08:31.196322 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\") " pod="openstack/ceilometer-0" Jan 22 06:08:31 crc kubenswrapper[4982]: I0122 06:08:31.204964 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\") " pod="openstack/ceilometer-0" Jan 22 06:08:31 crc kubenswrapper[4982]: I0122 06:08:31.205433 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jsprz\" (UniqueName: \"kubernetes.io/projected/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-kube-api-access-jsprz\") pod \"ceilometer-0\" (UID: \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\") " pod="openstack/ceilometer-0" Jan 22 06:08:31 crc kubenswrapper[4982]: I0122 06:08:31.284976 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 06:08:31 crc kubenswrapper[4982]: I0122 06:08:31.732481 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a5ec07c-1966-4c80-99e0-1548b8bca655" path="/var/lib/kubelet/pods/7a5ec07c-1966-4c80-99e0-1548b8bca655/volumes" Jan 22 06:08:31 crc kubenswrapper[4982]: W0122 06:08:31.785640 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod408e52a9_8be5_49ff_9cbd_dbbdc7e726a6.slice/crio-a56d539ff4043d69c57f705071e8f40be8d6cebb41b3378534697264a41b23ba WatchSource:0}: Error finding container a56d539ff4043d69c57f705071e8f40be8d6cebb41b3378534697264a41b23ba: Status 404 returned error can't find the container with id a56d539ff4043d69c57f705071e8f40be8d6cebb41b3378534697264a41b23ba Jan 22 06:08:31 crc kubenswrapper[4982]: I0122 06:08:31.804615 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:08:32 crc kubenswrapper[4982]: I0122 06:08:32.585103 4982 generic.go:334] "Generic (PLEG): container finished" podID="b1bbcbea-8862-4397-a672-5083ba34c23e" containerID="52150f5fa6e18d846be10060dab5d01e9c21ff0a57b517612e0f3a5ddd979624" exitCode=0 Jan 22 06:08:32 crc kubenswrapper[4982]: I0122 06:08:32.585194 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-2jrzv" event={"ID":"b1bbcbea-8862-4397-a672-5083ba34c23e","Type":"ContainerDied","Data":"52150f5fa6e18d846be10060dab5d01e9c21ff0a57b517612e0f3a5ddd979624"} Jan 22 06:08:32 crc kubenswrapper[4982]: I0122 06:08:32.588747 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6","Type":"ContainerStarted","Data":"7ba2d065f2e3d86f71f4e09d16ca60d013c5d39259ad1783544624f176119a13"} Jan 22 06:08:32 crc kubenswrapper[4982]: I0122 06:08:32.588795 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6","Type":"ContainerStarted","Data":"a56d539ff4043d69c57f705071e8f40be8d6cebb41b3378534697264a41b23ba"} Jan 22 06:08:33 crc kubenswrapper[4982]: I0122 06:08:33.603232 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6","Type":"ContainerStarted","Data":"8572721d4488decbe4ee74263447daaddadd5f62739cd82be298e54fdcde78c2"} Jan 22 06:08:34 crc kubenswrapper[4982]: 
I0122 06:08:34.107018 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-2jrzv" Jan 22 06:08:34 crc kubenswrapper[4982]: I0122 06:08:34.274091 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1bbcbea-8862-4397-a672-5083ba34c23e-config-data\") pod \"b1bbcbea-8862-4397-a672-5083ba34c23e\" (UID: \"b1bbcbea-8862-4397-a672-5083ba34c23e\") " Jan 22 06:08:34 crc kubenswrapper[4982]: I0122 06:08:34.274238 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1bbcbea-8862-4397-a672-5083ba34c23e-scripts\") pod \"b1bbcbea-8862-4397-a672-5083ba34c23e\" (UID: \"b1bbcbea-8862-4397-a672-5083ba34c23e\") " Jan 22 06:08:34 crc kubenswrapper[4982]: I0122 06:08:34.274479 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1bbcbea-8862-4397-a672-5083ba34c23e-combined-ca-bundle\") pod \"b1bbcbea-8862-4397-a672-5083ba34c23e\" (UID: \"b1bbcbea-8862-4397-a672-5083ba34c23e\") " Jan 22 06:08:34 crc kubenswrapper[4982]: I0122 06:08:34.274521 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ft7jl\" (UniqueName: \"kubernetes.io/projected/b1bbcbea-8862-4397-a672-5083ba34c23e-kube-api-access-ft7jl\") pod \"b1bbcbea-8862-4397-a672-5083ba34c23e\" (UID: \"b1bbcbea-8862-4397-a672-5083ba34c23e\") " Jan 22 06:08:34 crc kubenswrapper[4982]: I0122 06:08:34.282413 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1bbcbea-8862-4397-a672-5083ba34c23e-kube-api-access-ft7jl" (OuterVolumeSpecName: "kube-api-access-ft7jl") pod "b1bbcbea-8862-4397-a672-5083ba34c23e" (UID: "b1bbcbea-8862-4397-a672-5083ba34c23e"). InnerVolumeSpecName "kube-api-access-ft7jl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:08:34 crc kubenswrapper[4982]: I0122 06:08:34.283587 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1bbcbea-8862-4397-a672-5083ba34c23e-scripts" (OuterVolumeSpecName: "scripts") pod "b1bbcbea-8862-4397-a672-5083ba34c23e" (UID: "b1bbcbea-8862-4397-a672-5083ba34c23e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:08:34 crc kubenswrapper[4982]: I0122 06:08:34.302091 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1bbcbea-8862-4397-a672-5083ba34c23e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b1bbcbea-8862-4397-a672-5083ba34c23e" (UID: "b1bbcbea-8862-4397-a672-5083ba34c23e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:08:34 crc kubenswrapper[4982]: I0122 06:08:34.307613 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1bbcbea-8862-4397-a672-5083ba34c23e-config-data" (OuterVolumeSpecName: "config-data") pod "b1bbcbea-8862-4397-a672-5083ba34c23e" (UID: "b1bbcbea-8862-4397-a672-5083ba34c23e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:08:34 crc kubenswrapper[4982]: I0122 06:08:34.376951 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1bbcbea-8862-4397-a672-5083ba34c23e-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:34 crc kubenswrapper[4982]: I0122 06:08:34.376991 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1bbcbea-8862-4397-a672-5083ba34c23e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:34 crc kubenswrapper[4982]: I0122 06:08:34.377004 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ft7jl\" (UniqueName: \"kubernetes.io/projected/b1bbcbea-8862-4397-a672-5083ba34c23e-kube-api-access-ft7jl\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:34 crc kubenswrapper[4982]: I0122 06:08:34.377012 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1bbcbea-8862-4397-a672-5083ba34c23e-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:34 crc kubenswrapper[4982]: I0122 06:08:34.611258 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-2jrzv" event={"ID":"b1bbcbea-8862-4397-a672-5083ba34c23e","Type":"ContainerDied","Data":"5f9083a2496fa00ab8c3ad2951dfadfdfa59a9770ae5713ebd53f475d09f5ee9"} Jan 22 06:08:34 crc kubenswrapper[4982]: I0122 06:08:34.611324 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-2jrzv" Jan 22 06:08:34 crc kubenswrapper[4982]: I0122 06:08:34.611334 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5f9083a2496fa00ab8c3ad2951dfadfdfa59a9770ae5713ebd53f475d09f5ee9" Jan 22 06:08:34 crc kubenswrapper[4982]: I0122 06:08:34.612727 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6","Type":"ContainerStarted","Data":"c8aff2806002bb2c78191141373211dece6ce92f5d5f7c7e37491f2b182fc6a4"} Jan 22 06:08:34 crc kubenswrapper[4982]: I0122 06:08:34.699524 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 22 06:08:34 crc kubenswrapper[4982]: E0122 06:08:34.699898 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1bbcbea-8862-4397-a672-5083ba34c23e" containerName="nova-cell0-conductor-db-sync" Jan 22 06:08:34 crc kubenswrapper[4982]: I0122 06:08:34.699915 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1bbcbea-8862-4397-a672-5083ba34c23e" containerName="nova-cell0-conductor-db-sync" Jan 22 06:08:34 crc kubenswrapper[4982]: I0122 06:08:34.700128 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1bbcbea-8862-4397-a672-5083ba34c23e" containerName="nova-cell0-conductor-db-sync" Jan 22 06:08:34 crc kubenswrapper[4982]: I0122 06:08:34.700670 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 22 06:08:34 crc kubenswrapper[4982]: I0122 06:08:34.702834 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Jan 22 06:08:34 crc kubenswrapper[4982]: I0122 06:08:34.703222 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-jqvj4" Jan 22 06:08:34 crc kubenswrapper[4982]: I0122 06:08:34.718626 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 22 06:08:34 crc kubenswrapper[4982]: I0122 06:08:34.885466 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szzhp\" (UniqueName: \"kubernetes.io/projected/3d1d97fa-17f8-45ed-9881-5d3896c48708-kube-api-access-szzhp\") pod \"nova-cell0-conductor-0\" (UID: \"3d1d97fa-17f8-45ed-9881-5d3896c48708\") " pod="openstack/nova-cell0-conductor-0" Jan 22 06:08:34 crc kubenswrapper[4982]: I0122 06:08:34.885574 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d1d97fa-17f8-45ed-9881-5d3896c48708-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"3d1d97fa-17f8-45ed-9881-5d3896c48708\") " pod="openstack/nova-cell0-conductor-0" Jan 22 06:08:34 crc kubenswrapper[4982]: I0122 06:08:34.885594 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d1d97fa-17f8-45ed-9881-5d3896c48708-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"3d1d97fa-17f8-45ed-9881-5d3896c48708\") " pod="openstack/nova-cell0-conductor-0" Jan 22 06:08:34 crc kubenswrapper[4982]: I0122 06:08:34.987615 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szzhp\" (UniqueName: \"kubernetes.io/projected/3d1d97fa-17f8-45ed-9881-5d3896c48708-kube-api-access-szzhp\") pod \"nova-cell0-conductor-0\" (UID: \"3d1d97fa-17f8-45ed-9881-5d3896c48708\") " pod="openstack/nova-cell0-conductor-0" Jan 22 06:08:34 crc kubenswrapper[4982]: I0122 06:08:34.987768 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d1d97fa-17f8-45ed-9881-5d3896c48708-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"3d1d97fa-17f8-45ed-9881-5d3896c48708\") " pod="openstack/nova-cell0-conductor-0" Jan 22 06:08:34 crc kubenswrapper[4982]: I0122 06:08:34.987807 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d1d97fa-17f8-45ed-9881-5d3896c48708-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"3d1d97fa-17f8-45ed-9881-5d3896c48708\") " pod="openstack/nova-cell0-conductor-0" Jan 22 06:08:34 crc kubenswrapper[4982]: I0122 06:08:34.994118 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d1d97fa-17f8-45ed-9881-5d3896c48708-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"3d1d97fa-17f8-45ed-9881-5d3896c48708\") " pod="openstack/nova-cell0-conductor-0" Jan 22 06:08:34 crc kubenswrapper[4982]: I0122 06:08:34.997068 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d1d97fa-17f8-45ed-9881-5d3896c48708-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" 
(UID: \"3d1d97fa-17f8-45ed-9881-5d3896c48708\") " pod="openstack/nova-cell0-conductor-0" Jan 22 06:08:35 crc kubenswrapper[4982]: I0122 06:08:35.012532 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szzhp\" (UniqueName: \"kubernetes.io/projected/3d1d97fa-17f8-45ed-9881-5d3896c48708-kube-api-access-szzhp\") pod \"nova-cell0-conductor-0\" (UID: \"3d1d97fa-17f8-45ed-9881-5d3896c48708\") " pod="openstack/nova-cell0-conductor-0" Jan 22 06:08:35 crc kubenswrapper[4982]: I0122 06:08:35.015473 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 22 06:08:35 crc kubenswrapper[4982]: W0122 06:08:35.504502 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3d1d97fa_17f8_45ed_9881_5d3896c48708.slice/crio-1d43204a03b40042a47444008e962bdbba192494f6b0fd1b3263fa0bad4ceaae WatchSource:0}: Error finding container 1d43204a03b40042a47444008e962bdbba192494f6b0fd1b3263fa0bad4ceaae: Status 404 returned error can't find the container with id 1d43204a03b40042a47444008e962bdbba192494f6b0fd1b3263fa0bad4ceaae Jan 22 06:08:35 crc kubenswrapper[4982]: I0122 06:08:35.506809 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 22 06:08:35 crc kubenswrapper[4982]: I0122 06:08:35.622784 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"3d1d97fa-17f8-45ed-9881-5d3896c48708","Type":"ContainerStarted","Data":"1d43204a03b40042a47444008e962bdbba192494f6b0fd1b3263fa0bad4ceaae"} Jan 22 06:08:36 crc kubenswrapper[4982]: I0122 06:08:36.632087 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"3d1d97fa-17f8-45ed-9881-5d3896c48708","Type":"ContainerStarted","Data":"d529c70b3892ffd04ffa9e4ea7c7bd75cded5f79ec42a50c31cd18dce68669fd"} Jan 22 06:08:36 crc kubenswrapper[4982]: I0122 06:08:36.632380 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Jan 22 06:08:36 crc kubenswrapper[4982]: I0122 06:08:36.635271 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6","Type":"ContainerStarted","Data":"2459f512f93fe5e9f0a1dea36f63b700f0dbc388a393db53b4467cd3c90c1b4f"} Jan 22 06:08:36 crc kubenswrapper[4982]: I0122 06:08:36.635438 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 22 06:08:36 crc kubenswrapper[4982]: I0122 06:08:36.657052 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.657031707 podStartE2EDuration="2.657031707s" podCreationTimestamp="2026-01-22 06:08:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:08:36.647673603 +0000 UTC m=+1377.486311616" watchObservedRunningTime="2026-01-22 06:08:36.657031707 +0000 UTC m=+1377.495669720" Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.065494 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.094216 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=10.651432628 
podStartE2EDuration="15.094189971s" podCreationTimestamp="2026-01-22 06:08:30 +0000 UTC" firstStartedPulling="2026-01-22 06:08:31.788502532 +0000 UTC m=+1372.627140545" lastFinishedPulling="2026-01-22 06:08:36.231259875 +0000 UTC m=+1377.069897888" observedRunningTime="2026-01-22 06:08:36.674351587 +0000 UTC m=+1377.512989650" watchObservedRunningTime="2026-01-22 06:08:45.094189971 +0000 UTC m=+1385.932828014" Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.571815 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-cpdrg"] Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.573126 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-cpdrg" Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.576207 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.577246 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.605461 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-cpdrg"] Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.685410 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwrsb\" (UniqueName: \"kubernetes.io/projected/6c26adbc-0d63-423d-b805-bc5e866bf6f2-kube-api-access-mwrsb\") pod \"nova-cell0-cell-mapping-cpdrg\" (UID: \"6c26adbc-0d63-423d-b805-bc5e866bf6f2\") " pod="openstack/nova-cell0-cell-mapping-cpdrg" Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.685729 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c26adbc-0d63-423d-b805-bc5e866bf6f2-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-cpdrg\" (UID: \"6c26adbc-0d63-423d-b805-bc5e866bf6f2\") " pod="openstack/nova-cell0-cell-mapping-cpdrg" Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.685762 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c26adbc-0d63-423d-b805-bc5e866bf6f2-scripts\") pod \"nova-cell0-cell-mapping-cpdrg\" (UID: \"6c26adbc-0d63-423d-b805-bc5e866bf6f2\") " pod="openstack/nova-cell0-cell-mapping-cpdrg" Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.685784 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c26adbc-0d63-423d-b805-bc5e866bf6f2-config-data\") pod \"nova-cell0-cell-mapping-cpdrg\" (UID: \"6c26adbc-0d63-423d-b805-bc5e866bf6f2\") " pod="openstack/nova-cell0-cell-mapping-cpdrg" Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.791665 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c26adbc-0d63-423d-b805-bc5e866bf6f2-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-cpdrg\" (UID: \"6c26adbc-0d63-423d-b805-bc5e866bf6f2\") " pod="openstack/nova-cell0-cell-mapping-cpdrg" Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.791716 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c26adbc-0d63-423d-b805-bc5e866bf6f2-scripts\") pod 
\"nova-cell0-cell-mapping-cpdrg\" (UID: \"6c26adbc-0d63-423d-b805-bc5e866bf6f2\") " pod="openstack/nova-cell0-cell-mapping-cpdrg" Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.791747 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c26adbc-0d63-423d-b805-bc5e866bf6f2-config-data\") pod \"nova-cell0-cell-mapping-cpdrg\" (UID: \"6c26adbc-0d63-423d-b805-bc5e866bf6f2\") " pod="openstack/nova-cell0-cell-mapping-cpdrg" Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.791817 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwrsb\" (UniqueName: \"kubernetes.io/projected/6c26adbc-0d63-423d-b805-bc5e866bf6f2-kube-api-access-mwrsb\") pod \"nova-cell0-cell-mapping-cpdrg\" (UID: \"6c26adbc-0d63-423d-b805-bc5e866bf6f2\") " pod="openstack/nova-cell0-cell-mapping-cpdrg" Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.802691 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c26adbc-0d63-423d-b805-bc5e866bf6f2-config-data\") pod \"nova-cell0-cell-mapping-cpdrg\" (UID: \"6c26adbc-0d63-423d-b805-bc5e866bf6f2\") " pod="openstack/nova-cell0-cell-mapping-cpdrg" Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.821800 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c26adbc-0d63-423d-b805-bc5e866bf6f2-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-cpdrg\" (UID: \"6c26adbc-0d63-423d-b805-bc5e866bf6f2\") " pod="openstack/nova-cell0-cell-mapping-cpdrg" Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.824739 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c26adbc-0d63-423d-b805-bc5e866bf6f2-scripts\") pod \"nova-cell0-cell-mapping-cpdrg\" (UID: \"6c26adbc-0d63-423d-b805-bc5e866bf6f2\") " pod="openstack/nova-cell0-cell-mapping-cpdrg" Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.837080 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwrsb\" (UniqueName: \"kubernetes.io/projected/6c26adbc-0d63-423d-b805-bc5e866bf6f2-kube-api-access-mwrsb\") pod \"nova-cell0-cell-mapping-cpdrg\" (UID: \"6c26adbc-0d63-423d-b805-bc5e866bf6f2\") " pod="openstack/nova-cell0-cell-mapping-cpdrg" Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.846048 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.847139 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.854369 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.857276 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.895068 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-cpdrg" Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.955210 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.956495 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.960202 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.965910 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.967407 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.969629 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.996374 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.997993 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b00568f8-4b83-4844-bbda-9a24aee96ead-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b00568f8-4b83-4844-bbda-9a24aee96ead\") " pod="openstack/nova-scheduler-0" Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.998042 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b00568f8-4b83-4844-bbda-9a24aee96ead-config-data\") pod \"nova-scheduler-0\" (UID: \"b00568f8-4b83-4844-bbda-9a24aee96ead\") " pod="openstack/nova-scheduler-0" Jan 22 06:08:45 crc kubenswrapper[4982]: I0122 06:08:45.998125 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtkp5\" (UniqueName: \"kubernetes.io/projected/b00568f8-4b83-4844-bbda-9a24aee96ead-kube-api-access-jtkp5\") pod \"nova-scheduler-0\" (UID: \"b00568f8-4b83-4844-bbda-9a24aee96ead\") " pod="openstack/nova-scheduler-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.050526 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.099761 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c68kj\" (UniqueName: \"kubernetes.io/projected/5bc86273-7064-4dd4-a7c6-8d809c420ebc-kube-api-access-c68kj\") pod \"nova-cell1-novncproxy-0\" (UID: \"5bc86273-7064-4dd4-a7c6-8d809c420ebc\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.099889 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e908db97-58de-43eb-88a8-5ec6f7c4b958-logs\") pod \"nova-metadata-0\" (UID: \"e908db97-58de-43eb-88a8-5ec6f7c4b958\") " pod="openstack/nova-metadata-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.099917 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b00568f8-4b83-4844-bbda-9a24aee96ead-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b00568f8-4b83-4844-bbda-9a24aee96ead\") " pod="openstack/nova-scheduler-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.099984 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/e908db97-58de-43eb-88a8-5ec6f7c4b958-config-data\") pod \"nova-metadata-0\" (UID: \"e908db97-58de-43eb-88a8-5ec6f7c4b958\") " pod="openstack/nova-metadata-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.100003 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bc86273-7064-4dd4-a7c6-8d809c420ebc-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"5bc86273-7064-4dd4-a7c6-8d809c420ebc\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.100023 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b00568f8-4b83-4844-bbda-9a24aee96ead-config-data\") pod \"nova-scheduler-0\" (UID: \"b00568f8-4b83-4844-bbda-9a24aee96ead\") " pod="openstack/nova-scheduler-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.100070 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bc86273-7064-4dd4-a7c6-8d809c420ebc-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"5bc86273-7064-4dd4-a7c6-8d809c420ebc\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.100140 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtkp5\" (UniqueName: \"kubernetes.io/projected/b00568f8-4b83-4844-bbda-9a24aee96ead-kube-api-access-jtkp5\") pod \"nova-scheduler-0\" (UID: \"b00568f8-4b83-4844-bbda-9a24aee96ead\") " pod="openstack/nova-scheduler-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.100170 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e908db97-58de-43eb-88a8-5ec6f7c4b958-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e908db97-58de-43eb-88a8-5ec6f7c4b958\") " pod="openstack/nova-metadata-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.100195 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2vh8\" (UniqueName: \"kubernetes.io/projected/e908db97-58de-43eb-88a8-5ec6f7c4b958-kube-api-access-g2vh8\") pod \"nova-metadata-0\" (UID: \"e908db97-58de-43eb-88a8-5ec6f7c4b958\") " pod="openstack/nova-metadata-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.113433 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b00568f8-4b83-4844-bbda-9a24aee96ead-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b00568f8-4b83-4844-bbda-9a24aee96ead\") " pod="openstack/nova-scheduler-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.143312 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtkp5\" (UniqueName: \"kubernetes.io/projected/b00568f8-4b83-4844-bbda-9a24aee96ead-kube-api-access-jtkp5\") pod \"nova-scheduler-0\" (UID: \"b00568f8-4b83-4844-bbda-9a24aee96ead\") " pod="openstack/nova-scheduler-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.150768 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.151435 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/b00568f8-4b83-4844-bbda-9a24aee96ead-config-data\") pod \"nova-scheduler-0\" (UID: \"b00568f8-4b83-4844-bbda-9a24aee96ead\") " pod="openstack/nova-scheduler-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.152266 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.162380 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.176974 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.201918 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e908db97-58de-43eb-88a8-5ec6f7c4b958-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e908db97-58de-43eb-88a8-5ec6f7c4b958\") " pod="openstack/nova-metadata-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.201967 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2vh8\" (UniqueName: \"kubernetes.io/projected/e908db97-58de-43eb-88a8-5ec6f7c4b958-kube-api-access-g2vh8\") pod \"nova-metadata-0\" (UID: \"e908db97-58de-43eb-88a8-5ec6f7c4b958\") " pod="openstack/nova-metadata-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.202003 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c68kj\" (UniqueName: \"kubernetes.io/projected/5bc86273-7064-4dd4-a7c6-8d809c420ebc-kube-api-access-c68kj\") pod \"nova-cell1-novncproxy-0\" (UID: \"5bc86273-7064-4dd4-a7c6-8d809c420ebc\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.202050 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e908db97-58de-43eb-88a8-5ec6f7c4b958-logs\") pod \"nova-metadata-0\" (UID: \"e908db97-58de-43eb-88a8-5ec6f7c4b958\") " pod="openstack/nova-metadata-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.202071 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e908db97-58de-43eb-88a8-5ec6f7c4b958-config-data\") pod \"nova-metadata-0\" (UID: \"e908db97-58de-43eb-88a8-5ec6f7c4b958\") " pod="openstack/nova-metadata-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.202087 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bc86273-7064-4dd4-a7c6-8d809c420ebc-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"5bc86273-7064-4dd4-a7c6-8d809c420ebc\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.202115 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bc86273-7064-4dd4-a7c6-8d809c420ebc-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"5bc86273-7064-4dd4-a7c6-8d809c420ebc\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.202776 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-647df7b8c5-qklw7"] Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.204246 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-647df7b8c5-qklw7" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.205803 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e908db97-58de-43eb-88a8-5ec6f7c4b958-logs\") pod \"nova-metadata-0\" (UID: \"e908db97-58de-43eb-88a8-5ec6f7c4b958\") " pod="openstack/nova-metadata-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.209185 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e908db97-58de-43eb-88a8-5ec6f7c4b958-config-data\") pod \"nova-metadata-0\" (UID: \"e908db97-58de-43eb-88a8-5ec6f7c4b958\") " pod="openstack/nova-metadata-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.215532 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e908db97-58de-43eb-88a8-5ec6f7c4b958-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e908db97-58de-43eb-88a8-5ec6f7c4b958\") " pod="openstack/nova-metadata-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.220088 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bc86273-7064-4dd4-a7c6-8d809c420ebc-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"5bc86273-7064-4dd4-a7c6-8d809c420ebc\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.223344 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2vh8\" (UniqueName: \"kubernetes.io/projected/e908db97-58de-43eb-88a8-5ec6f7c4b958-kube-api-access-g2vh8\") pod \"nova-metadata-0\" (UID: \"e908db97-58de-43eb-88a8-5ec6f7c4b958\") " pod="openstack/nova-metadata-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.226752 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c68kj\" (UniqueName: \"kubernetes.io/projected/5bc86273-7064-4dd4-a7c6-8d809c420ebc-kube-api-access-c68kj\") pod \"nova-cell1-novncproxy-0\" (UID: \"5bc86273-7064-4dd4-a7c6-8d809c420ebc\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.234724 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.235275 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-647df7b8c5-qklw7"] Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.235468 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bc86273-7064-4dd4-a7c6-8d809c420ebc-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"5bc86273-7064-4dd4-a7c6-8d809c420ebc\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.304816 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9918bf38-2e78-4627-b996-05f42319fb53-dns-swift-storage-0\") pod \"dnsmasq-dns-647df7b8c5-qklw7\" (UID: \"9918bf38-2e78-4627-b996-05f42319fb53\") " pod="openstack/dnsmasq-dns-647df7b8c5-qklw7" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.304879 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9918bf38-2e78-4627-b996-05f42319fb53-config\") pod \"dnsmasq-dns-647df7b8c5-qklw7\" (UID: \"9918bf38-2e78-4627-b996-05f42319fb53\") " pod="openstack/dnsmasq-dns-647df7b8c5-qklw7" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.304923 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/da23312a-5e37-47cb-9ffb-f4a5268bf1cc-logs\") pod \"nova-api-0\" (UID: \"da23312a-5e37-47cb-9ffb-f4a5268bf1cc\") " pod="openstack/nova-api-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.304943 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9918bf38-2e78-4627-b996-05f42319fb53-ovsdbserver-nb\") pod \"dnsmasq-dns-647df7b8c5-qklw7\" (UID: \"9918bf38-2e78-4627-b996-05f42319fb53\") " pod="openstack/dnsmasq-dns-647df7b8c5-qklw7" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.304961 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da23312a-5e37-47cb-9ffb-f4a5268bf1cc-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"da23312a-5e37-47cb-9ffb-f4a5268bf1cc\") " pod="openstack/nova-api-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.304989 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9918bf38-2e78-4627-b996-05f42319fb53-ovsdbserver-sb\") pod \"dnsmasq-dns-647df7b8c5-qklw7\" (UID: \"9918bf38-2e78-4627-b996-05f42319fb53\") " pod="openstack/dnsmasq-dns-647df7b8c5-qklw7" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.305012 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d79cp\" (UniqueName: \"kubernetes.io/projected/9918bf38-2e78-4627-b996-05f42319fb53-kube-api-access-d79cp\") pod \"dnsmasq-dns-647df7b8c5-qklw7\" (UID: \"9918bf38-2e78-4627-b996-05f42319fb53\") " pod="openstack/dnsmasq-dns-647df7b8c5-qklw7" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.305032 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/9918bf38-2e78-4627-b996-05f42319fb53-dns-svc\") pod \"dnsmasq-dns-647df7b8c5-qklw7\" (UID: \"9918bf38-2e78-4627-b996-05f42319fb53\") " pod="openstack/dnsmasq-dns-647df7b8c5-qklw7" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.305096 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da23312a-5e37-47cb-9ffb-f4a5268bf1cc-config-data\") pod \"nova-api-0\" (UID: \"da23312a-5e37-47cb-9ffb-f4a5268bf1cc\") " pod="openstack/nova-api-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.305122 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5czq\" (UniqueName: \"kubernetes.io/projected/da23312a-5e37-47cb-9ffb-f4a5268bf1cc-kube-api-access-n5czq\") pod \"nova-api-0\" (UID: \"da23312a-5e37-47cb-9ffb-f4a5268bf1cc\") " pod="openstack/nova-api-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.346206 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.386452 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.408843 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9918bf38-2e78-4627-b996-05f42319fb53-dns-swift-storage-0\") pod \"dnsmasq-dns-647df7b8c5-qklw7\" (UID: \"9918bf38-2e78-4627-b996-05f42319fb53\") " pod="openstack/dnsmasq-dns-647df7b8c5-qklw7" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.408908 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9918bf38-2e78-4627-b996-05f42319fb53-config\") pod \"dnsmasq-dns-647df7b8c5-qklw7\" (UID: \"9918bf38-2e78-4627-b996-05f42319fb53\") " pod="openstack/dnsmasq-dns-647df7b8c5-qklw7" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.408955 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/da23312a-5e37-47cb-9ffb-f4a5268bf1cc-logs\") pod \"nova-api-0\" (UID: \"da23312a-5e37-47cb-9ffb-f4a5268bf1cc\") " pod="openstack/nova-api-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.408974 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9918bf38-2e78-4627-b996-05f42319fb53-ovsdbserver-nb\") pod \"dnsmasq-dns-647df7b8c5-qklw7\" (UID: \"9918bf38-2e78-4627-b996-05f42319fb53\") " pod="openstack/dnsmasq-dns-647df7b8c5-qklw7" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.408995 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da23312a-5e37-47cb-9ffb-f4a5268bf1cc-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"da23312a-5e37-47cb-9ffb-f4a5268bf1cc\") " pod="openstack/nova-api-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.409026 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9918bf38-2e78-4627-b996-05f42319fb53-ovsdbserver-sb\") pod \"dnsmasq-dns-647df7b8c5-qklw7\" (UID: \"9918bf38-2e78-4627-b996-05f42319fb53\") " pod="openstack/dnsmasq-dns-647df7b8c5-qklw7" Jan 22 
06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.409050 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d79cp\" (UniqueName: \"kubernetes.io/projected/9918bf38-2e78-4627-b996-05f42319fb53-kube-api-access-d79cp\") pod \"dnsmasq-dns-647df7b8c5-qklw7\" (UID: \"9918bf38-2e78-4627-b996-05f42319fb53\") " pod="openstack/dnsmasq-dns-647df7b8c5-qklw7" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.409076 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9918bf38-2e78-4627-b996-05f42319fb53-dns-svc\") pod \"dnsmasq-dns-647df7b8c5-qklw7\" (UID: \"9918bf38-2e78-4627-b996-05f42319fb53\") " pod="openstack/dnsmasq-dns-647df7b8c5-qklw7" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.409156 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da23312a-5e37-47cb-9ffb-f4a5268bf1cc-config-data\") pod \"nova-api-0\" (UID: \"da23312a-5e37-47cb-9ffb-f4a5268bf1cc\") " pod="openstack/nova-api-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.412709 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5czq\" (UniqueName: \"kubernetes.io/projected/da23312a-5e37-47cb-9ffb-f4a5268bf1cc-kube-api-access-n5czq\") pod \"nova-api-0\" (UID: \"da23312a-5e37-47cb-9ffb-f4a5268bf1cc\") " pod="openstack/nova-api-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.409892 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9918bf38-2e78-4627-b996-05f42319fb53-dns-swift-storage-0\") pod \"dnsmasq-dns-647df7b8c5-qklw7\" (UID: \"9918bf38-2e78-4627-b996-05f42319fb53\") " pod="openstack/dnsmasq-dns-647df7b8c5-qklw7" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.410090 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9918bf38-2e78-4627-b996-05f42319fb53-config\") pod \"dnsmasq-dns-647df7b8c5-qklw7\" (UID: \"9918bf38-2e78-4627-b996-05f42319fb53\") " pod="openstack/dnsmasq-dns-647df7b8c5-qklw7" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.410325 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9918bf38-2e78-4627-b996-05f42319fb53-ovsdbserver-nb\") pod \"dnsmasq-dns-647df7b8c5-qklw7\" (UID: \"9918bf38-2e78-4627-b996-05f42319fb53\") " pod="openstack/dnsmasq-dns-647df7b8c5-qklw7" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.410516 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9918bf38-2e78-4627-b996-05f42319fb53-ovsdbserver-sb\") pod \"dnsmasq-dns-647df7b8c5-qklw7\" (UID: \"9918bf38-2e78-4627-b996-05f42319fb53\") " pod="openstack/dnsmasq-dns-647df7b8c5-qklw7" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.411040 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9918bf38-2e78-4627-b996-05f42319fb53-dns-svc\") pod \"dnsmasq-dns-647df7b8c5-qklw7\" (UID: \"9918bf38-2e78-4627-b996-05f42319fb53\") " pod="openstack/dnsmasq-dns-647df7b8c5-qklw7" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.410010 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
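Note that the SetUp records just above are not strictly time-ordered: 06:08:46.412709 is logged before 06:08:46.409892, and 06:08:46.410010 lands after 06:08:46.411040, presumably because the mount operations run on concurrent goroutines and reach the log in completion order. When correlating events it can therefore help to re-sort records by the klog header timestamp rather than trusting file order. A small sketch, assuming the "I0122 06:08:46.409892" header format and the year visible elsewhere in these records:

    import re
    from datetime import datetime

    # klog header: severity letter, MMDD, then HH:MM:SS.microseconds.
    HDR = re.compile(r"[IWEF](\d{4}) (\d{2}:\d{2}:\d{2}\.\d{6})")

    def klog_key(line, year=2026):
        """Sort key: timestamp parsed from the klog header; non-matching lines sort first."""
        m = HDR.search(line)
        if not m:
            return datetime.min
        return datetime.strptime(f"{year}{m.group(1)} {m.group(2)}", "%Y%m%d %H:%M:%S.%f")

    # records = sorted(records, key=klog_key)  # restores chronological order

This is lossy across year boundaries (klog headers omit the year), which is why the year is passed in explicitly here.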
\"kubernetes.io/empty-dir/da23312a-5e37-47cb-9ffb-f4a5268bf1cc-logs\") pod \"nova-api-0\" (UID: \"da23312a-5e37-47cb-9ffb-f4a5268bf1cc\") " pod="openstack/nova-api-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.414614 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da23312a-5e37-47cb-9ffb-f4a5268bf1cc-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"da23312a-5e37-47cb-9ffb-f4a5268bf1cc\") " pod="openstack/nova-api-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.424086 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da23312a-5e37-47cb-9ffb-f4a5268bf1cc-config-data\") pod \"nova-api-0\" (UID: \"da23312a-5e37-47cb-9ffb-f4a5268bf1cc\") " pod="openstack/nova-api-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.429438 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d79cp\" (UniqueName: \"kubernetes.io/projected/9918bf38-2e78-4627-b996-05f42319fb53-kube-api-access-d79cp\") pod \"dnsmasq-dns-647df7b8c5-qklw7\" (UID: \"9918bf38-2e78-4627-b996-05f42319fb53\") " pod="openstack/dnsmasq-dns-647df7b8c5-qklw7" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.447627 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5czq\" (UniqueName: \"kubernetes.io/projected/da23312a-5e37-47cb-9ffb-f4a5268bf1cc-kube-api-access-n5czq\") pod \"nova-api-0\" (UID: \"da23312a-5e37-47cb-9ffb-f4a5268bf1cc\") " pod="openstack/nova-api-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.505257 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.513502 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-cpdrg"] Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.542294 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-647df7b8c5-qklw7" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.718458 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-r9qxz"] Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.719819 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-r9qxz" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.723435 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.723633 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.745179 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-r9qxz"] Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.763356 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.795814 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-cpdrg" event={"ID":"6c26adbc-0d63-423d-b805-bc5e866bf6f2","Type":"ContainerStarted","Data":"a0ad193e8899ba5f386890fc460715811f27192885e398c2d2caf5e6a3afc1c6"} Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.797345 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b00568f8-4b83-4844-bbda-9a24aee96ead","Type":"ContainerStarted","Data":"72ed7252cb235c9cb14a169399457bc7553e6d4745e93f486c55ebd8a1f62168"} Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.818004 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c-config-data\") pod \"nova-cell1-conductor-db-sync-r9qxz\" (UID: \"2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c\") " pod="openstack/nova-cell1-conductor-db-sync-r9qxz" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.818265 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c-scripts\") pod \"nova-cell1-conductor-db-sync-r9qxz\" (UID: \"2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c\") " pod="openstack/nova-cell1-conductor-db-sync-r9qxz" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.818428 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-r9qxz\" (UID: \"2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c\") " pod="openstack/nova-cell1-conductor-db-sync-r9qxz" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.818629 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scl9l\" (UniqueName: \"kubernetes.io/projected/2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c-kube-api-access-scl9l\") pod \"nova-cell1-conductor-db-sync-r9qxz\" (UID: \"2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c\") " pod="openstack/nova-cell1-conductor-db-sync-r9qxz" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.915117 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.920335 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-r9qxz\" (UID: \"2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c\") " 
pod="openstack/nova-cell1-conductor-db-sync-r9qxz" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.920416 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scl9l\" (UniqueName: \"kubernetes.io/projected/2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c-kube-api-access-scl9l\") pod \"nova-cell1-conductor-db-sync-r9qxz\" (UID: \"2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c\") " pod="openstack/nova-cell1-conductor-db-sync-r9qxz" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.920501 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c-config-data\") pod \"nova-cell1-conductor-db-sync-r9qxz\" (UID: \"2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c\") " pod="openstack/nova-cell1-conductor-db-sync-r9qxz" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.920568 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c-scripts\") pod \"nova-cell1-conductor-db-sync-r9qxz\" (UID: \"2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c\") " pod="openstack/nova-cell1-conductor-db-sync-r9qxz" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.925761 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c-scripts\") pod \"nova-cell1-conductor-db-sync-r9qxz\" (UID: \"2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c\") " pod="openstack/nova-cell1-conductor-db-sync-r9qxz" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.926414 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c-config-data\") pod \"nova-cell1-conductor-db-sync-r9qxz\" (UID: \"2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c\") " pod="openstack/nova-cell1-conductor-db-sync-r9qxz" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.947586 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-r9qxz\" (UID: \"2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c\") " pod="openstack/nova-cell1-conductor-db-sync-r9qxz" Jan 22 06:08:46 crc kubenswrapper[4982]: I0122 06:08:46.947805 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scl9l\" (UniqueName: \"kubernetes.io/projected/2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c-kube-api-access-scl9l\") pod \"nova-cell1-conductor-db-sync-r9qxz\" (UID: \"2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c\") " pod="openstack/nova-cell1-conductor-db-sync-r9qxz" Jan 22 06:08:47 crc kubenswrapper[4982]: I0122 06:08:47.046976 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-r9qxz" Jan 22 06:08:47 crc kubenswrapper[4982]: I0122 06:08:47.052917 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 06:08:47 crc kubenswrapper[4982]: I0122 06:08:47.086701 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 22 06:08:47 crc kubenswrapper[4982]: I0122 06:08:47.201014 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-647df7b8c5-qklw7"] Jan 22 06:08:47 crc kubenswrapper[4982]: I0122 06:08:47.660156 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-r9qxz"] Jan 22 06:08:47 crc kubenswrapper[4982]: I0122 06:08:47.834297 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-cpdrg" event={"ID":"6c26adbc-0d63-423d-b805-bc5e866bf6f2","Type":"ContainerStarted","Data":"8c0bf9c9c622b073e5e691a41bfe671bee3ec8a34d459fc686856f0e0eef9678"} Jan 22 06:08:47 crc kubenswrapper[4982]: I0122 06:08:47.836175 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e908db97-58de-43eb-88a8-5ec6f7c4b958","Type":"ContainerStarted","Data":"e4e19249a5af9d3534cfad92d1397b75b2fd07e5ee7fca9ea0b282d473229f0e"} Jan 22 06:08:47 crc kubenswrapper[4982]: I0122 06:08:47.837493 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"5bc86273-7064-4dd4-a7c6-8d809c420ebc","Type":"ContainerStarted","Data":"14650da8b438b35223c0787dc2bf5319a6537b08785461c0a44047d3d91ffe3f"} Jan 22 06:08:47 crc kubenswrapper[4982]: I0122 06:08:47.838756 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"da23312a-5e37-47cb-9ffb-f4a5268bf1cc","Type":"ContainerStarted","Data":"ae519c91c2bf21b723037607358723506d946825d185b4ebfdcc084983191f2c"} Jan 22 06:08:47 crc kubenswrapper[4982]: I0122 06:08:47.847084 4982 generic.go:334] "Generic (PLEG): container finished" podID="9918bf38-2e78-4627-b996-05f42319fb53" containerID="4e93f63aba0b979e2191d5bd9df573e67630655b32c7ca1e799256e191df8d37" exitCode=0 Jan 22 06:08:47 crc kubenswrapper[4982]: I0122 06:08:47.847175 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-647df7b8c5-qklw7" event={"ID":"9918bf38-2e78-4627-b996-05f42319fb53","Type":"ContainerDied","Data":"4e93f63aba0b979e2191d5bd9df573e67630655b32c7ca1e799256e191df8d37"} Jan 22 06:08:47 crc kubenswrapper[4982]: I0122 06:08:47.847214 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-647df7b8c5-qklw7" event={"ID":"9918bf38-2e78-4627-b996-05f42319fb53","Type":"ContainerStarted","Data":"c008dc238a63533c7abd6e70661e790286c6c0f9bdc45bcecda7850d99e0233b"} Jan 22 06:08:47 crc kubenswrapper[4982]: I0122 06:08:47.866176 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-cpdrg" podStartSLOduration=2.866157996 podStartE2EDuration="2.866157996s" podCreationTimestamp="2026-01-22 06:08:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:08:47.857593353 +0000 UTC m=+1388.696231356" watchObservedRunningTime="2026-01-22 06:08:47.866157996 +0000 UTC m=+1388.704796009" Jan 22 06:08:47 crc kubenswrapper[4982]: I0122 06:08:47.869066 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-r9qxz" 
event={"ID":"2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c","Type":"ContainerStarted","Data":"baa68c4b84e0bd3b726cd5648b5cb36eaef6c60fc2ea86a61307f20df71f6fca"} Jan 22 06:08:47 crc kubenswrapper[4982]: I0122 06:08:47.869276 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-r9qxz" event={"ID":"2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c","Type":"ContainerStarted","Data":"5ca3e0a2e2ddbb1bd157c6a42891f2e6ea3f4e0b60a934d6009fab5d870b986c"} Jan 22 06:08:48 crc kubenswrapper[4982]: I0122 06:08:48.878865 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-647df7b8c5-qklw7" event={"ID":"9918bf38-2e78-4627-b996-05f42319fb53","Type":"ContainerStarted","Data":"5dc16f0c54d8ae6a28c68a312b07df0a689510c87837e0a4b7e78b5b759f00ea"} Jan 22 06:08:48 crc kubenswrapper[4982]: I0122 06:08:48.904342 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-r9qxz" podStartSLOduration=2.904323102 podStartE2EDuration="2.904323102s" podCreationTimestamp="2026-01-22 06:08:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:08:47.912754197 +0000 UTC m=+1388.751392200" watchObservedRunningTime="2026-01-22 06:08:48.904323102 +0000 UTC m=+1389.742961105" Jan 22 06:08:49 crc kubenswrapper[4982]: I0122 06:08:49.427577 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-647df7b8c5-qklw7" podStartSLOduration=3.427558749 podStartE2EDuration="3.427558749s" podCreationTimestamp="2026-01-22 06:08:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:08:48.898263435 +0000 UTC m=+1389.736901438" watchObservedRunningTime="2026-01-22 06:08:49.427558749 +0000 UTC m=+1390.266196752" Jan 22 06:08:49 crc kubenswrapper[4982]: I0122 06:08:49.434881 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 06:08:49 crc kubenswrapper[4982]: I0122 06:08:49.453743 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 06:08:49 crc kubenswrapper[4982]: I0122 06:08:49.887211 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-647df7b8c5-qklw7" Jan 22 06:08:51 crc kubenswrapper[4982]: I0122 06:08:51.922043 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e908db97-58de-43eb-88a8-5ec6f7c4b958","Type":"ContainerStarted","Data":"bf7687ed20c0f0d594abc68153c62fdf347607f53d64daa1cea9897da6c5bc3d"} Jan 22 06:08:51 crc kubenswrapper[4982]: I0122 06:08:51.922808 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e908db97-58de-43eb-88a8-5ec6f7c4b958","Type":"ContainerStarted","Data":"4f5b17e0d0a251a44e376c9928c5464b0a50a66770c3bb90f5f1519e980ffd2e"} Jan 22 06:08:51 crc kubenswrapper[4982]: I0122 06:08:51.923149 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="e908db97-58de-43eb-88a8-5ec6f7c4b958" containerName="nova-metadata-log" containerID="cri-o://4f5b17e0d0a251a44e376c9928c5464b0a50a66770c3bb90f5f1519e980ffd2e" gracePeriod=30 Jan 22 06:08:51 crc kubenswrapper[4982]: I0122 06:08:51.923920 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" 
podUID="e908db97-58de-43eb-88a8-5ec6f7c4b958" containerName="nova-metadata-metadata" containerID="cri-o://bf7687ed20c0f0d594abc68153c62fdf347607f53d64daa1cea9897da6c5bc3d" gracePeriod=30 Jan 22 06:08:51 crc kubenswrapper[4982]: I0122 06:08:51.931103 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"5bc86273-7064-4dd4-a7c6-8d809c420ebc","Type":"ContainerStarted","Data":"1a1ebed28006139fb5213e7b71ef77012030d9bba6e3862a0578b298c207244d"} Jan 22 06:08:51 crc kubenswrapper[4982]: I0122 06:08:51.931506 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="5bc86273-7064-4dd4-a7c6-8d809c420ebc" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://1a1ebed28006139fb5213e7b71ef77012030d9bba6e3862a0578b298c207244d" gracePeriod=30 Jan 22 06:08:51 crc kubenswrapper[4982]: I0122 06:08:51.942676 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b00568f8-4b83-4844-bbda-9a24aee96ead","Type":"ContainerStarted","Data":"654241e2775a0145645996c65d3aaa20c5193d51a7f39e5c01584b636f5e211a"} Jan 22 06:08:51 crc kubenswrapper[4982]: I0122 06:08:51.952006 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.485301436 podStartE2EDuration="6.951990816s" podCreationTimestamp="2026-01-22 06:08:45 +0000 UTC" firstStartedPulling="2026-01-22 06:08:47.132035375 +0000 UTC m=+1387.970673378" lastFinishedPulling="2026-01-22 06:08:50.598724755 +0000 UTC m=+1391.437362758" observedRunningTime="2026-01-22 06:08:51.95136706 +0000 UTC m=+1392.790005083" watchObservedRunningTime="2026-01-22 06:08:51.951990816 +0000 UTC m=+1392.790628819" Jan 22 06:08:51 crc kubenswrapper[4982]: I0122 06:08:51.966579 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"da23312a-5e37-47cb-9ffb-f4a5268bf1cc","Type":"ContainerStarted","Data":"9bab7936a613cc74a0a3f83010b1232cac6070b00ba1b55063be6c1f7558e958"} Jan 22 06:08:51 crc kubenswrapper[4982]: I0122 06:08:51.966615 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"da23312a-5e37-47cb-9ffb-f4a5268bf1cc","Type":"ContainerStarted","Data":"b7ea765156c7ce1789e718d25853d851de2fe8cae3426355604efef6bdbca3ad"} Jan 22 06:08:51 crc kubenswrapper[4982]: I0122 06:08:51.985486 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.145307814 podStartE2EDuration="6.985465036s" podCreationTimestamp="2026-01-22 06:08:45 +0000 UTC" firstStartedPulling="2026-01-22 06:08:46.76038172 +0000 UTC m=+1387.599019723" lastFinishedPulling="2026-01-22 06:08:50.600538932 +0000 UTC m=+1391.439176945" observedRunningTime="2026-01-22 06:08:51.973876185 +0000 UTC m=+1392.812514198" watchObservedRunningTime="2026-01-22 06:08:51.985465036 +0000 UTC m=+1392.824103059" Jan 22 06:08:52 crc kubenswrapper[4982]: I0122 06:08:52.005612 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.3557671559999998 podStartE2EDuration="7.005590169s" podCreationTimestamp="2026-01-22 06:08:45 +0000 UTC" firstStartedPulling="2026-01-22 06:08:46.948991064 +0000 UTC m=+1387.787629067" lastFinishedPulling="2026-01-22 06:08:50.598814077 +0000 UTC m=+1391.437452080" observedRunningTime="2026-01-22 06:08:51.999122192 +0000 UTC m=+1392.837760215" 
watchObservedRunningTime="2026-01-22 06:08:52.005590169 +0000 UTC m=+1392.844228192" Jan 22 06:08:52 crc kubenswrapper[4982]: I0122 06:08:52.031522 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.633357646 podStartE2EDuration="7.031500634s" podCreationTimestamp="2026-01-22 06:08:45 +0000 UTC" firstStartedPulling="2026-01-22 06:08:47.198513754 +0000 UTC m=+1388.037151757" lastFinishedPulling="2026-01-22 06:08:50.596656702 +0000 UTC m=+1391.435294745" observedRunningTime="2026-01-22 06:08:52.015495118 +0000 UTC m=+1392.854133151" watchObservedRunningTime="2026-01-22 06:08:52.031500634 +0000 UTC m=+1392.870138647" Jan 22 06:08:52 crc kubenswrapper[4982]: I0122 06:08:52.518961 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 06:08:52 crc kubenswrapper[4982]: I0122 06:08:52.659247 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e908db97-58de-43eb-88a8-5ec6f7c4b958-config-data\") pod \"e908db97-58de-43eb-88a8-5ec6f7c4b958\" (UID: \"e908db97-58de-43eb-88a8-5ec6f7c4b958\") " Jan 22 06:08:52 crc kubenswrapper[4982]: I0122 06:08:52.659733 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e908db97-58de-43eb-88a8-5ec6f7c4b958-logs\") pod \"e908db97-58de-43eb-88a8-5ec6f7c4b958\" (UID: \"e908db97-58de-43eb-88a8-5ec6f7c4b958\") " Jan 22 06:08:52 crc kubenswrapper[4982]: I0122 06:08:52.659796 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e908db97-58de-43eb-88a8-5ec6f7c4b958-combined-ca-bundle\") pod \"e908db97-58de-43eb-88a8-5ec6f7c4b958\" (UID: \"e908db97-58de-43eb-88a8-5ec6f7c4b958\") " Jan 22 06:08:52 crc kubenswrapper[4982]: I0122 06:08:52.659827 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g2vh8\" (UniqueName: \"kubernetes.io/projected/e908db97-58de-43eb-88a8-5ec6f7c4b958-kube-api-access-g2vh8\") pod \"e908db97-58de-43eb-88a8-5ec6f7c4b958\" (UID: \"e908db97-58de-43eb-88a8-5ec6f7c4b958\") " Jan 22 06:08:52 crc kubenswrapper[4982]: I0122 06:08:52.660478 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e908db97-58de-43eb-88a8-5ec6f7c4b958-logs" (OuterVolumeSpecName: "logs") pod "e908db97-58de-43eb-88a8-5ec6f7c4b958" (UID: "e908db97-58de-43eb-88a8-5ec6f7c4b958"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:08:52 crc kubenswrapper[4982]: I0122 06:08:52.664087 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e908db97-58de-43eb-88a8-5ec6f7c4b958-kube-api-access-g2vh8" (OuterVolumeSpecName: "kube-api-access-g2vh8") pod "e908db97-58de-43eb-88a8-5ec6f7c4b958" (UID: "e908db97-58de-43eb-88a8-5ec6f7c4b958"). InnerVolumeSpecName "kube-api-access-g2vh8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:08:52 crc kubenswrapper[4982]: I0122 06:08:52.684607 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e908db97-58de-43eb-88a8-5ec6f7c4b958-config-data" (OuterVolumeSpecName: "config-data") pod "e908db97-58de-43eb-88a8-5ec6f7c4b958" (UID: "e908db97-58de-43eb-88a8-5ec6f7c4b958"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:08:52 crc kubenswrapper[4982]: I0122 06:08:52.686133 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e908db97-58de-43eb-88a8-5ec6f7c4b958-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e908db97-58de-43eb-88a8-5ec6f7c4b958" (UID: "e908db97-58de-43eb-88a8-5ec6f7c4b958"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:08:52 crc kubenswrapper[4982]: I0122 06:08:52.761999 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e908db97-58de-43eb-88a8-5ec6f7c4b958-logs\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:52 crc kubenswrapper[4982]: I0122 06:08:52.762036 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e908db97-58de-43eb-88a8-5ec6f7c4b958-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:52 crc kubenswrapper[4982]: I0122 06:08:52.762052 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g2vh8\" (UniqueName: \"kubernetes.io/projected/e908db97-58de-43eb-88a8-5ec6f7c4b958-kube-api-access-g2vh8\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:52 crc kubenswrapper[4982]: I0122 06:08:52.762072 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e908db97-58de-43eb-88a8-5ec6f7c4b958-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:52 crc kubenswrapper[4982]: I0122 06:08:52.978193 4982 generic.go:334] "Generic (PLEG): container finished" podID="e908db97-58de-43eb-88a8-5ec6f7c4b958" containerID="bf7687ed20c0f0d594abc68153c62fdf347607f53d64daa1cea9897da6c5bc3d" exitCode=0 Jan 22 06:08:52 crc kubenswrapper[4982]: I0122 06:08:52.978225 4982 generic.go:334] "Generic (PLEG): container finished" podID="e908db97-58de-43eb-88a8-5ec6f7c4b958" containerID="4f5b17e0d0a251a44e376c9928c5464b0a50a66770c3bb90f5f1519e980ffd2e" exitCode=143 Jan 22 06:08:52 crc kubenswrapper[4982]: I0122 06:08:52.978280 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 06:08:52 crc kubenswrapper[4982]: I0122 06:08:52.978358 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e908db97-58de-43eb-88a8-5ec6f7c4b958","Type":"ContainerDied","Data":"bf7687ed20c0f0d594abc68153c62fdf347607f53d64daa1cea9897da6c5bc3d"} Jan 22 06:08:52 crc kubenswrapper[4982]: I0122 06:08:52.978435 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e908db97-58de-43eb-88a8-5ec6f7c4b958","Type":"ContainerDied","Data":"4f5b17e0d0a251a44e376c9928c5464b0a50a66770c3bb90f5f1519e980ffd2e"} Jan 22 06:08:52 crc kubenswrapper[4982]: I0122 06:08:52.978455 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e908db97-58de-43eb-88a8-5ec6f7c4b958","Type":"ContainerDied","Data":"e4e19249a5af9d3534cfad92d1397b75b2fd07e5ee7fca9ea0b282d473229f0e"} Jan 22 06:08:52 crc kubenswrapper[4982]: I0122 06:08:52.978483 4982 scope.go:117] "RemoveContainer" containerID="bf7687ed20c0f0d594abc68153c62fdf347607f53d64daa1cea9897da6c5bc3d" Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.007177 4982 scope.go:117] "RemoveContainer" containerID="4f5b17e0d0a251a44e376c9928c5464b0a50a66770c3bb90f5f1519e980ffd2e" Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.022228 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.034606 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.053117 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 22 06:08:53 crc kubenswrapper[4982]: E0122 06:08:53.053644 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e908db97-58de-43eb-88a8-5ec6f7c4b958" containerName="nova-metadata-metadata" Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.053661 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e908db97-58de-43eb-88a8-5ec6f7c4b958" containerName="nova-metadata-metadata" Jan 22 06:08:53 crc kubenswrapper[4982]: E0122 06:08:53.053672 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e908db97-58de-43eb-88a8-5ec6f7c4b958" containerName="nova-metadata-log" Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.053678 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e908db97-58de-43eb-88a8-5ec6f7c4b958" containerName="nova-metadata-log" Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.053867 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="e908db97-58de-43eb-88a8-5ec6f7c4b958" containerName="nova-metadata-metadata" Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.053895 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="e908db97-58de-43eb-88a8-5ec6f7c4b958" containerName="nova-metadata-log" Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.054734 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.055450 4982 scope.go:117] "RemoveContainer" containerID="bf7687ed20c0f0d594abc68153c62fdf347607f53d64daa1cea9897da6c5bc3d" Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.057190 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.057483 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 22 06:08:53 crc kubenswrapper[4982]: E0122 06:08:53.058871 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf7687ed20c0f0d594abc68153c62fdf347607f53d64daa1cea9897da6c5bc3d\": container with ID starting with bf7687ed20c0f0d594abc68153c62fdf347607f53d64daa1cea9897da6c5bc3d not found: ID does not exist" containerID="bf7687ed20c0f0d594abc68153c62fdf347607f53d64daa1cea9897da6c5bc3d" Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.058905 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf7687ed20c0f0d594abc68153c62fdf347607f53d64daa1cea9897da6c5bc3d"} err="failed to get container status \"bf7687ed20c0f0d594abc68153c62fdf347607f53d64daa1cea9897da6c5bc3d\": rpc error: code = NotFound desc = could not find container \"bf7687ed20c0f0d594abc68153c62fdf347607f53d64daa1cea9897da6c5bc3d\": container with ID starting with bf7687ed20c0f0d594abc68153c62fdf347607f53d64daa1cea9897da6c5bc3d not found: ID does not exist" Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.058927 4982 scope.go:117] "RemoveContainer" containerID="4f5b17e0d0a251a44e376c9928c5464b0a50a66770c3bb90f5f1519e980ffd2e" Jan 22 06:08:53 crc kubenswrapper[4982]: E0122 06:08:53.061213 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f5b17e0d0a251a44e376c9928c5464b0a50a66770c3bb90f5f1519e980ffd2e\": container with ID starting with 4f5b17e0d0a251a44e376c9928c5464b0a50a66770c3bb90f5f1519e980ffd2e not found: ID does not exist" containerID="4f5b17e0d0a251a44e376c9928c5464b0a50a66770c3bb90f5f1519e980ffd2e" Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.061237 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f5b17e0d0a251a44e376c9928c5464b0a50a66770c3bb90f5f1519e980ffd2e"} err="failed to get container status \"4f5b17e0d0a251a44e376c9928c5464b0a50a66770c3bb90f5f1519e980ffd2e\": rpc error: code = NotFound desc = could not find container \"4f5b17e0d0a251a44e376c9928c5464b0a50a66770c3bb90f5f1519e980ffd2e\": container with ID starting with 4f5b17e0d0a251a44e376c9928c5464b0a50a66770c3bb90f5f1519e980ffd2e not found: ID does not exist" Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.061249 4982 scope.go:117] "RemoveContainer" containerID="bf7687ed20c0f0d594abc68153c62fdf347607f53d64daa1cea9897da6c5bc3d" Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.061421 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf7687ed20c0f0d594abc68153c62fdf347607f53d64daa1cea9897da6c5bc3d"} err="failed to get container status \"bf7687ed20c0f0d594abc68153c62fdf347607f53d64daa1cea9897da6c5bc3d\": rpc error: code = NotFound desc = could not find container \"bf7687ed20c0f0d594abc68153c62fdf347607f53d64daa1cea9897da6c5bc3d\": container with ID starting with 
Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.061435 4982 scope.go:117] "RemoveContainer" containerID="4f5b17e0d0a251a44e376c9928c5464b0a50a66770c3bb90f5f1519e980ffd2e"
Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.061585 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f5b17e0d0a251a44e376c9928c5464b0a50a66770c3bb90f5f1519e980ffd2e"} err="failed to get container status \"4f5b17e0d0a251a44e376c9928c5464b0a50a66770c3bb90f5f1519e980ffd2e\": rpc error: code = NotFound desc = could not find container \"4f5b17e0d0a251a44e376c9928c5464b0a50a66770c3bb90f5f1519e980ffd2e\": container with ID starting with 4f5b17e0d0a251a44e376c9928c5464b0a50a66770c3bb90f5f1519e980ffd2e not found: ID does not exist"
Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.083776 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.173683 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trz5p\" (UniqueName: \"kubernetes.io/projected/d43c0225-839e-4849-8895-7ce2475eaf32-kube-api-access-trz5p\") pod \"nova-metadata-0\" (UID: \"d43c0225-839e-4849-8895-7ce2475eaf32\") " pod="openstack/nova-metadata-0"
Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.174672 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d43c0225-839e-4849-8895-7ce2475eaf32-logs\") pod \"nova-metadata-0\" (UID: \"d43c0225-839e-4849-8895-7ce2475eaf32\") " pod="openstack/nova-metadata-0"
Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.174728 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d43c0225-839e-4849-8895-7ce2475eaf32-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d43c0225-839e-4849-8895-7ce2475eaf32\") " pod="openstack/nova-metadata-0"
Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.174771 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d43c0225-839e-4849-8895-7ce2475eaf32-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d43c0225-839e-4849-8895-7ce2475eaf32\") " pod="openstack/nova-metadata-0"
Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.174801 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d43c0225-839e-4849-8895-7ce2475eaf32-config-data\") pod \"nova-metadata-0\" (UID: \"d43c0225-839e-4849-8895-7ce2475eaf32\") " pod="openstack/nova-metadata-0"
Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.276869 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trz5p\" (UniqueName: \"kubernetes.io/projected/d43c0225-839e-4849-8895-7ce2475eaf32-kube-api-access-trz5p\") pod \"nova-metadata-0\" (UID: \"d43c0225-839e-4849-8895-7ce2475eaf32\") " pod="openstack/nova-metadata-0"
Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.276946 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d43c0225-839e-4849-8895-7ce2475eaf32-logs\") pod \"nova-metadata-0\" (UID: \"d43c0225-839e-4849-8895-7ce2475eaf32\") " pod="openstack/nova-metadata-0"
Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.276984 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d43c0225-839e-4849-8895-7ce2475eaf32-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d43c0225-839e-4849-8895-7ce2475eaf32\") " pod="openstack/nova-metadata-0"
Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.277043 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d43c0225-839e-4849-8895-7ce2475eaf32-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d43c0225-839e-4849-8895-7ce2475eaf32\") " pod="openstack/nova-metadata-0"
Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.277082 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d43c0225-839e-4849-8895-7ce2475eaf32-config-data\") pod \"nova-metadata-0\" (UID: \"d43c0225-839e-4849-8895-7ce2475eaf32\") " pod="openstack/nova-metadata-0"
Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.277565 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d43c0225-839e-4849-8895-7ce2475eaf32-logs\") pod \"nova-metadata-0\" (UID: \"d43c0225-839e-4849-8895-7ce2475eaf32\") " pod="openstack/nova-metadata-0"
Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.284679 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d43c0225-839e-4849-8895-7ce2475eaf32-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d43c0225-839e-4849-8895-7ce2475eaf32\") " pod="openstack/nova-metadata-0"
Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.285477 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d43c0225-839e-4849-8895-7ce2475eaf32-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d43c0225-839e-4849-8895-7ce2475eaf32\") " pod="openstack/nova-metadata-0"
Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.297265 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d43c0225-839e-4849-8895-7ce2475eaf32-config-data\") pod \"nova-metadata-0\" (UID: \"d43c0225-839e-4849-8895-7ce2475eaf32\") " pod="openstack/nova-metadata-0"
Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.307441 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trz5p\" (UniqueName: \"kubernetes.io/projected/d43c0225-839e-4849-8895-7ce2475eaf32-kube-api-access-trz5p\") pod \"nova-metadata-0\" (UID: \"d43c0225-839e-4849-8895-7ce2475eaf32\") " pod="openstack/nova-metadata-0"
Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.387025 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.736314 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e908db97-58de-43eb-88a8-5ec6f7c4b958" path="/var/lib/kubelet/pods/e908db97-58de-43eb-88a8-5ec6f7c4b958/volumes"
Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.901641 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Jan 22 06:08:53 crc kubenswrapper[4982]: I0122 06:08:53.987680 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d43c0225-839e-4849-8895-7ce2475eaf32","Type":"ContainerStarted","Data":"4bfa08be04fcda4286fe9af6d9cfa5cdaf7f04f6b6d6223cf1893d48e9996450"}
Jan 22 06:08:55 crc kubenswrapper[4982]: I0122 06:08:55.001022 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d43c0225-839e-4849-8895-7ce2475eaf32","Type":"ContainerStarted","Data":"d6391ff77e0da2739ba4b3b23a86234ac6450f40824c215256f2da3ff7e63c7c"}
Jan 22 06:08:55 crc kubenswrapper[4982]: I0122 06:08:55.001342 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d43c0225-839e-4849-8895-7ce2475eaf32","Type":"ContainerStarted","Data":"b01efda91f2c9e284a5b9e5598ecc17022a98395ec936e884fe8a24828d81b87"}
Jan 22 06:08:55 crc kubenswrapper[4982]: I0122 06:08:55.002652 4982 generic.go:334] "Generic (PLEG): container finished" podID="2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c" containerID="baa68c4b84e0bd3b726cd5648b5cb36eaef6c60fc2ea86a61307f20df71f6fca" exitCode=0
Jan 22 06:08:55 crc kubenswrapper[4982]: I0122 06:08:55.002715 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-r9qxz" event={"ID":"2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c","Type":"ContainerDied","Data":"baa68c4b84e0bd3b726cd5648b5cb36eaef6c60fc2ea86a61307f20df71f6fca"}
Jan 22 06:08:55 crc kubenswrapper[4982]: I0122 06:08:55.006301 4982 generic.go:334] "Generic (PLEG): container finished" podID="6c26adbc-0d63-423d-b805-bc5e866bf6f2" containerID="8c0bf9c9c622b073e5e691a41bfe671bee3ec8a34d459fc686856f0e0eef9678" exitCode=0
Jan 22 06:08:55 crc kubenswrapper[4982]: I0122 06:08:55.006344 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-cpdrg" event={"ID":"6c26adbc-0d63-423d-b805-bc5e866bf6f2","Type":"ContainerDied","Data":"8c0bf9c9c622b073e5e691a41bfe671bee3ec8a34d459fc686856f0e0eef9678"}
Jan 22 06:08:55 crc kubenswrapper[4982]: I0122 06:08:55.037304 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.037279338 podStartE2EDuration="2.037279338s" podCreationTimestamp="2026-01-22 06:08:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:08:55.024215808 +0000 UTC m=+1395.862853821" watchObservedRunningTime="2026-01-22 06:08:55.037279338 +0000 UTC m=+1395.875917361"
Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.235966 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.236398 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.278882 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.347182 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.489684 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-cpdrg"
Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.506003 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.506046 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.523835 4982 scope.go:117] "RemoveContainer" containerID="075cd6ba591283d7205ef4a7dbe1b7ca67fd95dc7a176f05384ddfe4c60e3465"
Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.544050 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-647df7b8c5-qklw7"
Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.566934 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c26adbc-0d63-423d-b805-bc5e866bf6f2-scripts\") pod \"6c26adbc-0d63-423d-b805-bc5e866bf6f2\" (UID: \"6c26adbc-0d63-423d-b805-bc5e866bf6f2\") "
Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.567043 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c26adbc-0d63-423d-b805-bc5e866bf6f2-config-data\") pod \"6c26adbc-0d63-423d-b805-bc5e866bf6f2\" (UID: \"6c26adbc-0d63-423d-b805-bc5e866bf6f2\") "
Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.567233 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c26adbc-0d63-423d-b805-bc5e866bf6f2-combined-ca-bundle\") pod \"6c26adbc-0d63-423d-b805-bc5e866bf6f2\" (UID: \"6c26adbc-0d63-423d-b805-bc5e866bf6f2\") "
Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.567290 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mwrsb\" (UniqueName: \"kubernetes.io/projected/6c26adbc-0d63-423d-b805-bc5e866bf6f2-kube-api-access-mwrsb\") pod \"6c26adbc-0d63-423d-b805-bc5e866bf6f2\" (UID: \"6c26adbc-0d63-423d-b805-bc5e866bf6f2\") "
Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.591099 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c26adbc-0d63-423d-b805-bc5e866bf6f2-scripts" (OuterVolumeSpecName: "scripts") pod "6c26adbc-0d63-423d-b805-bc5e866bf6f2" (UID: "6c26adbc-0d63-423d-b805-bc5e866bf6f2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.593005 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c26adbc-0d63-423d-b805-bc5e866bf6f2-kube-api-access-mwrsb" (OuterVolumeSpecName: "kube-api-access-mwrsb") pod "6c26adbc-0d63-423d-b805-bc5e866bf6f2" (UID: "6c26adbc-0d63-423d-b805-bc5e866bf6f2"). InnerVolumeSpecName "kube-api-access-mwrsb". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.652101 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c26adbc-0d63-423d-b805-bc5e866bf6f2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6c26adbc-0d63-423d-b805-bc5e866bf6f2" (UID: "6c26adbc-0d63-423d-b805-bc5e866bf6f2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.652793 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-r9qxz" Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.656329 4982 scope.go:117] "RemoveContainer" containerID="0a5f77140c5ac25a131ce4d46af9e1eae0d6bc9a19369a4d65fb27fd56eaed7f" Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.665181 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75dbb546bf-sksw6"] Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.665508 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-75dbb546bf-sksw6" podUID="df7ef838-a20d-4c64-bc92-e2af11097e20" containerName="dnsmasq-dns" containerID="cri-o://46fe9b706708218bb7062ec9bac3cfce89e3794a42aea6a02f8ebc22bcf7c602" gracePeriod=10 Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.671664 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c26adbc-0d63-423d-b805-bc5e866bf6f2-config-data" (OuterVolumeSpecName: "config-data") pod "6c26adbc-0d63-423d-b805-bc5e866bf6f2" (UID: "6c26adbc-0d63-423d-b805-bc5e866bf6f2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.687888 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c26adbc-0d63-423d-b805-bc5e866bf6f2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.687920 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mwrsb\" (UniqueName: \"kubernetes.io/projected/6c26adbc-0d63-423d-b805-bc5e866bf6f2-kube-api-access-mwrsb\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.687937 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c26adbc-0d63-423d-b805-bc5e866bf6f2-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.789358 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c-config-data\") pod \"2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c\" (UID: \"2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c\") " Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.789515 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c-scripts\") pod \"2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c\" (UID: \"2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c\") " Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.789883 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c-combined-ca-bundle\") pod \"2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c\" (UID: \"2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c\") " Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.789944 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-scl9l\" (UniqueName: \"kubernetes.io/projected/2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c-kube-api-access-scl9l\") pod \"2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c\" (UID: \"2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c\") " Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.790407 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c26adbc-0d63-423d-b805-bc5e866bf6f2-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.793514 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c-kube-api-access-scl9l" (OuterVolumeSpecName: "kube-api-access-scl9l") pod "2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c" (UID: "2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c"). InnerVolumeSpecName "kube-api-access-scl9l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.796946 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c-scripts" (OuterVolumeSpecName: "scripts") pod "2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c" (UID: "2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.812017 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c-config-data" (OuterVolumeSpecName: "config-data") pod "2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c" (UID: "2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.819562 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c" (UID: "2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.891809 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.891882 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.891891 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:56 crc kubenswrapper[4982]: I0122 06:08:56.891900 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-scl9l\" (UniqueName: \"kubernetes.io/projected/2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c-kube-api-access-scl9l\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.040173 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-r9qxz" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.040172 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-r9qxz" event={"ID":"2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c","Type":"ContainerDied","Data":"5ca3e0a2e2ddbb1bd157c6a42891f2e6ea3f4e0b60a934d6009fab5d870b986c"} Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.040618 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5ca3e0a2e2ddbb1bd157c6a42891f2e6ea3f4e0b60a934d6009fab5d870b986c" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.041699 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-cpdrg" event={"ID":"6c26adbc-0d63-423d-b805-bc5e866bf6f2","Type":"ContainerDied","Data":"a0ad193e8899ba5f386890fc460715811f27192885e398c2d2caf5e6a3afc1c6"} Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.041736 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a0ad193e8899ba5f386890fc460715811f27192885e398c2d2caf5e6a3afc1c6" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.041769 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-cpdrg" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.089720 4982 scope.go:117] "RemoveContainer" containerID="d07915a3ccec37bc37f136dc97b5ea6fa1ecec592c5b9c62370e812644071a8a" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.102493 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.168932 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 22 06:08:57 crc kubenswrapper[4982]: E0122 06:08:57.169267 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c" containerName="nova-cell1-conductor-db-sync" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.169285 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c" containerName="nova-cell1-conductor-db-sync" Jan 22 06:08:57 crc kubenswrapper[4982]: E0122 06:08:57.169314 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c26adbc-0d63-423d-b805-bc5e866bf6f2" containerName="nova-manage" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.169321 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c26adbc-0d63-423d-b805-bc5e866bf6f2" containerName="nova-manage" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.169485 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c26adbc-0d63-423d-b805-bc5e866bf6f2" containerName="nova-manage" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.169513 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c" containerName="nova-cell1-conductor-db-sync" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.170197 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.195143 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.200126 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bcc9f070-463f-4fef-8eb0-fd0cbe567f70-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"bcc9f070-463f-4fef-8eb0-fd0cbe567f70\") " pod="openstack/nova-cell1-conductor-0" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.200178 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h69g8\" (UniqueName: \"kubernetes.io/projected/bcc9f070-463f-4fef-8eb0-fd0cbe567f70-kube-api-access-h69g8\") pod \"nova-cell1-conductor-0\" (UID: \"bcc9f070-463f-4fef-8eb0-fd0cbe567f70\") " pod="openstack/nova-cell1-conductor-0" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.200222 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcc9f070-463f-4fef-8eb0-fd0cbe567f70-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"bcc9f070-463f-4fef-8eb0-fd0cbe567f70\") " pod="openstack/nova-cell1-conductor-0" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.221533 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.301684 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bcc9f070-463f-4fef-8eb0-fd0cbe567f70-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"bcc9f070-463f-4fef-8eb0-fd0cbe567f70\") " pod="openstack/nova-cell1-conductor-0" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.301730 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h69g8\" (UniqueName: \"kubernetes.io/projected/bcc9f070-463f-4fef-8eb0-fd0cbe567f70-kube-api-access-h69g8\") pod \"nova-cell1-conductor-0\" (UID: \"bcc9f070-463f-4fef-8eb0-fd0cbe567f70\") " pod="openstack/nova-cell1-conductor-0" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.301787 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcc9f070-463f-4fef-8eb0-fd0cbe567f70-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"bcc9f070-463f-4fef-8eb0-fd0cbe567f70\") " pod="openstack/nova-cell1-conductor-0" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.305568 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcc9f070-463f-4fef-8eb0-fd0cbe567f70-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"bcc9f070-463f-4fef-8eb0-fd0cbe567f70\") " pod="openstack/nova-cell1-conductor-0" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.308702 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bcc9f070-463f-4fef-8eb0-fd0cbe567f70-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"bcc9f070-463f-4fef-8eb0-fd0cbe567f70\") " pod="openstack/nova-cell1-conductor-0" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.325826 4982 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.326153 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="da23312a-5e37-47cb-9ffb-f4a5268bf1cc" containerName="nova-api-log" containerID="cri-o://b7ea765156c7ce1789e718d25853d851de2fe8cae3426355604efef6bdbca3ad" gracePeriod=30 Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.326643 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="da23312a-5e37-47cb-9ffb-f4a5268bf1cc" containerName="nova-api-api" containerID="cri-o://9bab7936a613cc74a0a3f83010b1232cac6070b00ba1b55063be6c1f7558e958" gracePeriod=30 Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.329866 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.330082 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d43c0225-839e-4849-8895-7ce2475eaf32" containerName="nova-metadata-log" containerID="cri-o://b01efda91f2c9e284a5b9e5598ecc17022a98395ec936e884fe8a24828d81b87" gracePeriod=30 Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.330222 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d43c0225-839e-4849-8895-7ce2475eaf32" containerName="nova-metadata-metadata" containerID="cri-o://d6391ff77e0da2739ba4b3b23a86234ac6450f40824c215256f2da3ff7e63c7c" gracePeriod=30 Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.331653 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h69g8\" (UniqueName: \"kubernetes.io/projected/bcc9f070-463f-4fef-8eb0-fd0cbe567f70-kube-api-access-h69g8\") pod \"nova-cell1-conductor-0\" (UID: \"bcc9f070-463f-4fef-8eb0-fd0cbe567f70\") " pod="openstack/nova-cell1-conductor-0" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.344718 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="da23312a-5e37-47cb-9ffb-f4a5268bf1cc" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.188:8774/\": EOF" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.344884 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="da23312a-5e37-47cb-9ffb-f4a5268bf1cc" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.188:8774/\": EOF" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.534410 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.676404 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75dbb546bf-sksw6" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.784168 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.813011 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/df7ef838-a20d-4c64-bc92-e2af11097e20-ovsdbserver-sb\") pod \"df7ef838-a20d-4c64-bc92-e2af11097e20\" (UID: \"df7ef838-a20d-4c64-bc92-e2af11097e20\") " Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.813468 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df7ef838-a20d-4c64-bc92-e2af11097e20-config\") pod \"df7ef838-a20d-4c64-bc92-e2af11097e20\" (UID: \"df7ef838-a20d-4c64-bc92-e2af11097e20\") " Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.813530 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/df7ef838-a20d-4c64-bc92-e2af11097e20-ovsdbserver-nb\") pod \"df7ef838-a20d-4c64-bc92-e2af11097e20\" (UID: \"df7ef838-a20d-4c64-bc92-e2af11097e20\") " Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.813559 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/df7ef838-a20d-4c64-bc92-e2af11097e20-dns-swift-storage-0\") pod \"df7ef838-a20d-4c64-bc92-e2af11097e20\" (UID: \"df7ef838-a20d-4c64-bc92-e2af11097e20\") " Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.813588 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q9pq4\" (UniqueName: \"kubernetes.io/projected/df7ef838-a20d-4c64-bc92-e2af11097e20-kube-api-access-q9pq4\") pod \"df7ef838-a20d-4c64-bc92-e2af11097e20\" (UID: \"df7ef838-a20d-4c64-bc92-e2af11097e20\") " Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.813623 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/df7ef838-a20d-4c64-bc92-e2af11097e20-dns-svc\") pod \"df7ef838-a20d-4c64-bc92-e2af11097e20\" (UID: \"df7ef838-a20d-4c64-bc92-e2af11097e20\") " Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.821230 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df7ef838-a20d-4c64-bc92-e2af11097e20-kube-api-access-q9pq4" (OuterVolumeSpecName: "kube-api-access-q9pq4") pod "df7ef838-a20d-4c64-bc92-e2af11097e20" (UID: "df7ef838-a20d-4c64-bc92-e2af11097e20"). InnerVolumeSpecName "kube-api-access-q9pq4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.865090 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df7ef838-a20d-4c64-bc92-e2af11097e20-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "df7ef838-a20d-4c64-bc92-e2af11097e20" (UID: "df7ef838-a20d-4c64-bc92-e2af11097e20"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.868551 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df7ef838-a20d-4c64-bc92-e2af11097e20-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "df7ef838-a20d-4c64-bc92-e2af11097e20" (UID: "df7ef838-a20d-4c64-bc92-e2af11097e20"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.872459 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df7ef838-a20d-4c64-bc92-e2af11097e20-config" (OuterVolumeSpecName: "config") pod "df7ef838-a20d-4c64-bc92-e2af11097e20" (UID: "df7ef838-a20d-4c64-bc92-e2af11097e20"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.886467 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df7ef838-a20d-4c64-bc92-e2af11097e20-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "df7ef838-a20d-4c64-bc92-e2af11097e20" (UID: "df7ef838-a20d-4c64-bc92-e2af11097e20"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.914432 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df7ef838-a20d-4c64-bc92-e2af11097e20-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "df7ef838-a20d-4c64-bc92-e2af11097e20" (UID: "df7ef838-a20d-4c64-bc92-e2af11097e20"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.926243 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/df7ef838-a20d-4c64-bc92-e2af11097e20-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.926277 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/df7ef838-a20d-4c64-bc92-e2af11097e20-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.926288 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df7ef838-a20d-4c64-bc92-e2af11097e20-config\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.926297 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/df7ef838-a20d-4c64-bc92-e2af11097e20-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.926306 4982 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/df7ef838-a20d-4c64-bc92-e2af11097e20-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.926316 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q9pq4\" (UniqueName: \"kubernetes.io/projected/df7ef838-a20d-4c64-bc92-e2af11097e20-kube-api-access-q9pq4\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:57 crc kubenswrapper[4982]: I0122 06:08:57.937985 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.027592 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d43c0225-839e-4849-8895-7ce2475eaf32-config-data\") pod \"d43c0225-839e-4849-8895-7ce2475eaf32\" (UID: \"d43c0225-839e-4849-8895-7ce2475eaf32\") " Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.027655 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d43c0225-839e-4849-8895-7ce2475eaf32-logs\") pod \"d43c0225-839e-4849-8895-7ce2475eaf32\" (UID: \"d43c0225-839e-4849-8895-7ce2475eaf32\") " Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.027795 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d43c0225-839e-4849-8895-7ce2475eaf32-nova-metadata-tls-certs\") pod \"d43c0225-839e-4849-8895-7ce2475eaf32\" (UID: \"d43c0225-839e-4849-8895-7ce2475eaf32\") " Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.027813 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d43c0225-839e-4849-8895-7ce2475eaf32-combined-ca-bundle\") pod \"d43c0225-839e-4849-8895-7ce2475eaf32\" (UID: \"d43c0225-839e-4849-8895-7ce2475eaf32\") " Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.027867 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-trz5p\" (UniqueName: \"kubernetes.io/projected/d43c0225-839e-4849-8895-7ce2475eaf32-kube-api-access-trz5p\") pod \"d43c0225-839e-4849-8895-7ce2475eaf32\" (UID: \"d43c0225-839e-4849-8895-7ce2475eaf32\") " Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.028111 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d43c0225-839e-4849-8895-7ce2475eaf32-logs" (OuterVolumeSpecName: "logs") pod "d43c0225-839e-4849-8895-7ce2475eaf32" (UID: "d43c0225-839e-4849-8895-7ce2475eaf32"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.037441 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d43c0225-839e-4849-8895-7ce2475eaf32-kube-api-access-trz5p" (OuterVolumeSpecName: "kube-api-access-trz5p") pod "d43c0225-839e-4849-8895-7ce2475eaf32" (UID: "d43c0225-839e-4849-8895-7ce2475eaf32"). InnerVolumeSpecName "kube-api-access-trz5p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.063021 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d43c0225-839e-4849-8895-7ce2475eaf32-config-data" (OuterVolumeSpecName: "config-data") pod "d43c0225-839e-4849-8895-7ce2475eaf32" (UID: "d43c0225-839e-4849-8895-7ce2475eaf32"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.063162 4982 generic.go:334] "Generic (PLEG): container finished" podID="da23312a-5e37-47cb-9ffb-f4a5268bf1cc" containerID="b7ea765156c7ce1789e718d25853d851de2fe8cae3426355604efef6bdbca3ad" exitCode=143 Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.063284 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"da23312a-5e37-47cb-9ffb-f4a5268bf1cc","Type":"ContainerDied","Data":"b7ea765156c7ce1789e718d25853d851de2fe8cae3426355604efef6bdbca3ad"} Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.068370 4982 generic.go:334] "Generic (PLEG): container finished" podID="df7ef838-a20d-4c64-bc92-e2af11097e20" containerID="46fe9b706708218bb7062ec9bac3cfce89e3794a42aea6a02f8ebc22bcf7c602" exitCode=0 Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.068603 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75dbb546bf-sksw6" event={"ID":"df7ef838-a20d-4c64-bc92-e2af11097e20","Type":"ContainerDied","Data":"46fe9b706708218bb7062ec9bac3cfce89e3794a42aea6a02f8ebc22bcf7c602"} Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.068775 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75dbb546bf-sksw6" event={"ID":"df7ef838-a20d-4c64-bc92-e2af11097e20","Type":"ContainerDied","Data":"3a59cc3eabe9aeba728662f9c803b489ec68c51ef4c415fcb44ded7ba9f9da78"} Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.068836 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75dbb546bf-sksw6" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.068927 4982 scope.go:117] "RemoveContainer" containerID="46fe9b706708218bb7062ec9bac3cfce89e3794a42aea6a02f8ebc22bcf7c602" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.069781 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d43c0225-839e-4849-8895-7ce2475eaf32-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d43c0225-839e-4849-8895-7ce2475eaf32" (UID: "d43c0225-839e-4849-8895-7ce2475eaf32"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.081012 4982 generic.go:334] "Generic (PLEG): container finished" podID="d43c0225-839e-4849-8895-7ce2475eaf32" containerID="d6391ff77e0da2739ba4b3b23a86234ac6450f40824c215256f2da3ff7e63c7c" exitCode=0 Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.081043 4982 generic.go:334] "Generic (PLEG): container finished" podID="d43c0225-839e-4849-8895-7ce2475eaf32" containerID="b01efda91f2c9e284a5b9e5598ecc17022a98395ec936e884fe8a24828d81b87" exitCode=143 Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.081062 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d43c0225-839e-4849-8895-7ce2475eaf32","Type":"ContainerDied","Data":"d6391ff77e0da2739ba4b3b23a86234ac6450f40824c215256f2da3ff7e63c7c"} Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.081094 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.081134 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d43c0225-839e-4849-8895-7ce2475eaf32","Type":"ContainerDied","Data":"b01efda91f2c9e284a5b9e5598ecc17022a98395ec936e884fe8a24828d81b87"} Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.081150 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d43c0225-839e-4849-8895-7ce2475eaf32","Type":"ContainerDied","Data":"4bfa08be04fcda4286fe9af6d9cfa5cdaf7f04f6b6d6223cf1893d48e9996450"} Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.084247 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d43c0225-839e-4849-8895-7ce2475eaf32-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "d43c0225-839e-4849-8895-7ce2475eaf32" (UID: "d43c0225-839e-4849-8895-7ce2475eaf32"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:08:58 crc kubenswrapper[4982]: W0122 06:08:58.090230 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbcc9f070_463f_4fef_8eb0_fd0cbe567f70.slice/crio-94a60ea41cc7f0a5b0f5b03abc16266b04dfa37d58bb427c2a16053fcccae0cc WatchSource:0}: Error finding container 94a60ea41cc7f0a5b0f5b03abc16266b04dfa37d58bb427c2a16053fcccae0cc: Status 404 returned error can't find the container with id 94a60ea41cc7f0a5b0f5b03abc16266b04dfa37d58bb427c2a16053fcccae0cc Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.092680 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.106304 4982 scope.go:117] "RemoveContainer" containerID="68bd81db10f3753e4cdaff6c79e2da59ea80b00dc31d7afb95a0b1a2263a5d94" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.110913 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75dbb546bf-sksw6"] Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.129359 4982 scope.go:117] "RemoveContainer" containerID="46fe9b706708218bb7062ec9bac3cfce89e3794a42aea6a02f8ebc22bcf7c602" Jan 22 06:08:58 crc kubenswrapper[4982]: E0122 06:08:58.130240 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46fe9b706708218bb7062ec9bac3cfce89e3794a42aea6a02f8ebc22bcf7c602\": container with ID starting with 46fe9b706708218bb7062ec9bac3cfce89e3794a42aea6a02f8ebc22bcf7c602 not found: ID does not exist" containerID="46fe9b706708218bb7062ec9bac3cfce89e3794a42aea6a02f8ebc22bcf7c602" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.130275 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46fe9b706708218bb7062ec9bac3cfce89e3794a42aea6a02f8ebc22bcf7c602"} err="failed to get container status \"46fe9b706708218bb7062ec9bac3cfce89e3794a42aea6a02f8ebc22bcf7c602\": rpc error: code = NotFound desc = could not find container \"46fe9b706708218bb7062ec9bac3cfce89e3794a42aea6a02f8ebc22bcf7c602\": container with ID starting with 46fe9b706708218bb7062ec9bac3cfce89e3794a42aea6a02f8ebc22bcf7c602 not found: ID does not exist" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.130297 4982 scope.go:117] "RemoveContainer" 
containerID="68bd81db10f3753e4cdaff6c79e2da59ea80b00dc31d7afb95a0b1a2263a5d94" Jan 22 06:08:58 crc kubenswrapper[4982]: E0122 06:08:58.130510 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68bd81db10f3753e4cdaff6c79e2da59ea80b00dc31d7afb95a0b1a2263a5d94\": container with ID starting with 68bd81db10f3753e4cdaff6c79e2da59ea80b00dc31d7afb95a0b1a2263a5d94 not found: ID does not exist" containerID="68bd81db10f3753e4cdaff6c79e2da59ea80b00dc31d7afb95a0b1a2263a5d94" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.130533 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68bd81db10f3753e4cdaff6c79e2da59ea80b00dc31d7afb95a0b1a2263a5d94"} err="failed to get container status \"68bd81db10f3753e4cdaff6c79e2da59ea80b00dc31d7afb95a0b1a2263a5d94\": rpc error: code = NotFound desc = could not find container \"68bd81db10f3753e4cdaff6c79e2da59ea80b00dc31d7afb95a0b1a2263a5d94\": container with ID starting with 68bd81db10f3753e4cdaff6c79e2da59ea80b00dc31d7afb95a0b1a2263a5d94 not found: ID does not exist" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.130546 4982 scope.go:117] "RemoveContainer" containerID="d6391ff77e0da2739ba4b3b23a86234ac6450f40824c215256f2da3ff7e63c7c" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.130809 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-75dbb546bf-sksw6"] Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.130892 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d43c0225-839e-4849-8895-7ce2475eaf32-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.130910 4982 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d43c0225-839e-4849-8895-7ce2475eaf32-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.130919 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-trz5p\" (UniqueName: \"kubernetes.io/projected/d43c0225-839e-4849-8895-7ce2475eaf32-kube-api-access-trz5p\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.131014 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d43c0225-839e-4849-8895-7ce2475eaf32-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.131472 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d43c0225-839e-4849-8895-7ce2475eaf32-logs\") on node \"crc\" DevicePath \"\"" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.152154 4982 scope.go:117] "RemoveContainer" containerID="b01efda91f2c9e284a5b9e5598ecc17022a98395ec936e884fe8a24828d81b87" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.186685 4982 scope.go:117] "RemoveContainer" containerID="d6391ff77e0da2739ba4b3b23a86234ac6450f40824c215256f2da3ff7e63c7c" Jan 22 06:08:58 crc kubenswrapper[4982]: E0122 06:08:58.187192 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d6391ff77e0da2739ba4b3b23a86234ac6450f40824c215256f2da3ff7e63c7c\": container with ID starting with d6391ff77e0da2739ba4b3b23a86234ac6450f40824c215256f2da3ff7e63c7c not found: ID 
does not exist" containerID="d6391ff77e0da2739ba4b3b23a86234ac6450f40824c215256f2da3ff7e63c7c" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.187230 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d6391ff77e0da2739ba4b3b23a86234ac6450f40824c215256f2da3ff7e63c7c"} err="failed to get container status \"d6391ff77e0da2739ba4b3b23a86234ac6450f40824c215256f2da3ff7e63c7c\": rpc error: code = NotFound desc = could not find container \"d6391ff77e0da2739ba4b3b23a86234ac6450f40824c215256f2da3ff7e63c7c\": container with ID starting with d6391ff77e0da2739ba4b3b23a86234ac6450f40824c215256f2da3ff7e63c7c not found: ID does not exist" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.187257 4982 scope.go:117] "RemoveContainer" containerID="b01efda91f2c9e284a5b9e5598ecc17022a98395ec936e884fe8a24828d81b87" Jan 22 06:08:58 crc kubenswrapper[4982]: E0122 06:08:58.187584 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b01efda91f2c9e284a5b9e5598ecc17022a98395ec936e884fe8a24828d81b87\": container with ID starting with b01efda91f2c9e284a5b9e5598ecc17022a98395ec936e884fe8a24828d81b87 not found: ID does not exist" containerID="b01efda91f2c9e284a5b9e5598ecc17022a98395ec936e884fe8a24828d81b87" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.187617 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b01efda91f2c9e284a5b9e5598ecc17022a98395ec936e884fe8a24828d81b87"} err="failed to get container status \"b01efda91f2c9e284a5b9e5598ecc17022a98395ec936e884fe8a24828d81b87\": rpc error: code = NotFound desc = could not find container \"b01efda91f2c9e284a5b9e5598ecc17022a98395ec936e884fe8a24828d81b87\": container with ID starting with b01efda91f2c9e284a5b9e5598ecc17022a98395ec936e884fe8a24828d81b87 not found: ID does not exist" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.187640 4982 scope.go:117] "RemoveContainer" containerID="d6391ff77e0da2739ba4b3b23a86234ac6450f40824c215256f2da3ff7e63c7c" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.187871 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d6391ff77e0da2739ba4b3b23a86234ac6450f40824c215256f2da3ff7e63c7c"} err="failed to get container status \"d6391ff77e0da2739ba4b3b23a86234ac6450f40824c215256f2da3ff7e63c7c\": rpc error: code = NotFound desc = could not find container \"d6391ff77e0da2739ba4b3b23a86234ac6450f40824c215256f2da3ff7e63c7c\": container with ID starting with d6391ff77e0da2739ba4b3b23a86234ac6450f40824c215256f2da3ff7e63c7c not found: ID does not exist" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.187894 4982 scope.go:117] "RemoveContainer" containerID="b01efda91f2c9e284a5b9e5598ecc17022a98395ec936e884fe8a24828d81b87" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.188102 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b01efda91f2c9e284a5b9e5598ecc17022a98395ec936e884fe8a24828d81b87"} err="failed to get container status \"b01efda91f2c9e284a5b9e5598ecc17022a98395ec936e884fe8a24828d81b87\": rpc error: code = NotFound desc = could not find container \"b01efda91f2c9e284a5b9e5598ecc17022a98395ec936e884fe8a24828d81b87\": container with ID starting with b01efda91f2c9e284a5b9e5598ecc17022a98395ec936e884fe8a24828d81b87 not found: ID does not exist" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.527556 4982 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.544181 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.577387 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 22 06:08:58 crc kubenswrapper[4982]: E0122 06:08:58.577726 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df7ef838-a20d-4c64-bc92-e2af11097e20" containerName="dnsmasq-dns" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.577739 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="df7ef838-a20d-4c64-bc92-e2af11097e20" containerName="dnsmasq-dns" Jan 22 06:08:58 crc kubenswrapper[4982]: E0122 06:08:58.577755 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d43c0225-839e-4849-8895-7ce2475eaf32" containerName="nova-metadata-metadata" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.577761 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d43c0225-839e-4849-8895-7ce2475eaf32" containerName="nova-metadata-metadata" Jan 22 06:08:58 crc kubenswrapper[4982]: E0122 06:08:58.577784 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d43c0225-839e-4849-8895-7ce2475eaf32" containerName="nova-metadata-log" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.577791 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d43c0225-839e-4849-8895-7ce2475eaf32" containerName="nova-metadata-log" Jan 22 06:08:58 crc kubenswrapper[4982]: E0122 06:08:58.577809 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df7ef838-a20d-4c64-bc92-e2af11097e20" containerName="init" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.577814 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="df7ef838-a20d-4c64-bc92-e2af11097e20" containerName="init" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.577968 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d43c0225-839e-4849-8895-7ce2475eaf32" containerName="nova-metadata-metadata" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.577982 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d43c0225-839e-4849-8895-7ce2475eaf32" containerName="nova-metadata-log" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.578005 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="df7ef838-a20d-4c64-bc92-e2af11097e20" containerName="dnsmasq-dns" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.578828 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.582325 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.582632 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.591401 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.748331 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3645e707-91be-42e5-a475-2eb6ec3fb433-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3645e707-91be-42e5-a475-2eb6ec3fb433\") " pod="openstack/nova-metadata-0" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.748427 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hf6hc\" (UniqueName: \"kubernetes.io/projected/3645e707-91be-42e5-a475-2eb6ec3fb433-kube-api-access-hf6hc\") pod \"nova-metadata-0\" (UID: \"3645e707-91be-42e5-a475-2eb6ec3fb433\") " pod="openstack/nova-metadata-0" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.748478 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3645e707-91be-42e5-a475-2eb6ec3fb433-config-data\") pod \"nova-metadata-0\" (UID: \"3645e707-91be-42e5-a475-2eb6ec3fb433\") " pod="openstack/nova-metadata-0" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.748561 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3645e707-91be-42e5-a475-2eb6ec3fb433-logs\") pod \"nova-metadata-0\" (UID: \"3645e707-91be-42e5-a475-2eb6ec3fb433\") " pod="openstack/nova-metadata-0" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.748580 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3645e707-91be-42e5-a475-2eb6ec3fb433-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"3645e707-91be-42e5-a475-2eb6ec3fb433\") " pod="openstack/nova-metadata-0" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.849759 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3645e707-91be-42e5-a475-2eb6ec3fb433-config-data\") pod \"nova-metadata-0\" (UID: \"3645e707-91be-42e5-a475-2eb6ec3fb433\") " pod="openstack/nova-metadata-0" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.849909 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3645e707-91be-42e5-a475-2eb6ec3fb433-logs\") pod \"nova-metadata-0\" (UID: \"3645e707-91be-42e5-a475-2eb6ec3fb433\") " pod="openstack/nova-metadata-0" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.849926 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3645e707-91be-42e5-a475-2eb6ec3fb433-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"3645e707-91be-42e5-a475-2eb6ec3fb433\") " pod="openstack/nova-metadata-0" Jan 22 
06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.849966 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3645e707-91be-42e5-a475-2eb6ec3fb433-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3645e707-91be-42e5-a475-2eb6ec3fb433\") " pod="openstack/nova-metadata-0" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.850019 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hf6hc\" (UniqueName: \"kubernetes.io/projected/3645e707-91be-42e5-a475-2eb6ec3fb433-kube-api-access-hf6hc\") pod \"nova-metadata-0\" (UID: \"3645e707-91be-42e5-a475-2eb6ec3fb433\") " pod="openstack/nova-metadata-0" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.851123 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3645e707-91be-42e5-a475-2eb6ec3fb433-logs\") pod \"nova-metadata-0\" (UID: \"3645e707-91be-42e5-a475-2eb6ec3fb433\") " pod="openstack/nova-metadata-0" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.855266 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3645e707-91be-42e5-a475-2eb6ec3fb433-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"3645e707-91be-42e5-a475-2eb6ec3fb433\") " pod="openstack/nova-metadata-0" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.855433 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3645e707-91be-42e5-a475-2eb6ec3fb433-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3645e707-91be-42e5-a475-2eb6ec3fb433\") " pod="openstack/nova-metadata-0" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.874399 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3645e707-91be-42e5-a475-2eb6ec3fb433-config-data\") pod \"nova-metadata-0\" (UID: \"3645e707-91be-42e5-a475-2eb6ec3fb433\") " pod="openstack/nova-metadata-0" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.875453 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hf6hc\" (UniqueName: \"kubernetes.io/projected/3645e707-91be-42e5-a475-2eb6ec3fb433-kube-api-access-hf6hc\") pod \"nova-metadata-0\" (UID: \"3645e707-91be-42e5-a475-2eb6ec3fb433\") " pod="openstack/nova-metadata-0" Jan 22 06:08:58 crc kubenswrapper[4982]: I0122 06:08:58.899286 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0"
Jan 22 06:08:59 crc kubenswrapper[4982]: I0122 06:08:59.105957 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"bcc9f070-463f-4fef-8eb0-fd0cbe567f70","Type":"ContainerStarted","Data":"cdcccc69427ee2d1728f8e7d6f59420bebf9be65e0a9061fee307038b83d9cb6"}
Jan 22 06:08:59 crc kubenswrapper[4982]: I0122 06:08:59.106223 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"bcc9f070-463f-4fef-8eb0-fd0cbe567f70","Type":"ContainerStarted","Data":"94a60ea41cc7f0a5b0f5b03abc16266b04dfa37d58bb427c2a16053fcccae0cc"}
Jan 22 06:08:59 crc kubenswrapper[4982]: I0122 06:08:59.106940 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0"
Jan 22 06:08:59 crc kubenswrapper[4982]: I0122 06:08:59.119111 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="b00568f8-4b83-4844-bbda-9a24aee96ead" containerName="nova-scheduler-scheduler" containerID="cri-o://654241e2775a0145645996c65d3aaa20c5193d51a7f39e5c01584b636f5e211a" gracePeriod=30
Jan 22 06:08:59 crc kubenswrapper[4982]: I0122 06:08:59.139503 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.139486014 podStartE2EDuration="2.139486014s" podCreationTimestamp="2026-01-22 06:08:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:08:59.123632012 +0000 UTC m=+1399.962270015" watchObservedRunningTime="2026-01-22 06:08:59.139486014 +0000 UTC m=+1399.978124017"
Jan 22 06:08:59 crc kubenswrapper[4982]: I0122 06:08:59.403546 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Jan 22 06:09:00 crc kubenswrapper[4982]: I0122 06:09:00.189544 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d43c0225-839e-4849-8895-7ce2475eaf32" path="/var/lib/kubelet/pods/d43c0225-839e-4849-8895-7ce2475eaf32/volumes"
Jan 22 06:09:00 crc kubenswrapper[4982]: I0122 06:09:00.191167 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df7ef838-a20d-4c64-bc92-e2af11097e20" path="/var/lib/kubelet/pods/df7ef838-a20d-4c64-bc92-e2af11097e20/volumes"
Jan 22 06:09:00 crc kubenswrapper[4982]: I0122 06:09:00.199745 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3645e707-91be-42e5-a475-2eb6ec3fb433","Type":"ContainerStarted","Data":"b9de301df8546493e24acddc1699f9edcd6f6d28a84cbdc3c46bfa4f2405b086"}
Jan 22 06:09:01 crc kubenswrapper[4982]: I0122 06:09:01.211171 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3645e707-91be-42e5-a475-2eb6ec3fb433","Type":"ContainerStarted","Data":"0c9ccdc23f43b1bf3c0d74a8692311a6076c288ef0848ddd60f988b930a58ba3"}
Jan 22 06:09:01 crc kubenswrapper[4982]: I0122 06:09:01.211758 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3645e707-91be-42e5-a475-2eb6ec3fb433","Type":"ContainerStarted","Data":"e23ba4bce99c12647cb53c902ff74233634365cebd04a3984359e39265010a24"}
Jan 22 06:09:01 crc kubenswrapper[4982]: E0122 06:09:01.238047 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="654241e2775a0145645996c65d3aaa20c5193d51a7f39e5c01584b636f5e211a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Jan 22 06:09:01 crc kubenswrapper[4982]: I0122 06:09:01.238267 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.238247222 podStartE2EDuration="3.238247222s" podCreationTimestamp="2026-01-22 06:08:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:09:01.228410826 +0000 UTC m=+1402.067048849" watchObservedRunningTime="2026-01-22 06:09:01.238247222 +0000 UTC m=+1402.076885225"
Jan 22 06:09:01 crc kubenswrapper[4982]: E0122 06:09:01.239808 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="654241e2775a0145645996c65d3aaa20c5193d51a7f39e5c01584b636f5e211a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Jan 22 06:09:01 crc kubenswrapper[4982]: E0122 06:09:01.241376 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="654241e2775a0145645996c65d3aaa20c5193d51a7f39e5c01584b636f5e211a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Jan 22 06:09:01 crc kubenswrapper[4982]: E0122 06:09:01.241435 4982 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="b00568f8-4b83-4844-bbda-9a24aee96ead" containerName="nova-scheduler-scheduler"
Jan 22 06:09:01 crc kubenswrapper[4982]: I0122 06:09:01.289914 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.209788 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.219272 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.229836 4982 generic.go:334] "Generic (PLEG): container finished" podID="da23312a-5e37-47cb-9ffb-f4a5268bf1cc" containerID="9bab7936a613cc74a0a3f83010b1232cac6070b00ba1b55063be6c1f7558e958" exitCode=0
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.229895 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"da23312a-5e37-47cb-9ffb-f4a5268bf1cc","Type":"ContainerDied","Data":"9bab7936a613cc74a0a3f83010b1232cac6070b00ba1b55063be6c1f7558e958"}
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.229970 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"da23312a-5e37-47cb-9ffb-f4a5268bf1cc","Type":"ContainerDied","Data":"ae519c91c2bf21b723037607358723506d946825d185b4ebfdcc084983191f2c"}
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.229923 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.230001 4982 scope.go:117] "RemoveContainer" containerID="9bab7936a613cc74a0a3f83010b1232cac6070b00ba1b55063be6c1f7558e958"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.231654 4982 generic.go:334] "Generic (PLEG): container finished" podID="b00568f8-4b83-4844-bbda-9a24aee96ead" containerID="654241e2775a0145645996c65d3aaa20c5193d51a7f39e5c01584b636f5e211a" exitCode=0
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.231695 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b00568f8-4b83-4844-bbda-9a24aee96ead","Type":"ContainerDied","Data":"654241e2775a0145645996c65d3aaa20c5193d51a7f39e5c01584b636f5e211a"}
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.231727 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b00568f8-4b83-4844-bbda-9a24aee96ead","Type":"ContainerDied","Data":"72ed7252cb235c9cb14a169399457bc7553e6d4745e93f486c55ebd8a1f62168"}
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.231839 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.266509 4982 scope.go:117] "RemoveContainer" containerID="b7ea765156c7ce1789e718d25853d851de2fe8cae3426355604efef6bdbca3ad"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.299632 4982 scope.go:117] "RemoveContainer" containerID="9bab7936a613cc74a0a3f83010b1232cac6070b00ba1b55063be6c1f7558e958"
Jan 22 06:09:03 crc kubenswrapper[4982]: E0122 06:09:03.300110 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9bab7936a613cc74a0a3f83010b1232cac6070b00ba1b55063be6c1f7558e958\": container with ID starting with 9bab7936a613cc74a0a3f83010b1232cac6070b00ba1b55063be6c1f7558e958 not found: ID does not exist" containerID="9bab7936a613cc74a0a3f83010b1232cac6070b00ba1b55063be6c1f7558e958"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.300147 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9bab7936a613cc74a0a3f83010b1232cac6070b00ba1b55063be6c1f7558e958"} err="failed to get container status \"9bab7936a613cc74a0a3f83010b1232cac6070b00ba1b55063be6c1f7558e958\": rpc error: code = NotFound desc = could not find container \"9bab7936a613cc74a0a3f83010b1232cac6070b00ba1b55063be6c1f7558e958\": container with ID starting with 9bab7936a613cc74a0a3f83010b1232cac6070b00ba1b55063be6c1f7558e958 not found: ID does not exist"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.300172 4982 scope.go:117] "RemoveContainer" containerID="b7ea765156c7ce1789e718d25853d851de2fe8cae3426355604efef6bdbca3ad"
Jan 22 06:09:03 crc kubenswrapper[4982]: E0122 06:09:03.300597 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7ea765156c7ce1789e718d25853d851de2fe8cae3426355604efef6bdbca3ad\": container with ID starting with b7ea765156c7ce1789e718d25853d851de2fe8cae3426355604efef6bdbca3ad not found: ID does not exist" containerID="b7ea765156c7ce1789e718d25853d851de2fe8cae3426355604efef6bdbca3ad"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.300690 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7ea765156c7ce1789e718d25853d851de2fe8cae3426355604efef6bdbca3ad"} err="failed to get container status \"b7ea765156c7ce1789e718d25853d851de2fe8cae3426355604efef6bdbca3ad\": rpc error: code = NotFound desc = could not find container \"b7ea765156c7ce1789e718d25853d851de2fe8cae3426355604efef6bdbca3ad\": container with ID starting with b7ea765156c7ce1789e718d25853d851de2fe8cae3426355604efef6bdbca3ad not found: ID does not exist"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.300764 4982 scope.go:117] "RemoveContainer" containerID="654241e2775a0145645996c65d3aaa20c5193d51a7f39e5c01584b636f5e211a"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.323720 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da23312a-5e37-47cb-9ffb-f4a5268bf1cc-combined-ca-bundle\") pod \"da23312a-5e37-47cb-9ffb-f4a5268bf1cc\" (UID: \"da23312a-5e37-47cb-9ffb-f4a5268bf1cc\") "
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.323781 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jtkp5\" (UniqueName: \"kubernetes.io/projected/b00568f8-4b83-4844-bbda-9a24aee96ead-kube-api-access-jtkp5\") pod \"b00568f8-4b83-4844-bbda-9a24aee96ead\" (UID: \"b00568f8-4b83-4844-bbda-9a24aee96ead\") "
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.323893 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b00568f8-4b83-4844-bbda-9a24aee96ead-config-data\") pod \"b00568f8-4b83-4844-bbda-9a24aee96ead\" (UID: \"b00568f8-4b83-4844-bbda-9a24aee96ead\") "
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.323960 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da23312a-5e37-47cb-9ffb-f4a5268bf1cc-config-data\") pod \"da23312a-5e37-47cb-9ffb-f4a5268bf1cc\" (UID: \"da23312a-5e37-47cb-9ffb-f4a5268bf1cc\") "
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.323982 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b00568f8-4b83-4844-bbda-9a24aee96ead-combined-ca-bundle\") pod \"b00568f8-4b83-4844-bbda-9a24aee96ead\" (UID: \"b00568f8-4b83-4844-bbda-9a24aee96ead\") "
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.324022 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/da23312a-5e37-47cb-9ffb-f4a5268bf1cc-logs\") pod \"da23312a-5e37-47cb-9ffb-f4a5268bf1cc\" (UID: \"da23312a-5e37-47cb-9ffb-f4a5268bf1cc\") "
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.324046 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n5czq\" (UniqueName: \"kubernetes.io/projected/da23312a-5e37-47cb-9ffb-f4a5268bf1cc-kube-api-access-n5czq\") pod \"da23312a-5e37-47cb-9ffb-f4a5268bf1cc\" (UID: \"da23312a-5e37-47cb-9ffb-f4a5268bf1cc\") "
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.326069 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da23312a-5e37-47cb-9ffb-f4a5268bf1cc-logs" (OuterVolumeSpecName: "logs") pod "da23312a-5e37-47cb-9ffb-f4a5268bf1cc" (UID: "da23312a-5e37-47cb-9ffb-f4a5268bf1cc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.327224 4982 scope.go:117] "RemoveContainer" containerID="654241e2775a0145645996c65d3aaa20c5193d51a7f39e5c01584b636f5e211a"
Jan 22 06:09:03 crc kubenswrapper[4982]: E0122 06:09:03.332054 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"654241e2775a0145645996c65d3aaa20c5193d51a7f39e5c01584b636f5e211a\": container with ID starting with 654241e2775a0145645996c65d3aaa20c5193d51a7f39e5c01584b636f5e211a not found: ID does not exist" containerID="654241e2775a0145645996c65d3aaa20c5193d51a7f39e5c01584b636f5e211a"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.332122 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"654241e2775a0145645996c65d3aaa20c5193d51a7f39e5c01584b636f5e211a"} err="failed to get container status \"654241e2775a0145645996c65d3aaa20c5193d51a7f39e5c01584b636f5e211a\": rpc error: code = NotFound desc = could not find container \"654241e2775a0145645996c65d3aaa20c5193d51a7f39e5c01584b636f5e211a\": container with ID starting with 654241e2775a0145645996c65d3aaa20c5193d51a7f39e5c01584b636f5e211a not found: ID does not exist"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.338442 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da23312a-5e37-47cb-9ffb-f4a5268bf1cc-kube-api-access-n5czq" (OuterVolumeSpecName: "kube-api-access-n5czq") pod "da23312a-5e37-47cb-9ffb-f4a5268bf1cc" (UID: "da23312a-5e37-47cb-9ffb-f4a5268bf1cc"). InnerVolumeSpecName "kube-api-access-n5czq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.338506 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b00568f8-4b83-4844-bbda-9a24aee96ead-kube-api-access-jtkp5" (OuterVolumeSpecName: "kube-api-access-jtkp5") pod "b00568f8-4b83-4844-bbda-9a24aee96ead" (UID: "b00568f8-4b83-4844-bbda-9a24aee96ead"). InnerVolumeSpecName "kube-api-access-jtkp5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.353568 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b00568f8-4b83-4844-bbda-9a24aee96ead-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b00568f8-4b83-4844-bbda-9a24aee96ead" (UID: "b00568f8-4b83-4844-bbda-9a24aee96ead"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.366996 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b00568f8-4b83-4844-bbda-9a24aee96ead-config-data" (OuterVolumeSpecName: "config-data") pod "b00568f8-4b83-4844-bbda-9a24aee96ead" (UID: "b00568f8-4b83-4844-bbda-9a24aee96ead"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.375821 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/da23312a-5e37-47cb-9ffb-f4a5268bf1cc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "da23312a-5e37-47cb-9ffb-f4a5268bf1cc" (UID: "da23312a-5e37-47cb-9ffb-f4a5268bf1cc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.390001 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/da23312a-5e37-47cb-9ffb-f4a5268bf1cc-config-data" (OuterVolumeSpecName: "config-data") pod "da23312a-5e37-47cb-9ffb-f4a5268bf1cc" (UID: "da23312a-5e37-47cb-9ffb-f4a5268bf1cc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.425633 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b00568f8-4b83-4844-bbda-9a24aee96ead-config-data\") on node \"crc\" DevicePath \"\""
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.425881 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da23312a-5e37-47cb-9ffb-f4a5268bf1cc-config-data\") on node \"crc\" DevicePath \"\""
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.425953 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b00568f8-4b83-4844-bbda-9a24aee96ead-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.426026 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/da23312a-5e37-47cb-9ffb-f4a5268bf1cc-logs\") on node \"crc\" DevicePath \"\""
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.426087 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n5czq\" (UniqueName: \"kubernetes.io/projected/da23312a-5e37-47cb-9ffb-f4a5268bf1cc-kube-api-access-n5czq\") on node \"crc\" DevicePath \"\""
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.426143 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da23312a-5e37-47cb-9ffb-f4a5268bf1cc-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.426230 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jtkp5\" (UniqueName: \"kubernetes.io/projected/b00568f8-4b83-4844-bbda-9a24aee96ead-kube-api-access-jtkp5\") on node \"crc\" DevicePath \"\""
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.582398 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.612698 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.619111 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.628276 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Jan 22 06:09:03 crc kubenswrapper[4982]: E0122 06:09:03.628716 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da23312a-5e37-47cb-9ffb-f4a5268bf1cc" containerName="nova-api-log"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.628733 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="da23312a-5e37-47cb-9ffb-f4a5268bf1cc" containerName="nova-api-log"
Jan 22 06:09:03 crc kubenswrapper[4982]: E0122 06:09:03.628745 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b00568f8-4b83-4844-bbda-9a24aee96ead" containerName="nova-scheduler-scheduler"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.628752 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b00568f8-4b83-4844-bbda-9a24aee96ead" containerName="nova-scheduler-scheduler"
Jan 22 06:09:03 crc kubenswrapper[4982]: E0122 06:09:03.628791 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da23312a-5e37-47cb-9ffb-f4a5268bf1cc" containerName="nova-api-api"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.628798 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="da23312a-5e37-47cb-9ffb-f4a5268bf1cc" containerName="nova-api-api"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.628978 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="da23312a-5e37-47cb-9ffb-f4a5268bf1cc" containerName="nova-api-api"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.629003 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="da23312a-5e37-47cb-9ffb-f4a5268bf1cc" containerName="nova-api-log"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.629020 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b00568f8-4b83-4844-bbda-9a24aee96ead" containerName="nova-scheduler-scheduler"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.630073 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.636137 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.637299 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.644217 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.655733 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.657240 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.660118 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.665692 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.731455 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d7bpj\" (UniqueName: \"kubernetes.io/projected/8a89c209-8e3e-480d-af7c-fb721529fc5d-kube-api-access-d7bpj\") pod \"nova-api-0\" (UID: \"8a89c209-8e3e-480d-af7c-fb721529fc5d\") " pod="openstack/nova-api-0"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.731512 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a89c209-8e3e-480d-af7c-fb721529fc5d-config-data\") pod \"nova-api-0\" (UID: \"8a89c209-8e3e-480d-af7c-fb721529fc5d\") " pod="openstack/nova-api-0"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.731596 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8a89c209-8e3e-480d-af7c-fb721529fc5d-logs\") pod \"nova-api-0\" (UID: \"8a89c209-8e3e-480d-af7c-fb721529fc5d\") " pod="openstack/nova-api-0"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.731626 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a89c209-8e3e-480d-af7c-fb721529fc5d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8a89c209-8e3e-480d-af7c-fb721529fc5d\") " pod="openstack/nova-api-0"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.732002 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b00568f8-4b83-4844-bbda-9a24aee96ead" path="/var/lib/kubelet/pods/b00568f8-4b83-4844-bbda-9a24aee96ead/volumes"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.732630 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da23312a-5e37-47cb-9ffb-f4a5268bf1cc" path="/var/lib/kubelet/pods/da23312a-5e37-47cb-9ffb-f4a5268bf1cc/volumes"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.833475 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8a89c209-8e3e-480d-af7c-fb721529fc5d-logs\") pod \"nova-api-0\" (UID: \"8a89c209-8e3e-480d-af7c-fb721529fc5d\") " pod="openstack/nova-api-0"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.833815 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a89c209-8e3e-480d-af7c-fb721529fc5d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8a89c209-8e3e-480d-af7c-fb721529fc5d\") " pod="openstack/nova-api-0"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.833886 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c10441d-8606-4622-b83f-033f239987e8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7c10441d-8606-4622-b83f-033f239987e8\") " pod="openstack/nova-scheduler-0"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.833915 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d7bpj\" (UniqueName: \"kubernetes.io/projected/8a89c209-8e3e-480d-af7c-fb721529fc5d-kube-api-access-d7bpj\") pod \"nova-api-0\" (UID: \"8a89c209-8e3e-480d-af7c-fb721529fc5d\") " pod="openstack/nova-api-0"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.833970 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c10441d-8606-4622-b83f-033f239987e8-config-data\") pod \"nova-scheduler-0\" (UID: \"7c10441d-8606-4622-b83f-033f239987e8\") " pod="openstack/nova-scheduler-0"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.833994 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a89c209-8e3e-480d-af7c-fb721529fc5d-config-data\") pod \"nova-api-0\" (UID: \"8a89c209-8e3e-480d-af7c-fb721529fc5d\") " pod="openstack/nova-api-0"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.834013 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8qw9\" (UniqueName: \"kubernetes.io/projected/7c10441d-8606-4622-b83f-033f239987e8-kube-api-access-d8qw9\") pod \"nova-scheduler-0\" (UID: \"7c10441d-8606-4622-b83f-033f239987e8\") " pod="openstack/nova-scheduler-0"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.834488 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8a89c209-8e3e-480d-af7c-fb721529fc5d-logs\") pod \"nova-api-0\" (UID: \"8a89c209-8e3e-480d-af7c-fb721529fc5d\") " pod="openstack/nova-api-0"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.838372 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a89c209-8e3e-480d-af7c-fb721529fc5d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8a89c209-8e3e-480d-af7c-fb721529fc5d\") " pod="openstack/nova-api-0"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.844455 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a89c209-8e3e-480d-af7c-fb721529fc5d-config-data\") pod \"nova-api-0\" (UID: \"8a89c209-8e3e-480d-af7c-fb721529fc5d\") " pod="openstack/nova-api-0"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.862030 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d7bpj\" (UniqueName: \"kubernetes.io/projected/8a89c209-8e3e-480d-af7c-fb721529fc5d-kube-api-access-d7bpj\") pod \"nova-api-0\" (UID: \"8a89c209-8e3e-480d-af7c-fb721529fc5d\") " pod="openstack/nova-api-0"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.900028 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.900091 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.935582 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c10441d-8606-4622-b83f-033f239987e8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7c10441d-8606-4622-b83f-033f239987e8\") " pod="openstack/nova-scheduler-0"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.935677 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c10441d-8606-4622-b83f-033f239987e8-config-data\") pod \"nova-scheduler-0\" (UID: \"7c10441d-8606-4622-b83f-033f239987e8\") " pod="openstack/nova-scheduler-0"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.935714 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8qw9\" (UniqueName: \"kubernetes.io/projected/7c10441d-8606-4622-b83f-033f239987e8-kube-api-access-d8qw9\") pod \"nova-scheduler-0\" (UID: \"7c10441d-8606-4622-b83f-033f239987e8\") " pod="openstack/nova-scheduler-0"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.939491 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c10441d-8606-4622-b83f-033f239987e8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7c10441d-8606-4622-b83f-033f239987e8\") " pod="openstack/nova-scheduler-0"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.939795 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c10441d-8606-4622-b83f-033f239987e8-config-data\") pod \"nova-scheduler-0\" (UID: \"7c10441d-8606-4622-b83f-033f239987e8\") " pod="openstack/nova-scheduler-0"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.951115 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.960583 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d8qw9\" (UniqueName: \"kubernetes.io/projected/7c10441d-8606-4622-b83f-033f239987e8-kube-api-access-d8qw9\") pod \"nova-scheduler-0\" (UID: \"7c10441d-8606-4622-b83f-033f239987e8\") " pod="openstack/nova-scheduler-0"
Jan 22 06:09:03 crc kubenswrapper[4982]: I0122 06:09:03.981391 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 22 06:09:04 crc kubenswrapper[4982]: I0122 06:09:04.419091 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Jan 22 06:09:04 crc kubenswrapper[4982]: W0122 06:09:04.421420 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a89c209_8e3e_480d_af7c_fb721529fc5d.slice/crio-1fe3357d0eff614c89d7739feee37f7d73c795fe0e27c635326f23bcedd22a01 WatchSource:0}: Error finding container 1fe3357d0eff614c89d7739feee37f7d73c795fe0e27c635326f23bcedd22a01: Status 404 returned error can't find the container with id 1fe3357d0eff614c89d7739feee37f7d73c795fe0e27c635326f23bcedd22a01
Jan 22 06:09:04 crc kubenswrapper[4982]: I0122 06:09:04.530055 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 22 06:09:05 crc kubenswrapper[4982]: I0122 06:09:05.252494 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8a89c209-8e3e-480d-af7c-fb721529fc5d","Type":"ContainerStarted","Data":"527c9c4f863991b42e715028f3b84be5fe277253a52d58a78406d7ef30d17e3c"}
Jan 22 06:09:05 crc kubenswrapper[4982]: I0122 06:09:05.252775 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8a89c209-8e3e-480d-af7c-fb721529fc5d","Type":"ContainerStarted","Data":"9b34094b1dcb0225702177e8c9194fa3787c33edce73af1d24645c621623a068"}
Jan 22 06:09:05 crc kubenswrapper[4982]: I0122 06:09:05.252787 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8a89c209-8e3e-480d-af7c-fb721529fc5d","Type":"ContainerStarted","Data":"1fe3357d0eff614c89d7739feee37f7d73c795fe0e27c635326f23bcedd22a01"}
Jan 22 06:09:05 crc kubenswrapper[4982]: I0122 06:09:05.254496 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7c10441d-8606-4622-b83f-033f239987e8","Type":"ContainerStarted","Data":"9a19b6610b0b565ba4925432db7983dfff1232c3922a9e19709722655257528e"}
Jan 22 06:09:05 crc kubenswrapper[4982]: I0122 06:09:05.254536 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7c10441d-8606-4622-b83f-033f239987e8","Type":"ContainerStarted","Data":"e8881cb077d9a8065f100c2883ab3dca45770bdbdd334db9e40a6e91a4156ac5"}
Jan 22 06:09:05 crc kubenswrapper[4982]: I0122 06:09:05.270237 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.270220332 podStartE2EDuration="2.270220332s" podCreationTimestamp="2026-01-22 06:09:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:09:05.266118795 +0000 UTC m=+1406.104756798" watchObservedRunningTime="2026-01-22 06:09:05.270220332 +0000 UTC m=+1406.108858335"
Jan 22 06:09:05 crc kubenswrapper[4982]: I0122 06:09:05.285170 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.28515246 podStartE2EDuration="2.28515246s" podCreationTimestamp="2026-01-22 06:09:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:09:05.282576603 +0000 UTC m=+1406.121214606" watchObservedRunningTime="2026-01-22 06:09:05.28515246 +0000 UTC m=+1406.123790463"
Jan 22 06:09:05 crc kubenswrapper[4982]: I0122 06:09:05.325393 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Jan 22 06:09:05 crc kubenswrapper[4982]: I0122 06:09:05.325807 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="e75b4d57-9a8d-4185-8dfe-9ce81eb81fee" containerName="kube-state-metrics" containerID="cri-o://2ba701ffb7e81c8e8e48691e3fdbe161394306846afd9348c67bf422d21c6517" gracePeriod=30
Jan 22 06:09:05 crc kubenswrapper[4982]: I0122 06:09:05.768068 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/kube-state-metrics-0" podUID="e75b4d57-9a8d-4185-8dfe-9ce81eb81fee" containerName="kube-state-metrics" probeResult="failure" output="Get \"http://10.217.0.107:8081/readyz\": dial tcp 10.217.0.107:8081: connect: connection refused"
Jan 22 06:09:06 crc kubenswrapper[4982]: I0122 06:09:06.273685 4982 generic.go:334] "Generic (PLEG): container finished" podID="e75b4d57-9a8d-4185-8dfe-9ce81eb81fee" containerID="2ba701ffb7e81c8e8e48691e3fdbe161394306846afd9348c67bf422d21c6517" exitCode=2
Jan 22 06:09:06 crc kubenswrapper[4982]: I0122 06:09:06.273840 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e75b4d57-9a8d-4185-8dfe-9ce81eb81fee","Type":"ContainerDied","Data":"2ba701ffb7e81c8e8e48691e3fdbe161394306846afd9348c67bf422d21c6517"}
Jan 22 06:09:06 crc kubenswrapper[4982]: I0122 06:09:06.665933 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Jan 22 06:09:06 crc kubenswrapper[4982]: I0122 06:09:06.833354 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kx642\" (UniqueName: \"kubernetes.io/projected/e75b4d57-9a8d-4185-8dfe-9ce81eb81fee-kube-api-access-kx642\") pod \"e75b4d57-9a8d-4185-8dfe-9ce81eb81fee\" (UID: \"e75b4d57-9a8d-4185-8dfe-9ce81eb81fee\") "
Jan 22 06:09:06 crc kubenswrapper[4982]: I0122 06:09:06.838080 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e75b4d57-9a8d-4185-8dfe-9ce81eb81fee-kube-api-access-kx642" (OuterVolumeSpecName: "kube-api-access-kx642") pod "e75b4d57-9a8d-4185-8dfe-9ce81eb81fee" (UID: "e75b4d57-9a8d-4185-8dfe-9ce81eb81fee"). InnerVolumeSpecName "kube-api-access-kx642". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 06:09:06 crc kubenswrapper[4982]: I0122 06:09:06.935718 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kx642\" (UniqueName: \"kubernetes.io/projected/e75b4d57-9a8d-4185-8dfe-9ce81eb81fee-kube-api-access-kx642\") on node \"crc\" DevicePath \"\""
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.073525 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.074259 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="408e52a9-8be5-49ff-9cbd-dbbdc7e726a6" containerName="ceilometer-central-agent" containerID="cri-o://7ba2d065f2e3d86f71f4e09d16ca60d013c5d39259ad1783544624f176119a13" gracePeriod=30
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.074315 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="408e52a9-8be5-49ff-9cbd-dbbdc7e726a6" containerName="sg-core" containerID="cri-o://c8aff2806002bb2c78191141373211dece6ce92f5d5f7c7e37491f2b182fc6a4" gracePeriod=30
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.074345 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="408e52a9-8be5-49ff-9cbd-dbbdc7e726a6" containerName="ceilometer-notification-agent" containerID="cri-o://8572721d4488decbe4ee74263447daaddadd5f62739cd82be298e54fdcde78c2" gracePeriod=30
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.074366 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="408e52a9-8be5-49ff-9cbd-dbbdc7e726a6" containerName="proxy-httpd" containerID="cri-o://2459f512f93fe5e9f0a1dea36f63b700f0dbc388a393db53b4467cd3c90c1b4f" gracePeriod=30
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.287915 4982 generic.go:334] "Generic (PLEG): container finished" podID="408e52a9-8be5-49ff-9cbd-dbbdc7e726a6" containerID="c8aff2806002bb2c78191141373211dece6ce92f5d5f7c7e37491f2b182fc6a4" exitCode=2
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.287981 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6","Type":"ContainerDied","Data":"c8aff2806002bb2c78191141373211dece6ce92f5d5f7c7e37491f2b182fc6a4"}
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.289970 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e75b4d57-9a8d-4185-8dfe-9ce81eb81fee","Type":"ContainerDied","Data":"c570c9ae50656c3a32d25903a644765fa5e6f11ec2ec972ceb7843463d07f83f"}
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.290027 4982 scope.go:117] "RemoveContainer" containerID="2ba701ffb7e81c8e8e48691e3fdbe161394306846afd9348c67bf422d21c6517"
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.290025 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.346477 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.357370 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"]
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.369897 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"]
Jan 22 06:09:07 crc kubenswrapper[4982]: E0122 06:09:07.370247 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e75b4d57-9a8d-4185-8dfe-9ce81eb81fee" containerName="kube-state-metrics"
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.370263 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e75b4d57-9a8d-4185-8dfe-9ce81eb81fee" containerName="kube-state-metrics"
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.374990 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="e75b4d57-9a8d-4185-8dfe-9ce81eb81fee" containerName="kube-state-metrics"
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.376068 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.380061 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config"
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.384929 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc"
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.407926 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.553105 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/07b70872-6840-498b-be43-290f43590bb9-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"07b70872-6840-498b-be43-290f43590bb9\") " pod="openstack/kube-state-metrics-0"
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.553200 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/07b70872-6840-498b-be43-290f43590bb9-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"07b70872-6840-498b-be43-290f43590bb9\") " pod="openstack/kube-state-metrics-0"
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.553285 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgfm4\" (UniqueName: \"kubernetes.io/projected/07b70872-6840-498b-be43-290f43590bb9-kube-api-access-pgfm4\") pod \"kube-state-metrics-0\" (UID: \"07b70872-6840-498b-be43-290f43590bb9\") " pod="openstack/kube-state-metrics-0"
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.553358 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07b70872-6840-498b-be43-290f43590bb9-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"07b70872-6840-498b-be43-290f43590bb9\") " pod="openstack/kube-state-metrics-0"
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.573472 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0"
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.653964 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgfm4\" (UniqueName: \"kubernetes.io/projected/07b70872-6840-498b-be43-290f43590bb9-kube-api-access-pgfm4\") pod \"kube-state-metrics-0\" (UID: \"07b70872-6840-498b-be43-290f43590bb9\") " pod="openstack/kube-state-metrics-0"
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.654051 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07b70872-6840-498b-be43-290f43590bb9-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"07b70872-6840-498b-be43-290f43590bb9\") " pod="openstack/kube-state-metrics-0"
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.654165 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/07b70872-6840-498b-be43-290f43590bb9-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"07b70872-6840-498b-be43-290f43590bb9\") " pod="openstack/kube-state-metrics-0"
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.654216 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/07b70872-6840-498b-be43-290f43590bb9-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"07b70872-6840-498b-be43-290f43590bb9\") " pod="openstack/kube-state-metrics-0"
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.659920 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07b70872-6840-498b-be43-290f43590bb9-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"07b70872-6840-498b-be43-290f43590bb9\") " pod="openstack/kube-state-metrics-0"
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.660467 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/07b70872-6840-498b-be43-290f43590bb9-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"07b70872-6840-498b-be43-290f43590bb9\") " pod="openstack/kube-state-metrics-0"
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.663249 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/07b70872-6840-498b-be43-290f43590bb9-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"07b70872-6840-498b-be43-290f43590bb9\") " pod="openstack/kube-state-metrics-0"
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.679381 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgfm4\" (UniqueName: \"kubernetes.io/projected/07b70872-6840-498b-be43-290f43590bb9-kube-api-access-pgfm4\") pod \"kube-state-metrics-0\" (UID: \"07b70872-6840-498b-be43-290f43590bb9\") " pod="openstack/kube-state-metrics-0"
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.730981 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e75b4d57-9a8d-4185-8dfe-9ce81eb81fee" path="/var/lib/kubelet/pods/e75b4d57-9a8d-4185-8dfe-9ce81eb81fee/volumes"
Jan 22 06:09:07 crc kubenswrapper[4982]: I0122 06:09:07.734202 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Jan 22 06:09:08 crc kubenswrapper[4982]: I0122 06:09:08.235111 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Jan 22 06:09:08 crc kubenswrapper[4982]: W0122 06:09:08.240544 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod07b70872_6840_498b_be43_290f43590bb9.slice/crio-5278f8f984a36118f89c457097510209ba759e5381ece112d058a2b71f01f4d6 WatchSource:0}: Error finding container 5278f8f984a36118f89c457097510209ba759e5381ece112d058a2b71f01f4d6: Status 404 returned error can't find the container with id 5278f8f984a36118f89c457097510209ba759e5381ece112d058a2b71f01f4d6
Jan 22 06:09:08 crc kubenswrapper[4982]: I0122 06:09:08.242730 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 22 06:09:08 crc kubenswrapper[4982]: I0122 06:09:08.298926 4982 generic.go:334] "Generic (PLEG): container finished" podID="408e52a9-8be5-49ff-9cbd-dbbdc7e726a6" containerID="2459f512f93fe5e9f0a1dea36f63b700f0dbc388a393db53b4467cd3c90c1b4f" exitCode=0
Jan 22 06:09:08 crc kubenswrapper[4982]: I0122 06:09:08.298953 4982 generic.go:334] "Generic (PLEG): container finished" podID="408e52a9-8be5-49ff-9cbd-dbbdc7e726a6" containerID="7ba2d065f2e3d86f71f4e09d16ca60d013c5d39259ad1783544624f176119a13" exitCode=0
Jan 22 06:09:08 crc kubenswrapper[4982]: I0122 06:09:08.298989 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6","Type":"ContainerDied","Data":"2459f512f93fe5e9f0a1dea36f63b700f0dbc388a393db53b4467cd3c90c1b4f"}
Jan 22 06:09:08 crc kubenswrapper[4982]: I0122 06:09:08.299015 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6","Type":"ContainerDied","Data":"7ba2d065f2e3d86f71f4e09d16ca60d013c5d39259ad1783544624f176119a13"}
Jan 22 06:09:08 crc kubenswrapper[4982]: I0122 06:09:08.300897 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"07b70872-6840-498b-be43-290f43590bb9","Type":"ContainerStarted","Data":"5278f8f984a36118f89c457097510209ba759e5381ece112d058a2b71f01f4d6"}
Jan 22 06:09:08 crc kubenswrapper[4982]: I0122 06:09:08.900104 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Jan 22 06:09:08 crc kubenswrapper[4982]: I0122 06:09:08.900397 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Jan 22 06:09:08 crc kubenswrapper[4982]: I0122 06:09:08.982430 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.120339 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.280954 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-combined-ca-bundle\") pod \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\" (UID: \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\") "
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.281005 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-scripts\") pod \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\" (UID: \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\") "
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.281111 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-run-httpd\") pod \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\" (UID: \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\") "
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.281151 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jsprz\" (UniqueName: \"kubernetes.io/projected/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-kube-api-access-jsprz\") pod \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\" (UID: \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\") "
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.281228 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-config-data\") pod \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\" (UID: \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\") "
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.281275 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-log-httpd\") pod \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\" (UID: \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\") "
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.281348 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-sg-core-conf-yaml\") pod \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\" (UID: \"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6\") "
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.281489 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "408e52a9-8be5-49ff-9cbd-dbbdc7e726a6" (UID: "408e52a9-8be5-49ff-9cbd-dbbdc7e726a6"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.281710 4982 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-run-httpd\") on node \"crc\" DevicePath \"\""
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.281730 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "408e52a9-8be5-49ff-9cbd-dbbdc7e726a6" (UID: "408e52a9-8be5-49ff-9cbd-dbbdc7e726a6"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.285770 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-scripts" (OuterVolumeSpecName: "scripts") pod "408e52a9-8be5-49ff-9cbd-dbbdc7e726a6" (UID: "408e52a9-8be5-49ff-9cbd-dbbdc7e726a6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.289546 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-kube-api-access-jsprz" (OuterVolumeSpecName: "kube-api-access-jsprz") pod "408e52a9-8be5-49ff-9cbd-dbbdc7e726a6" (UID: "408e52a9-8be5-49ff-9cbd-dbbdc7e726a6"). InnerVolumeSpecName "kube-api-access-jsprz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.311084 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "408e52a9-8be5-49ff-9cbd-dbbdc7e726a6" (UID: "408e52a9-8be5-49ff-9cbd-dbbdc7e726a6"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.316228 4982 generic.go:334] "Generic (PLEG): container finished" podID="408e52a9-8be5-49ff-9cbd-dbbdc7e726a6" containerID="8572721d4488decbe4ee74263447daaddadd5f62739cd82be298e54fdcde78c2" exitCode=0
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.316291 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6","Type":"ContainerDied","Data":"8572721d4488decbe4ee74263447daaddadd5f62739cd82be298e54fdcde78c2"}
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.316324 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"408e52a9-8be5-49ff-9cbd-dbbdc7e726a6","Type":"ContainerDied","Data":"a56d539ff4043d69c57f705071e8f40be8d6cebb41b3378534697264a41b23ba"}
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.316344 4982 scope.go:117] "RemoveContainer" containerID="2459f512f93fe5e9f0a1dea36f63b700f0dbc388a393db53b4467cd3c90c1b4f"
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.316493 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.327210 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"07b70872-6840-498b-be43-290f43590bb9","Type":"ContainerStarted","Data":"addbc96e6718c32e48886fcf4a4c3eca72d548b434d5453710a2fcc4daabe338"}
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.328489 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0"
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.351587 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.994998642 podStartE2EDuration="2.351563055s" podCreationTimestamp="2026-01-22 06:09:07 +0000 UTC" firstStartedPulling="2026-01-22 06:09:08.242473074 +0000 UTC m=+1409.081111087" lastFinishedPulling="2026-01-22 06:09:08.599037487 +0000 UTC m=+1409.437675500" observedRunningTime="2026-01-22 06:09:09.343226808 +0000 UTC m=+1410.181864811" watchObservedRunningTime="2026-01-22 06:09:09.351563055 +0000 UTC m=+1410.190201058"
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.353167 4982 scope.go:117] "RemoveContainer" containerID="c8aff2806002bb2c78191141373211dece6ce92f5d5f7c7e37491f2b182fc6a4"
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.355680 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "408e52a9-8be5-49ff-9cbd-dbbdc7e726a6" (UID: "408e52a9-8be5-49ff-9cbd-dbbdc7e726a6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.373616 4982 scope.go:117] "RemoveContainer" containerID="8572721d4488decbe4ee74263447daaddadd5f62739cd82be298e54fdcde78c2"
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.383904 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jsprz\" (UniqueName: \"kubernetes.io/projected/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-kube-api-access-jsprz\") on node \"crc\" DevicePath \"\""
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.383982 4982 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-log-httpd\") on node \"crc\" DevicePath \"\""
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.383995 4982 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.384008 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.384019 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.397597 4982 scope.go:117] "RemoveContainer" containerID="7ba2d065f2e3d86f71f4e09d16ca60d013c5d39259ad1783544624f176119a13"
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.401020 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-config-data" (OuterVolumeSpecName: "config-data") pod "408e52a9-8be5-49ff-9cbd-dbbdc7e726a6" (UID: "408e52a9-8be5-49ff-9cbd-dbbdc7e726a6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.417764 4982 scope.go:117] "RemoveContainer" containerID="2459f512f93fe5e9f0a1dea36f63b700f0dbc388a393db53b4467cd3c90c1b4f"
Jan 22 06:09:09 crc kubenswrapper[4982]: E0122 06:09:09.418257 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2459f512f93fe5e9f0a1dea36f63b700f0dbc388a393db53b4467cd3c90c1b4f\": container with ID starting with 2459f512f93fe5e9f0a1dea36f63b700f0dbc388a393db53b4467cd3c90c1b4f not found: ID does not exist" containerID="2459f512f93fe5e9f0a1dea36f63b700f0dbc388a393db53b4467cd3c90c1b4f"
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.418316 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2459f512f93fe5e9f0a1dea36f63b700f0dbc388a393db53b4467cd3c90c1b4f"} err="failed to get container status \"2459f512f93fe5e9f0a1dea36f63b700f0dbc388a393db53b4467cd3c90c1b4f\": rpc error: code = NotFound desc = could not find container \"2459f512f93fe5e9f0a1dea36f63b700f0dbc388a393db53b4467cd3c90c1b4f\": container with ID starting with 2459f512f93fe5e9f0a1dea36f63b700f0dbc388a393db53b4467cd3c90c1b4f not found: ID does not exist"
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.418350 4982 scope.go:117] "RemoveContainer" containerID="c8aff2806002bb2c78191141373211dece6ce92f5d5f7c7e37491f2b182fc6a4"
Jan 22 06:09:09 crc kubenswrapper[4982]: E0122 06:09:09.418649 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8aff2806002bb2c78191141373211dece6ce92f5d5f7c7e37491f2b182fc6a4\": container with ID starting with c8aff2806002bb2c78191141373211dece6ce92f5d5f7c7e37491f2b182fc6a4 not found: ID does not exist" containerID="c8aff2806002bb2c78191141373211dece6ce92f5d5f7c7e37491f2b182fc6a4"
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.418690 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8aff2806002bb2c78191141373211dece6ce92f5d5f7c7e37491f2b182fc6a4"} err="failed to get container status \"c8aff2806002bb2c78191141373211dece6ce92f5d5f7c7e37491f2b182fc6a4\": rpc error: code = NotFound desc = could not find container \"c8aff2806002bb2c78191141373211dece6ce92f5d5f7c7e37491f2b182fc6a4\": container with ID starting with c8aff2806002bb2c78191141373211dece6ce92f5d5f7c7e37491f2b182fc6a4 not found: ID does not exist"
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.418706 4982 scope.go:117] "RemoveContainer" containerID="8572721d4488decbe4ee74263447daaddadd5f62739cd82be298e54fdcde78c2"
Jan 22 06:09:09 crc kubenswrapper[4982]: E0122 06:09:09.418985 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8572721d4488decbe4ee74263447daaddadd5f62739cd82be298e54fdcde78c2\": container with ID starting with 8572721d4488decbe4ee74263447daaddadd5f62739cd82be298e54fdcde78c2 not found: ID does not exist" containerID="8572721d4488decbe4ee74263447daaddadd5f62739cd82be298e54fdcde78c2"
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.419013 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8572721d4488decbe4ee74263447daaddadd5f62739cd82be298e54fdcde78c2"} err="failed to get container status \"8572721d4488decbe4ee74263447daaddadd5f62739cd82be298e54fdcde78c2\": rpc error: code = NotFound desc = could not find container \"8572721d4488decbe4ee74263447daaddadd5f62739cd82be298e54fdcde78c2\": container with ID starting with 8572721d4488decbe4ee74263447daaddadd5f62739cd82be298e54fdcde78c2 not found: ID does not exist"
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.419032 4982 scope.go:117] "RemoveContainer" containerID="7ba2d065f2e3d86f71f4e09d16ca60d013c5d39259ad1783544624f176119a13"
Jan 22 06:09:09 crc kubenswrapper[4982]: E0122 06:09:09.419309 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ba2d065f2e3d86f71f4e09d16ca60d013c5d39259ad1783544624f176119a13\": container with ID starting with 7ba2d065f2e3d86f71f4e09d16ca60d013c5d39259ad1783544624f176119a13 not found: ID does not exist" containerID="7ba2d065f2e3d86f71f4e09d16ca60d013c5d39259ad1783544624f176119a13"
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.419352 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ba2d065f2e3d86f71f4e09d16ca60d013c5d39259ad1783544624f176119a13"} err="failed to get container status \"7ba2d065f2e3d86f71f4e09d16ca60d013c5d39259ad1783544624f176119a13\": rpc error: code = NotFound desc = could not find container \"7ba2d065f2e3d86f71f4e09d16ca60d013c5d39259ad1783544624f176119a13\": container with ID starting with 7ba2d065f2e3d86f71f4e09d16ca60d013c5d39259ad1783544624f176119a13 not found: ID does not exist"
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.486111 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6-config-data\") on node \"crc\" DevicePath \"\""
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.695015 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.702528 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.736593 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="408e52a9-8be5-49ff-9cbd-dbbdc7e726a6" path="/var/lib/kubelet/pods/408e52a9-8be5-49ff-9cbd-dbbdc7e726a6/volumes"
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.740100 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Jan 22 06:09:09 crc kubenswrapper[4982]: E0122 06:09:09.740599 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="408e52a9-8be5-49ff-9cbd-dbbdc7e726a6" containerName="ceilometer-central-agent"
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.740617 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="408e52a9-8be5-49ff-9cbd-dbbdc7e726a6" containerName="ceilometer-central-agent"
Jan 22 06:09:09 crc kubenswrapper[4982]: E0122 06:09:09.740640 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="408e52a9-8be5-49ff-9cbd-dbbdc7e726a6" containerName="sg-core"
Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.740648 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="408e52a9-8be5-49ff-9cbd-dbbdc7e726a6" containerName="sg-core"
Jan 22 06:09:09 crc kubenswrapper[4982]: E0122 
06:09:09.740668 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="408e52a9-8be5-49ff-9cbd-dbbdc7e726a6" containerName="proxy-httpd" Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.740674 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="408e52a9-8be5-49ff-9cbd-dbbdc7e726a6" containerName="proxy-httpd" Jan 22 06:09:09 crc kubenswrapper[4982]: E0122 06:09:09.740683 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="408e52a9-8be5-49ff-9cbd-dbbdc7e726a6" containerName="ceilometer-notification-agent" Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.740689 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="408e52a9-8be5-49ff-9cbd-dbbdc7e726a6" containerName="ceilometer-notification-agent" Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.740843 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="408e52a9-8be5-49ff-9cbd-dbbdc7e726a6" containerName="sg-core" Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.740880 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="408e52a9-8be5-49ff-9cbd-dbbdc7e726a6" containerName="ceilometer-central-agent" Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.740896 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="408e52a9-8be5-49ff-9cbd-dbbdc7e726a6" containerName="proxy-httpd" Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.740908 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="408e52a9-8be5-49ff-9cbd-dbbdc7e726a6" containerName="ceilometer-notification-agent" Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.746136 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.751635 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.752041 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.752371 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.752564 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.902401 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a70b623-bbd3-4d52-be5d-916a140f5705-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3a70b623-bbd3-4d52-be5d-916a140f5705\") " pod="openstack/ceilometer-0" Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.902472 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a70b623-bbd3-4d52-be5d-916a140f5705-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3a70b623-bbd3-4d52-be5d-916a140f5705\") " pod="openstack/ceilometer-0" Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.902608 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a70b623-bbd3-4d52-be5d-916a140f5705-scripts\") pod \"ceilometer-0\" (UID: \"3a70b623-bbd3-4d52-be5d-916a140f5705\") " pod="openstack/ceilometer-0" Jan 22 06:09:09 crc 
kubenswrapper[4982]: I0122 06:09:09.902647 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a70b623-bbd3-4d52-be5d-916a140f5705-config-data\") pod \"ceilometer-0\" (UID: \"3a70b623-bbd3-4d52-be5d-916a140f5705\") " pod="openstack/ceilometer-0" Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.902685 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3a70b623-bbd3-4d52-be5d-916a140f5705-run-httpd\") pod \"ceilometer-0\" (UID: \"3a70b623-bbd3-4d52-be5d-916a140f5705\") " pod="openstack/ceilometer-0" Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.902788 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3a70b623-bbd3-4d52-be5d-916a140f5705-log-httpd\") pod \"ceilometer-0\" (UID: \"3a70b623-bbd3-4d52-be5d-916a140f5705\") " pod="openstack/ceilometer-0" Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.902872 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mxbd\" (UniqueName: \"kubernetes.io/projected/3a70b623-bbd3-4d52-be5d-916a140f5705-kube-api-access-7mxbd\") pod \"ceilometer-0\" (UID: \"3a70b623-bbd3-4d52-be5d-916a140f5705\") " pod="openstack/ceilometer-0" Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.902941 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3a70b623-bbd3-4d52-be5d-916a140f5705-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3a70b623-bbd3-4d52-be5d-916a140f5705\") " pod="openstack/ceilometer-0" Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.919042 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="3645e707-91be-42e5-a475-2eb6ec3fb433" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 22 06:09:09 crc kubenswrapper[4982]: I0122 06:09:09.919091 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="3645e707-91be-42e5-a475-2eb6ec3fb433" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 22 06:09:10 crc kubenswrapper[4982]: I0122 06:09:10.004585 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a70b623-bbd3-4d52-be5d-916a140f5705-config-data\") pod \"ceilometer-0\" (UID: \"3a70b623-bbd3-4d52-be5d-916a140f5705\") " pod="openstack/ceilometer-0" Jan 22 06:09:10 crc kubenswrapper[4982]: I0122 06:09:10.004653 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3a70b623-bbd3-4d52-be5d-916a140f5705-run-httpd\") pod \"ceilometer-0\" (UID: \"3a70b623-bbd3-4d52-be5d-916a140f5705\") " pod="openstack/ceilometer-0" Jan 22 06:09:10 crc kubenswrapper[4982]: I0122 06:09:10.004724 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3a70b623-bbd3-4d52-be5d-916a140f5705-log-httpd\") pod 
\"ceilometer-0\" (UID: \"3a70b623-bbd3-4d52-be5d-916a140f5705\") " pod="openstack/ceilometer-0" Jan 22 06:09:10 crc kubenswrapper[4982]: I0122 06:09:10.004768 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mxbd\" (UniqueName: \"kubernetes.io/projected/3a70b623-bbd3-4d52-be5d-916a140f5705-kube-api-access-7mxbd\") pod \"ceilometer-0\" (UID: \"3a70b623-bbd3-4d52-be5d-916a140f5705\") " pod="openstack/ceilometer-0" Jan 22 06:09:10 crc kubenswrapper[4982]: I0122 06:09:10.004803 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3a70b623-bbd3-4d52-be5d-916a140f5705-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3a70b623-bbd3-4d52-be5d-916a140f5705\") " pod="openstack/ceilometer-0" Jan 22 06:09:10 crc kubenswrapper[4982]: I0122 06:09:10.004827 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a70b623-bbd3-4d52-be5d-916a140f5705-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3a70b623-bbd3-4d52-be5d-916a140f5705\") " pod="openstack/ceilometer-0" Jan 22 06:09:10 crc kubenswrapper[4982]: I0122 06:09:10.004861 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a70b623-bbd3-4d52-be5d-916a140f5705-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3a70b623-bbd3-4d52-be5d-916a140f5705\") " pod="openstack/ceilometer-0" Jan 22 06:09:10 crc kubenswrapper[4982]: I0122 06:09:10.004925 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a70b623-bbd3-4d52-be5d-916a140f5705-scripts\") pod \"ceilometer-0\" (UID: \"3a70b623-bbd3-4d52-be5d-916a140f5705\") " pod="openstack/ceilometer-0" Jan 22 06:09:10 crc kubenswrapper[4982]: I0122 06:09:10.006301 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3a70b623-bbd3-4d52-be5d-916a140f5705-log-httpd\") pod \"ceilometer-0\" (UID: \"3a70b623-bbd3-4d52-be5d-916a140f5705\") " pod="openstack/ceilometer-0" Jan 22 06:09:10 crc kubenswrapper[4982]: I0122 06:09:10.006310 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3a70b623-bbd3-4d52-be5d-916a140f5705-run-httpd\") pod \"ceilometer-0\" (UID: \"3a70b623-bbd3-4d52-be5d-916a140f5705\") " pod="openstack/ceilometer-0" Jan 22 06:09:10 crc kubenswrapper[4982]: I0122 06:09:10.009513 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a70b623-bbd3-4d52-be5d-916a140f5705-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3a70b623-bbd3-4d52-be5d-916a140f5705\") " pod="openstack/ceilometer-0" Jan 22 06:09:10 crc kubenswrapper[4982]: I0122 06:09:10.013264 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a70b623-bbd3-4d52-be5d-916a140f5705-scripts\") pod \"ceilometer-0\" (UID: \"3a70b623-bbd3-4d52-be5d-916a140f5705\") " pod="openstack/ceilometer-0" Jan 22 06:09:10 crc kubenswrapper[4982]: I0122 06:09:10.018967 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a70b623-bbd3-4d52-be5d-916a140f5705-combined-ca-bundle\") pod \"ceilometer-0\" (UID: 
\"3a70b623-bbd3-4d52-be5d-916a140f5705\") " pod="openstack/ceilometer-0" Jan 22 06:09:10 crc kubenswrapper[4982]: I0122 06:09:10.020821 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3a70b623-bbd3-4d52-be5d-916a140f5705-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3a70b623-bbd3-4d52-be5d-916a140f5705\") " pod="openstack/ceilometer-0" Jan 22 06:09:10 crc kubenswrapper[4982]: I0122 06:09:10.025396 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mxbd\" (UniqueName: \"kubernetes.io/projected/3a70b623-bbd3-4d52-be5d-916a140f5705-kube-api-access-7mxbd\") pod \"ceilometer-0\" (UID: \"3a70b623-bbd3-4d52-be5d-916a140f5705\") " pod="openstack/ceilometer-0" Jan 22 06:09:10 crc kubenswrapper[4982]: I0122 06:09:10.027108 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a70b623-bbd3-4d52-be5d-916a140f5705-config-data\") pod \"ceilometer-0\" (UID: \"3a70b623-bbd3-4d52-be5d-916a140f5705\") " pod="openstack/ceilometer-0" Jan 22 06:09:10 crc kubenswrapper[4982]: I0122 06:09:10.095951 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 06:09:10 crc kubenswrapper[4982]: I0122 06:09:10.587904 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:09:11 crc kubenswrapper[4982]: I0122 06:09:11.350256 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3a70b623-bbd3-4d52-be5d-916a140f5705","Type":"ContainerStarted","Data":"f9380a37007c1812cd5a11bc8ef13accb15ad95a372aad9f3226011cee707c2c"} Jan 22 06:09:12 crc kubenswrapper[4982]: I0122 06:09:12.358936 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3a70b623-bbd3-4d52-be5d-916a140f5705","Type":"ContainerStarted","Data":"0a6c5dcdb6e017aa14e366cf201c8466acd5f74426b10a316051d631513d0818"} Jan 22 06:09:12 crc kubenswrapper[4982]: I0122 06:09:12.359543 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3a70b623-bbd3-4d52-be5d-916a140f5705","Type":"ContainerStarted","Data":"940fad49c69ab5357125129a50ccbe45067625063a0b7020655fb7d30d389dc1"} Jan 22 06:09:13 crc kubenswrapper[4982]: I0122 06:09:13.370100 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3a70b623-bbd3-4d52-be5d-916a140f5705","Type":"ContainerStarted","Data":"a6b70b48c5b1dc56c47ded6534ecdd7e182b9c1c91a264d4ac6c3929f833f8f1"} Jan 22 06:09:13 crc kubenswrapper[4982]: I0122 06:09:13.952217 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 22 06:09:13 crc kubenswrapper[4982]: I0122 06:09:13.952765 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 22 06:09:13 crc kubenswrapper[4982]: I0122 06:09:13.982502 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Jan 22 06:09:14 crc kubenswrapper[4982]: I0122 06:09:14.019364 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Jan 22 06:09:14 crc kubenswrapper[4982]: I0122 06:09:14.416022 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jan 22 06:09:15 crc kubenswrapper[4982]: I0122 06:09:15.035054 4982 
prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="8a89c209-8e3e-480d-af7c-fb721529fc5d" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.194:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 22 06:09:15 crc kubenswrapper[4982]: I0122 06:09:15.035342 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="8a89c209-8e3e-480d-af7c-fb721529fc5d" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.194:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 22 06:09:15 crc kubenswrapper[4982]: I0122 06:09:15.390812 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3a70b623-bbd3-4d52-be5d-916a140f5705","Type":"ContainerStarted","Data":"61d8a78a229a2f55643eb257b791a6f3facaa2df1bd2707c97165bedaba71d13"} Jan 22 06:09:15 crc kubenswrapper[4982]: I0122 06:09:15.416118 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.651294439 podStartE2EDuration="6.416102881s" podCreationTimestamp="2026-01-22 06:09:09 +0000 UTC" firstStartedPulling="2026-01-22 06:09:10.608248715 +0000 UTC m=+1411.446886718" lastFinishedPulling="2026-01-22 06:09:14.373057157 +0000 UTC m=+1415.211695160" observedRunningTime="2026-01-22 06:09:15.408085183 +0000 UTC m=+1416.246723196" watchObservedRunningTime="2026-01-22 06:09:15.416102881 +0000 UTC m=+1416.254740884" Jan 22 06:09:16 crc kubenswrapper[4982]: I0122 06:09:16.399457 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 22 06:09:17 crc kubenswrapper[4982]: I0122 06:09:17.406832 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fqrnq"] Jan 22 06:09:17 crc kubenswrapper[4982]: I0122 06:09:17.409211 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fqrnq" Jan 22 06:09:17 crc kubenswrapper[4982]: I0122 06:09:17.445192 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fqrnq"] Jan 22 06:09:17 crc kubenswrapper[4982]: I0122 06:09:17.453571 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f885c65c-ba3d-498c-a094-4abbc117efdc-utilities\") pod \"redhat-operators-fqrnq\" (UID: \"f885c65c-ba3d-498c-a094-4abbc117efdc\") " pod="openshift-marketplace/redhat-operators-fqrnq" Jan 22 06:09:17 crc kubenswrapper[4982]: I0122 06:09:17.453631 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkzdw\" (UniqueName: \"kubernetes.io/projected/f885c65c-ba3d-498c-a094-4abbc117efdc-kube-api-access-dkzdw\") pod \"redhat-operators-fqrnq\" (UID: \"f885c65c-ba3d-498c-a094-4abbc117efdc\") " pod="openshift-marketplace/redhat-operators-fqrnq" Jan 22 06:09:17 crc kubenswrapper[4982]: I0122 06:09:17.453668 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f885c65c-ba3d-498c-a094-4abbc117efdc-catalog-content\") pod \"redhat-operators-fqrnq\" (UID: \"f885c65c-ba3d-498c-a094-4abbc117efdc\") " pod="openshift-marketplace/redhat-operators-fqrnq" Jan 22 06:09:17 crc kubenswrapper[4982]: I0122 06:09:17.556169 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f885c65c-ba3d-498c-a094-4abbc117efdc-utilities\") pod \"redhat-operators-fqrnq\" (UID: \"f885c65c-ba3d-498c-a094-4abbc117efdc\") " pod="openshift-marketplace/redhat-operators-fqrnq" Jan 22 06:09:17 crc kubenswrapper[4982]: I0122 06:09:17.556233 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dkzdw\" (UniqueName: \"kubernetes.io/projected/f885c65c-ba3d-498c-a094-4abbc117efdc-kube-api-access-dkzdw\") pod \"redhat-operators-fqrnq\" (UID: \"f885c65c-ba3d-498c-a094-4abbc117efdc\") " pod="openshift-marketplace/redhat-operators-fqrnq" Jan 22 06:09:17 crc kubenswrapper[4982]: I0122 06:09:17.556272 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f885c65c-ba3d-498c-a094-4abbc117efdc-catalog-content\") pod \"redhat-operators-fqrnq\" (UID: \"f885c65c-ba3d-498c-a094-4abbc117efdc\") " pod="openshift-marketplace/redhat-operators-fqrnq" Jan 22 06:09:17 crc kubenswrapper[4982]: I0122 06:09:17.556611 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f885c65c-ba3d-498c-a094-4abbc117efdc-utilities\") pod \"redhat-operators-fqrnq\" (UID: \"f885c65c-ba3d-498c-a094-4abbc117efdc\") " pod="openshift-marketplace/redhat-operators-fqrnq" Jan 22 06:09:17 crc kubenswrapper[4982]: I0122 06:09:17.557018 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f885c65c-ba3d-498c-a094-4abbc117efdc-catalog-content\") pod \"redhat-operators-fqrnq\" (UID: \"f885c65c-ba3d-498c-a094-4abbc117efdc\") " pod="openshift-marketplace/redhat-operators-fqrnq" Jan 22 06:09:17 crc kubenswrapper[4982]: I0122 06:09:17.599481 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-dkzdw\" (UniqueName: \"kubernetes.io/projected/f885c65c-ba3d-498c-a094-4abbc117efdc-kube-api-access-dkzdw\") pod \"redhat-operators-fqrnq\" (UID: \"f885c65c-ba3d-498c-a094-4abbc117efdc\") " pod="openshift-marketplace/redhat-operators-fqrnq" Jan 22 06:09:17 crc kubenswrapper[4982]: I0122 06:09:17.747326 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fqrnq" Jan 22 06:09:17 crc kubenswrapper[4982]: I0122 06:09:17.762603 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Jan 22 06:09:18 crc kubenswrapper[4982]: I0122 06:09:18.366767 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fqrnq"] Jan 22 06:09:18 crc kubenswrapper[4982]: W0122 06:09:18.370095 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf885c65c_ba3d_498c_a094_4abbc117efdc.slice/crio-3643f87b38e5a5c216b86137f5e34f8b69f86191550c34a15ee53ee196c33f03 WatchSource:0}: Error finding container 3643f87b38e5a5c216b86137f5e34f8b69f86191550c34a15ee53ee196c33f03: Status 404 returned error can't find the container with id 3643f87b38e5a5c216b86137f5e34f8b69f86191550c34a15ee53ee196c33f03 Jan 22 06:09:18 crc kubenswrapper[4982]: I0122 06:09:18.418809 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fqrnq" event={"ID":"f885c65c-ba3d-498c-a094-4abbc117efdc","Type":"ContainerStarted","Data":"3643f87b38e5a5c216b86137f5e34f8b69f86191550c34a15ee53ee196c33f03"} Jan 22 06:09:18 crc kubenswrapper[4982]: I0122 06:09:18.903955 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 22 06:09:18 crc kubenswrapper[4982]: I0122 06:09:18.914431 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 22 06:09:18 crc kubenswrapper[4982]: I0122 06:09:18.917749 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 22 06:09:18 crc kubenswrapper[4982]: I0122 06:09:18.974394 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 06:09:18 crc kubenswrapper[4982]: I0122 06:09:18.974457 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 06:09:19 crc kubenswrapper[4982]: I0122 06:09:19.429557 4982 generic.go:334] "Generic (PLEG): container finished" podID="f885c65c-ba3d-498c-a094-4abbc117efdc" containerID="124481cd899bded3e4740e2305a0c0d1adb8d057885a374b36c160e3f8fede08" exitCode=0 Jan 22 06:09:19 crc kubenswrapper[4982]: I0122 06:09:19.429610 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fqrnq" event={"ID":"f885c65c-ba3d-498c-a094-4abbc117efdc","Type":"ContainerDied","Data":"124481cd899bded3e4740e2305a0c0d1adb8d057885a374b36c160e3f8fede08"} Jan 22 06:09:19 crc kubenswrapper[4982]: I0122 06:09:19.437936 4982 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 22 06:09:21 crc kubenswrapper[4982]: I0122 06:09:21.465230 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fqrnq" event={"ID":"f885c65c-ba3d-498c-a094-4abbc117efdc","Type":"ContainerStarted","Data":"131adb6c781ce68e1766d68430c19ef8432eb18ef963f8f4025af704966f7693"} Jan 22 06:09:22 crc kubenswrapper[4982]: I0122 06:09:22.478800 4982 generic.go:334] "Generic (PLEG): container finished" podID="5bc86273-7064-4dd4-a7c6-8d809c420ebc" containerID="1a1ebed28006139fb5213e7b71ef77012030d9bba6e3862a0578b298c207244d" exitCode=137 Jan 22 06:09:22 crc kubenswrapper[4982]: I0122 06:09:22.478887 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"5bc86273-7064-4dd4-a7c6-8d809c420ebc","Type":"ContainerDied","Data":"1a1ebed28006139fb5213e7b71ef77012030d9bba6e3862a0578b298c207244d"} Jan 22 06:09:22 crc kubenswrapper[4982]: I0122 06:09:22.950212 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.067561 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bc86273-7064-4dd4-a7c6-8d809c420ebc-config-data\") pod \"5bc86273-7064-4dd4-a7c6-8d809c420ebc\" (UID: \"5bc86273-7064-4dd4-a7c6-8d809c420ebc\") " Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.067656 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c68kj\" (UniqueName: \"kubernetes.io/projected/5bc86273-7064-4dd4-a7c6-8d809c420ebc-kube-api-access-c68kj\") pod \"5bc86273-7064-4dd4-a7c6-8d809c420ebc\" (UID: \"5bc86273-7064-4dd4-a7c6-8d809c420ebc\") " Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.067870 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bc86273-7064-4dd4-a7c6-8d809c420ebc-combined-ca-bundle\") pod \"5bc86273-7064-4dd4-a7c6-8d809c420ebc\" (UID: \"5bc86273-7064-4dd4-a7c6-8d809c420ebc\") " Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.075838 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5bc86273-7064-4dd4-a7c6-8d809c420ebc-kube-api-access-c68kj" (OuterVolumeSpecName: "kube-api-access-c68kj") pod "5bc86273-7064-4dd4-a7c6-8d809c420ebc" (UID: "5bc86273-7064-4dd4-a7c6-8d809c420ebc"). InnerVolumeSpecName "kube-api-access-c68kj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.097310 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bc86273-7064-4dd4-a7c6-8d809c420ebc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5bc86273-7064-4dd4-a7c6-8d809c420ebc" (UID: "5bc86273-7064-4dd4-a7c6-8d809c420ebc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.097388 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bc86273-7064-4dd4-a7c6-8d809c420ebc-config-data" (OuterVolumeSpecName: "config-data") pod "5bc86273-7064-4dd4-a7c6-8d809c420ebc" (UID: "5bc86273-7064-4dd4-a7c6-8d809c420ebc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.170293 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bc86273-7064-4dd4-a7c6-8d809c420ebc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.170592 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bc86273-7064-4dd4-a7c6-8d809c420ebc-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.170726 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c68kj\" (UniqueName: \"kubernetes.io/projected/5bc86273-7064-4dd4-a7c6-8d809c420ebc-kube-api-access-c68kj\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.490953 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"5bc86273-7064-4dd4-a7c6-8d809c420ebc","Type":"ContainerDied","Data":"14650da8b438b35223c0787dc2bf5319a6537b08785461c0a44047d3d91ffe3f"} Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.491004 4982 scope.go:117] "RemoveContainer" containerID="1a1ebed28006139fb5213e7b71ef77012030d9bba6e3862a0578b298c207244d" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.491027 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.494785 4982 generic.go:334] "Generic (PLEG): container finished" podID="f885c65c-ba3d-498c-a094-4abbc117efdc" containerID="131adb6c781ce68e1766d68430c19ef8432eb18ef963f8f4025af704966f7693" exitCode=0 Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.494830 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fqrnq" event={"ID":"f885c65c-ba3d-498c-a094-4abbc117efdc","Type":"ContainerDied","Data":"131adb6c781ce68e1766d68430c19ef8432eb18ef963f8f4025af704966f7693"} Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.565684 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.572985 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.583761 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 06:09:23 crc kubenswrapper[4982]: E0122 06:09:23.584611 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bc86273-7064-4dd4-a7c6-8d809c420ebc" containerName="nova-cell1-novncproxy-novncproxy" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.584717 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bc86273-7064-4dd4-a7c6-8d809c420ebc" containerName="nova-cell1-novncproxy-novncproxy" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.585051 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="5bc86273-7064-4dd4-a7c6-8d809c420ebc" containerName="nova-cell1-novncproxy-novncproxy" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.585752 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.589531 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.589682 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.589756 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.632088 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.731452 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5bc86273-7064-4dd4-a7c6-8d809c420ebc" path="/var/lib/kubelet/pods/5bc86273-7064-4dd4-a7c6-8d809c420ebc/volumes" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.783699 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/eff35669-e559-41ee-9a8b-114907cd6ab0-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"eff35669-e559-41ee-9a8b-114907cd6ab0\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.783772 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7gxj\" (UniqueName: \"kubernetes.io/projected/eff35669-e559-41ee-9a8b-114907cd6ab0-kube-api-access-l7gxj\") pod \"nova-cell1-novncproxy-0\" (UID: \"eff35669-e559-41ee-9a8b-114907cd6ab0\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.784157 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eff35669-e559-41ee-9a8b-114907cd6ab0-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"eff35669-e559-41ee-9a8b-114907cd6ab0\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.784265 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/eff35669-e559-41ee-9a8b-114907cd6ab0-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"eff35669-e559-41ee-9a8b-114907cd6ab0\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.784372 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eff35669-e559-41ee-9a8b-114907cd6ab0-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"eff35669-e559-41ee-9a8b-114907cd6ab0\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.886765 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/eff35669-e559-41ee-9a8b-114907cd6ab0-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"eff35669-e559-41ee-9a8b-114907cd6ab0\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.886868 4982 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-l7gxj\" (UniqueName: \"kubernetes.io/projected/eff35669-e559-41ee-9a8b-114907cd6ab0-kube-api-access-l7gxj\") pod \"nova-cell1-novncproxy-0\" (UID: \"eff35669-e559-41ee-9a8b-114907cd6ab0\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.886955 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eff35669-e559-41ee-9a8b-114907cd6ab0-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"eff35669-e559-41ee-9a8b-114907cd6ab0\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.886979 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/eff35669-e559-41ee-9a8b-114907cd6ab0-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"eff35669-e559-41ee-9a8b-114907cd6ab0\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.887021 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eff35669-e559-41ee-9a8b-114907cd6ab0-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"eff35669-e559-41ee-9a8b-114907cd6ab0\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.891488 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eff35669-e559-41ee-9a8b-114907cd6ab0-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"eff35669-e559-41ee-9a8b-114907cd6ab0\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.891539 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/eff35669-e559-41ee-9a8b-114907cd6ab0-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"eff35669-e559-41ee-9a8b-114907cd6ab0\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.892627 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/eff35669-e559-41ee-9a8b-114907cd6ab0-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"eff35669-e559-41ee-9a8b-114907cd6ab0\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.893681 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eff35669-e559-41ee-9a8b-114907cd6ab0-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"eff35669-e559-41ee-9a8b-114907cd6ab0\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.919303 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7gxj\" (UniqueName: \"kubernetes.io/projected/eff35669-e559-41ee-9a8b-114907cd6ab0-kube-api-access-l7gxj\") pod \"nova-cell1-novncproxy-0\" (UID: \"eff35669-e559-41ee-9a8b-114907cd6ab0\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.921972 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.963208 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.963809 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.979406 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 22 06:09:23 crc kubenswrapper[4982]: I0122 06:09:23.981677 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 22 06:09:24 crc kubenswrapper[4982]: I0122 06:09:24.456491 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 06:09:24 crc kubenswrapper[4982]: I0122 06:09:24.509505 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"eff35669-e559-41ee-9a8b-114907cd6ab0","Type":"ContainerStarted","Data":"ba8484cd48d8bef98f75cf838831f3280fdbf6efb0419f484b66489e24684ac5"} Jan 22 06:09:24 crc kubenswrapper[4982]: I0122 06:09:24.509603 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 22 06:09:24 crc kubenswrapper[4982]: I0122 06:09:24.517232 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 22 06:09:24 crc kubenswrapper[4982]: I0122 06:09:24.752973 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-fcd6f8f8f-4kgmx"] Jan 22 06:09:24 crc kubenswrapper[4982]: I0122 06:09:24.754554 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-fcd6f8f8f-4kgmx" Jan 22 06:09:24 crc kubenswrapper[4982]: I0122 06:09:24.838431 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fcd6f8f8f-4kgmx"] Jan 22 06:09:24 crc kubenswrapper[4982]: I0122 06:09:24.930689 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b3aeab92-5988-4e7b-9fcf-62c13e364f98-dns-swift-storage-0\") pod \"dnsmasq-dns-fcd6f8f8f-4kgmx\" (UID: \"b3aeab92-5988-4e7b-9fcf-62c13e364f98\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-4kgmx" Jan 22 06:09:24 crc kubenswrapper[4982]: I0122 06:09:24.930796 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3aeab92-5988-4e7b-9fcf-62c13e364f98-config\") pod \"dnsmasq-dns-fcd6f8f8f-4kgmx\" (UID: \"b3aeab92-5988-4e7b-9fcf-62c13e364f98\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-4kgmx" Jan 22 06:09:24 crc kubenswrapper[4982]: I0122 06:09:24.930815 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b3aeab92-5988-4e7b-9fcf-62c13e364f98-ovsdbserver-sb\") pod \"dnsmasq-dns-fcd6f8f8f-4kgmx\" (UID: \"b3aeab92-5988-4e7b-9fcf-62c13e364f98\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-4kgmx" Jan 22 06:09:24 crc kubenswrapper[4982]: I0122 06:09:24.930864 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b3aeab92-5988-4e7b-9fcf-62c13e364f98-ovsdbserver-nb\") pod \"dnsmasq-dns-fcd6f8f8f-4kgmx\" (UID: \"b3aeab92-5988-4e7b-9fcf-62c13e364f98\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-4kgmx" Jan 22 06:09:24 crc kubenswrapper[4982]: I0122 06:09:24.930935 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwvjc\" (UniqueName: \"kubernetes.io/projected/b3aeab92-5988-4e7b-9fcf-62c13e364f98-kube-api-access-dwvjc\") pod \"dnsmasq-dns-fcd6f8f8f-4kgmx\" (UID: \"b3aeab92-5988-4e7b-9fcf-62c13e364f98\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-4kgmx" Jan 22 06:09:24 crc kubenswrapper[4982]: I0122 06:09:24.930958 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b3aeab92-5988-4e7b-9fcf-62c13e364f98-dns-svc\") pod \"dnsmasq-dns-fcd6f8f8f-4kgmx\" (UID: \"b3aeab92-5988-4e7b-9fcf-62c13e364f98\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-4kgmx" Jan 22 06:09:25 crc kubenswrapper[4982]: I0122 06:09:25.035985 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3aeab92-5988-4e7b-9fcf-62c13e364f98-config\") pod \"dnsmasq-dns-fcd6f8f8f-4kgmx\" (UID: \"b3aeab92-5988-4e7b-9fcf-62c13e364f98\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-4kgmx" Jan 22 06:09:25 crc kubenswrapper[4982]: I0122 06:09:25.036041 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b3aeab92-5988-4e7b-9fcf-62c13e364f98-ovsdbserver-sb\") pod \"dnsmasq-dns-fcd6f8f8f-4kgmx\" (UID: \"b3aeab92-5988-4e7b-9fcf-62c13e364f98\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-4kgmx" Jan 22 06:09:25 crc kubenswrapper[4982]: I0122 06:09:25.036119 4982 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b3aeab92-5988-4e7b-9fcf-62c13e364f98-ovsdbserver-nb\") pod \"dnsmasq-dns-fcd6f8f8f-4kgmx\" (UID: \"b3aeab92-5988-4e7b-9fcf-62c13e364f98\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-4kgmx" Jan 22 06:09:25 crc kubenswrapper[4982]: I0122 06:09:25.036174 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwvjc\" (UniqueName: \"kubernetes.io/projected/b3aeab92-5988-4e7b-9fcf-62c13e364f98-kube-api-access-dwvjc\") pod \"dnsmasq-dns-fcd6f8f8f-4kgmx\" (UID: \"b3aeab92-5988-4e7b-9fcf-62c13e364f98\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-4kgmx" Jan 22 06:09:25 crc kubenswrapper[4982]: I0122 06:09:25.036207 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b3aeab92-5988-4e7b-9fcf-62c13e364f98-dns-svc\") pod \"dnsmasq-dns-fcd6f8f8f-4kgmx\" (UID: \"b3aeab92-5988-4e7b-9fcf-62c13e364f98\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-4kgmx" Jan 22 06:09:25 crc kubenswrapper[4982]: I0122 06:09:25.036286 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b3aeab92-5988-4e7b-9fcf-62c13e364f98-dns-swift-storage-0\") pod \"dnsmasq-dns-fcd6f8f8f-4kgmx\" (UID: \"b3aeab92-5988-4e7b-9fcf-62c13e364f98\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-4kgmx" Jan 22 06:09:25 crc kubenswrapper[4982]: I0122 06:09:25.037188 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b3aeab92-5988-4e7b-9fcf-62c13e364f98-ovsdbserver-sb\") pod \"dnsmasq-dns-fcd6f8f8f-4kgmx\" (UID: \"b3aeab92-5988-4e7b-9fcf-62c13e364f98\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-4kgmx" Jan 22 06:09:25 crc kubenswrapper[4982]: I0122 06:09:25.037302 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b3aeab92-5988-4e7b-9fcf-62c13e364f98-dns-swift-storage-0\") pod \"dnsmasq-dns-fcd6f8f8f-4kgmx\" (UID: \"b3aeab92-5988-4e7b-9fcf-62c13e364f98\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-4kgmx" Jan 22 06:09:25 crc kubenswrapper[4982]: I0122 06:09:25.037466 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b3aeab92-5988-4e7b-9fcf-62c13e364f98-dns-svc\") pod \"dnsmasq-dns-fcd6f8f8f-4kgmx\" (UID: \"b3aeab92-5988-4e7b-9fcf-62c13e364f98\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-4kgmx" Jan 22 06:09:25 crc kubenswrapper[4982]: I0122 06:09:25.037615 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b3aeab92-5988-4e7b-9fcf-62c13e364f98-ovsdbserver-nb\") pod \"dnsmasq-dns-fcd6f8f8f-4kgmx\" (UID: \"b3aeab92-5988-4e7b-9fcf-62c13e364f98\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-4kgmx" Jan 22 06:09:25 crc kubenswrapper[4982]: I0122 06:09:25.037899 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3aeab92-5988-4e7b-9fcf-62c13e364f98-config\") pod \"dnsmasq-dns-fcd6f8f8f-4kgmx\" (UID: \"b3aeab92-5988-4e7b-9fcf-62c13e364f98\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-4kgmx" Jan 22 06:09:25 crc kubenswrapper[4982]: I0122 06:09:25.056316 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwvjc\" (UniqueName: 
\"kubernetes.io/projected/b3aeab92-5988-4e7b-9fcf-62c13e364f98-kube-api-access-dwvjc\") pod \"dnsmasq-dns-fcd6f8f8f-4kgmx\" (UID: \"b3aeab92-5988-4e7b-9fcf-62c13e364f98\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-4kgmx" Jan 22 06:09:25 crc kubenswrapper[4982]: I0122 06:09:25.069155 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fcd6f8f8f-4kgmx" Jan 22 06:09:25 crc kubenswrapper[4982]: I0122 06:09:25.520078 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"eff35669-e559-41ee-9a8b-114907cd6ab0","Type":"ContainerStarted","Data":"5cdd5203835aa0d86d98aea0ae6ed7270810a51fcce1c980a5665cd105b86e96"} Jan 22 06:09:25 crc kubenswrapper[4982]: I0122 06:09:25.524551 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fqrnq" event={"ID":"f885c65c-ba3d-498c-a094-4abbc117efdc","Type":"ContainerStarted","Data":"220f4b283b3f9a517a803b7ddf7eab12d76be773c8d7870ec22e070be16fddc8"} Jan 22 06:09:25 crc kubenswrapper[4982]: I0122 06:09:25.534307 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fcd6f8f8f-4kgmx"] Jan 22 06:09:25 crc kubenswrapper[4982]: I0122 06:09:25.559554 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.559538667 podStartE2EDuration="2.559538667s" podCreationTimestamp="2026-01-22 06:09:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:09:25.557198296 +0000 UTC m=+1426.395836299" watchObservedRunningTime="2026-01-22 06:09:25.559538667 +0000 UTC m=+1426.398176660" Jan 22 06:09:26 crc kubenswrapper[4982]: I0122 06:09:26.534629 4982 generic.go:334] "Generic (PLEG): container finished" podID="b3aeab92-5988-4e7b-9fcf-62c13e364f98" containerID="e0a7f6b0cbf180a5138362a5544c8d671b128b24285c4947149a0f2feef86c33" exitCode=0 Jan 22 06:09:26 crc kubenswrapper[4982]: I0122 06:09:26.534699 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcd6f8f8f-4kgmx" event={"ID":"b3aeab92-5988-4e7b-9fcf-62c13e364f98","Type":"ContainerDied","Data":"e0a7f6b0cbf180a5138362a5544c8d671b128b24285c4947149a0f2feef86c33"} Jan 22 06:09:26 crc kubenswrapper[4982]: I0122 06:09:26.534732 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcd6f8f8f-4kgmx" event={"ID":"b3aeab92-5988-4e7b-9fcf-62c13e364f98","Type":"ContainerStarted","Data":"d8c6537a32a128c90801c32a32ab42b41072520eeb92c3cd4b545b201e24f11a"} Jan 22 06:09:26 crc kubenswrapper[4982]: I0122 06:09:26.564120 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fqrnq" podStartSLOduration=4.055496852 podStartE2EDuration="9.564098991s" podCreationTimestamp="2026-01-22 06:09:17 +0000 UTC" firstStartedPulling="2026-01-22 06:09:19.431398578 +0000 UTC m=+1420.270036591" lastFinishedPulling="2026-01-22 06:09:24.940000727 +0000 UTC m=+1425.778638730" observedRunningTime="2026-01-22 06:09:25.587133245 +0000 UTC m=+1426.425771248" watchObservedRunningTime="2026-01-22 06:09:26.564098991 +0000 UTC m=+1427.402736994" Jan 22 06:09:27 crc kubenswrapper[4982]: I0122 06:09:27.149783 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 22 06:09:27 crc kubenswrapper[4982]: I0122 06:09:27.317688 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/ceilometer-0"] Jan 22 06:09:27 crc kubenswrapper[4982]: I0122 06:09:27.318173 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3a70b623-bbd3-4d52-be5d-916a140f5705" containerName="ceilometer-central-agent" containerID="cri-o://940fad49c69ab5357125129a50ccbe45067625063a0b7020655fb7d30d389dc1" gracePeriod=30 Jan 22 06:09:27 crc kubenswrapper[4982]: I0122 06:09:27.318290 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3a70b623-bbd3-4d52-be5d-916a140f5705" containerName="proxy-httpd" containerID="cri-o://61d8a78a229a2f55643eb257b791a6f3facaa2df1bd2707c97165bedaba71d13" gracePeriod=30 Jan 22 06:09:27 crc kubenswrapper[4982]: I0122 06:09:27.318320 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3a70b623-bbd3-4d52-be5d-916a140f5705" containerName="sg-core" containerID="cri-o://a6b70b48c5b1dc56c47ded6534ecdd7e182b9c1c91a264d4ac6c3929f833f8f1" gracePeriod=30 Jan 22 06:09:27 crc kubenswrapper[4982]: I0122 06:09:27.318349 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3a70b623-bbd3-4d52-be5d-916a140f5705" containerName="ceilometer-notification-agent" containerID="cri-o://0a6c5dcdb6e017aa14e366cf201c8466acd5f74426b10a316051d631513d0818" gracePeriod=30 Jan 22 06:09:27 crc kubenswrapper[4982]: I0122 06:09:27.337131 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="3a70b623-bbd3-4d52-be5d-916a140f5705" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 502" Jan 22 06:09:27 crc kubenswrapper[4982]: I0122 06:09:27.561829 4982 generic.go:334] "Generic (PLEG): container finished" podID="3a70b623-bbd3-4d52-be5d-916a140f5705" containerID="61d8a78a229a2f55643eb257b791a6f3facaa2df1bd2707c97165bedaba71d13" exitCode=0 Jan 22 06:09:27 crc kubenswrapper[4982]: I0122 06:09:27.561895 4982 generic.go:334] "Generic (PLEG): container finished" podID="3a70b623-bbd3-4d52-be5d-916a140f5705" containerID="a6b70b48c5b1dc56c47ded6534ecdd7e182b9c1c91a264d4ac6c3929f833f8f1" exitCode=2 Jan 22 06:09:27 crc kubenswrapper[4982]: I0122 06:09:27.561943 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3a70b623-bbd3-4d52-be5d-916a140f5705","Type":"ContainerDied","Data":"61d8a78a229a2f55643eb257b791a6f3facaa2df1bd2707c97165bedaba71d13"} Jan 22 06:09:27 crc kubenswrapper[4982]: I0122 06:09:27.561975 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3a70b623-bbd3-4d52-be5d-916a140f5705","Type":"ContainerDied","Data":"a6b70b48c5b1dc56c47ded6534ecdd7e182b9c1c91a264d4ac6c3929f833f8f1"} Jan 22 06:09:27 crc kubenswrapper[4982]: I0122 06:09:27.564002 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcd6f8f8f-4kgmx" event={"ID":"b3aeab92-5988-4e7b-9fcf-62c13e364f98","Type":"ContainerStarted","Data":"c57a66ba5563b4184480525b497fbf6412201b1822a40a8b542aa66de3ec6598"} Jan 22 06:09:27 crc kubenswrapper[4982]: I0122 06:09:27.564071 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="8a89c209-8e3e-480d-af7c-fb721529fc5d" containerName="nova-api-log" containerID="cri-o://9b34094b1dcb0225702177e8c9194fa3787c33edce73af1d24645c621623a068" gracePeriod=30 Jan 22 06:09:27 crc kubenswrapper[4982]: I0122 06:09:27.564181 4982 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="8a89c209-8e3e-480d-af7c-fb721529fc5d" containerName="nova-api-api" containerID="cri-o://527c9c4f863991b42e715028f3b84be5fe277253a52d58a78406d7ef30d17e3c" gracePeriod=30 Jan 22 06:09:27 crc kubenswrapper[4982]: I0122 06:09:27.598664 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-fcd6f8f8f-4kgmx" podStartSLOduration=3.598646698 podStartE2EDuration="3.598646698s" podCreationTimestamp="2026-01-22 06:09:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:09:27.591105379 +0000 UTC m=+1428.429743382" watchObservedRunningTime="2026-01-22 06:09:27.598646698 +0000 UTC m=+1428.437284701" Jan 22 06:09:27 crc kubenswrapper[4982]: I0122 06:09:27.747764 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fqrnq" Jan 22 06:09:27 crc kubenswrapper[4982]: I0122 06:09:27.747871 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fqrnq" Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.446707 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.574278 4982 generic.go:334] "Generic (PLEG): container finished" podID="8a89c209-8e3e-480d-af7c-fb721529fc5d" containerID="9b34094b1dcb0225702177e8c9194fa3787c33edce73af1d24645c621623a068" exitCode=143 Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.574352 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8a89c209-8e3e-480d-af7c-fb721529fc5d","Type":"ContainerDied","Data":"9b34094b1dcb0225702177e8c9194fa3787c33edce73af1d24645c621623a068"} Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.577292 4982 generic.go:334] "Generic (PLEG): container finished" podID="3a70b623-bbd3-4d52-be5d-916a140f5705" containerID="0a6c5dcdb6e017aa14e366cf201c8466acd5f74426b10a316051d631513d0818" exitCode=0 Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.577340 4982 generic.go:334] "Generic (PLEG): container finished" podID="3a70b623-bbd3-4d52-be5d-916a140f5705" containerID="940fad49c69ab5357125129a50ccbe45067625063a0b7020655fb7d30d389dc1" exitCode=0 Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.577340 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3a70b623-bbd3-4d52-be5d-916a140f5705","Type":"ContainerDied","Data":"0a6c5dcdb6e017aa14e366cf201c8466acd5f74426b10a316051d631513d0818"} Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.577389 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3a70b623-bbd3-4d52-be5d-916a140f5705","Type":"ContainerDied","Data":"940fad49c69ab5357125129a50ccbe45067625063a0b7020655fb7d30d389dc1"} Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.577404 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3a70b623-bbd3-4d52-be5d-916a140f5705","Type":"ContainerDied","Data":"f9380a37007c1812cd5a11bc8ef13accb15ad95a372aad9f3226011cee707c2c"} Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.577414 4982 util.go:48] "No ready sandbox for pod can be found. 
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.577422 4982 scope.go:117] "RemoveContainer" containerID="61d8a78a229a2f55643eb257b791a6f3facaa2df1bd2707c97165bedaba71d13"
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.578005 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-fcd6f8f8f-4kgmx"
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.602998 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3a70b623-bbd3-4d52-be5d-916a140f5705-log-httpd\") pod \"3a70b623-bbd3-4d52-be5d-916a140f5705\" (UID: \"3a70b623-bbd3-4d52-be5d-916a140f5705\") "
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.603118 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3a70b623-bbd3-4d52-be5d-916a140f5705-sg-core-conf-yaml\") pod \"3a70b623-bbd3-4d52-be5d-916a140f5705\" (UID: \"3a70b623-bbd3-4d52-be5d-916a140f5705\") "
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.603165 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a70b623-bbd3-4d52-be5d-916a140f5705-combined-ca-bundle\") pod \"3a70b623-bbd3-4d52-be5d-916a140f5705\" (UID: \"3a70b623-bbd3-4d52-be5d-916a140f5705\") "
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.603203 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3a70b623-bbd3-4d52-be5d-916a140f5705-run-httpd\") pod \"3a70b623-bbd3-4d52-be5d-916a140f5705\" (UID: \"3a70b623-bbd3-4d52-be5d-916a140f5705\") "
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.603235 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a70b623-bbd3-4d52-be5d-916a140f5705-config-data\") pod \"3a70b623-bbd3-4d52-be5d-916a140f5705\" (UID: \"3a70b623-bbd3-4d52-be5d-916a140f5705\") "
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.603285 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a70b623-bbd3-4d52-be5d-916a140f5705-scripts\") pod \"3a70b623-bbd3-4d52-be5d-916a140f5705\" (UID: \"3a70b623-bbd3-4d52-be5d-916a140f5705\") "
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.603380 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a70b623-bbd3-4d52-be5d-916a140f5705-ceilometer-tls-certs\") pod \"3a70b623-bbd3-4d52-be5d-916a140f5705\" (UID: \"3a70b623-bbd3-4d52-be5d-916a140f5705\") "
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.603452 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7mxbd\" (UniqueName: \"kubernetes.io/projected/3a70b623-bbd3-4d52-be5d-916a140f5705-kube-api-access-7mxbd\") pod \"3a70b623-bbd3-4d52-be5d-916a140f5705\" (UID: \"3a70b623-bbd3-4d52-be5d-916a140f5705\") "
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.603492 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a70b623-bbd3-4d52-be5d-916a140f5705-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "3a70b623-bbd3-4d52-be5d-916a140f5705" (UID: "3a70b623-bbd3-4d52-be5d-916a140f5705"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.603786 4982 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3a70b623-bbd3-4d52-be5d-916a140f5705-run-httpd\") on node \"crc\" DevicePath \"\""
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.604207 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a70b623-bbd3-4d52-be5d-916a140f5705-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "3a70b623-bbd3-4d52-be5d-916a140f5705" (UID: "3a70b623-bbd3-4d52-be5d-916a140f5705"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.605955 4982 scope.go:117] "RemoveContainer" containerID="a6b70b48c5b1dc56c47ded6534ecdd7e182b9c1c91a264d4ac6c3929f833f8f1"
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.609017 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a70b623-bbd3-4d52-be5d-916a140f5705-scripts" (OuterVolumeSpecName: "scripts") pod "3a70b623-bbd3-4d52-be5d-916a140f5705" (UID: "3a70b623-bbd3-4d52-be5d-916a140f5705"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.615994 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a70b623-bbd3-4d52-be5d-916a140f5705-kube-api-access-7mxbd" (OuterVolumeSpecName: "kube-api-access-7mxbd") pod "3a70b623-bbd3-4d52-be5d-916a140f5705" (UID: "3a70b623-bbd3-4d52-be5d-916a140f5705"). InnerVolumeSpecName "kube-api-access-7mxbd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.629985 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a70b623-bbd3-4d52-be5d-916a140f5705-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "3a70b623-bbd3-4d52-be5d-916a140f5705" (UID: "3a70b623-bbd3-4d52-be5d-916a140f5705"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.636905 4982 scope.go:117] "RemoveContainer" containerID="0a6c5dcdb6e017aa14e366cf201c8466acd5f74426b10a316051d631513d0818"
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.653153 4982 scope.go:117] "RemoveContainer" containerID="940fad49c69ab5357125129a50ccbe45067625063a0b7020655fb7d30d389dc1"
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.675881 4982 scope.go:117] "RemoveContainer" containerID="61d8a78a229a2f55643eb257b791a6f3facaa2df1bd2707c97165bedaba71d13"
Jan 22 06:09:28 crc kubenswrapper[4982]: E0122 06:09:28.676473 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"61d8a78a229a2f55643eb257b791a6f3facaa2df1bd2707c97165bedaba71d13\": container with ID starting with 61d8a78a229a2f55643eb257b791a6f3facaa2df1bd2707c97165bedaba71d13 not found: ID does not exist" containerID="61d8a78a229a2f55643eb257b791a6f3facaa2df1bd2707c97165bedaba71d13"
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.676502 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61d8a78a229a2f55643eb257b791a6f3facaa2df1bd2707c97165bedaba71d13"} err="failed to get container status \"61d8a78a229a2f55643eb257b791a6f3facaa2df1bd2707c97165bedaba71d13\": rpc error: code = NotFound desc = could not find container \"61d8a78a229a2f55643eb257b791a6f3facaa2df1bd2707c97165bedaba71d13\": container with ID starting with 61d8a78a229a2f55643eb257b791a6f3facaa2df1bd2707c97165bedaba71d13 not found: ID does not exist"
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.676522 4982 scope.go:117] "RemoveContainer" containerID="a6b70b48c5b1dc56c47ded6534ecdd7e182b9c1c91a264d4ac6c3929f833f8f1"
Jan 22 06:09:28 crc kubenswrapper[4982]: E0122 06:09:28.679108 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6b70b48c5b1dc56c47ded6534ecdd7e182b9c1c91a264d4ac6c3929f833f8f1\": container with ID starting with a6b70b48c5b1dc56c47ded6534ecdd7e182b9c1c91a264d4ac6c3929f833f8f1 not found: ID does not exist" containerID="a6b70b48c5b1dc56c47ded6534ecdd7e182b9c1c91a264d4ac6c3929f833f8f1"
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.679134 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6b70b48c5b1dc56c47ded6534ecdd7e182b9c1c91a264d4ac6c3929f833f8f1"} err="failed to get container status \"a6b70b48c5b1dc56c47ded6534ecdd7e182b9c1c91a264d4ac6c3929f833f8f1\": rpc error: code = NotFound desc = could not find container \"a6b70b48c5b1dc56c47ded6534ecdd7e182b9c1c91a264d4ac6c3929f833f8f1\": container with ID starting with a6b70b48c5b1dc56c47ded6534ecdd7e182b9c1c91a264d4ac6c3929f833f8f1 not found: ID does not exist"
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.679151 4982 scope.go:117] "RemoveContainer" containerID="0a6c5dcdb6e017aa14e366cf201c8466acd5f74426b10a316051d631513d0818"
Jan 22 06:09:28 crc kubenswrapper[4982]: E0122 06:09:28.679565 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a6c5dcdb6e017aa14e366cf201c8466acd5f74426b10a316051d631513d0818\": container with ID starting with 0a6c5dcdb6e017aa14e366cf201c8466acd5f74426b10a316051d631513d0818 not found: ID does not exist" containerID="0a6c5dcdb6e017aa14e366cf201c8466acd5f74426b10a316051d631513d0818"
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.679643 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a6c5dcdb6e017aa14e366cf201c8466acd5f74426b10a316051d631513d0818"} err="failed to get container status \"0a6c5dcdb6e017aa14e366cf201c8466acd5f74426b10a316051d631513d0818\": rpc error: code = NotFound desc = could not find container \"0a6c5dcdb6e017aa14e366cf201c8466acd5f74426b10a316051d631513d0818\": container with ID starting with 0a6c5dcdb6e017aa14e366cf201c8466acd5f74426b10a316051d631513d0818 not found: ID does not exist"
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.679678 4982 scope.go:117] "RemoveContainer" containerID="940fad49c69ab5357125129a50ccbe45067625063a0b7020655fb7d30d389dc1"
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.679901 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a70b623-bbd3-4d52-be5d-916a140f5705-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "3a70b623-bbd3-4d52-be5d-916a140f5705" (UID: "3a70b623-bbd3-4d52-be5d-916a140f5705"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 06:09:28 crc kubenswrapper[4982]: E0122 06:09:28.679994 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"940fad49c69ab5357125129a50ccbe45067625063a0b7020655fb7d30d389dc1\": container with ID starting with 940fad49c69ab5357125129a50ccbe45067625063a0b7020655fb7d30d389dc1 not found: ID does not exist" containerID="940fad49c69ab5357125129a50ccbe45067625063a0b7020655fb7d30d389dc1"
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.680023 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"940fad49c69ab5357125129a50ccbe45067625063a0b7020655fb7d30d389dc1"} err="failed to get container status \"940fad49c69ab5357125129a50ccbe45067625063a0b7020655fb7d30d389dc1\": rpc error: code = NotFound desc = could not find container \"940fad49c69ab5357125129a50ccbe45067625063a0b7020655fb7d30d389dc1\": container with ID starting with 940fad49c69ab5357125129a50ccbe45067625063a0b7020655fb7d30d389dc1 not found: ID does not exist"
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.680040 4982 scope.go:117] "RemoveContainer" containerID="61d8a78a229a2f55643eb257b791a6f3facaa2df1bd2707c97165bedaba71d13"
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.680296 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61d8a78a229a2f55643eb257b791a6f3facaa2df1bd2707c97165bedaba71d13"} err="failed to get container status \"61d8a78a229a2f55643eb257b791a6f3facaa2df1bd2707c97165bedaba71d13\": rpc error: code = NotFound desc = could not find container \"61d8a78a229a2f55643eb257b791a6f3facaa2df1bd2707c97165bedaba71d13\": container with ID starting with 61d8a78a229a2f55643eb257b791a6f3facaa2df1bd2707c97165bedaba71d13 not found: ID does not exist"
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.680319 4982 scope.go:117] "RemoveContainer" containerID="a6b70b48c5b1dc56c47ded6534ecdd7e182b9c1c91a264d4ac6c3929f833f8f1"
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.680621 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6b70b48c5b1dc56c47ded6534ecdd7e182b9c1c91a264d4ac6c3929f833f8f1"} err="failed to get container status \"a6b70b48c5b1dc56c47ded6534ecdd7e182b9c1c91a264d4ac6c3929f833f8f1\": rpc error: code = NotFound desc = could not find container \"a6b70b48c5b1dc56c47ded6534ecdd7e182b9c1c91a264d4ac6c3929f833f8f1\": container with ID starting with a6b70b48c5b1dc56c47ded6534ecdd7e182b9c1c91a264d4ac6c3929f833f8f1 not found: ID does not exist"
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.680651 4982 scope.go:117] "RemoveContainer" containerID="0a6c5dcdb6e017aa14e366cf201c8466acd5f74426b10a316051d631513d0818"
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.681828 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a6c5dcdb6e017aa14e366cf201c8466acd5f74426b10a316051d631513d0818"} err="failed to get container status \"0a6c5dcdb6e017aa14e366cf201c8466acd5f74426b10a316051d631513d0818\": rpc error: code = NotFound desc = could not find container \"0a6c5dcdb6e017aa14e366cf201c8466acd5f74426b10a316051d631513d0818\": container with ID starting with 0a6c5dcdb6e017aa14e366cf201c8466acd5f74426b10a316051d631513d0818 not found: ID does not exist"
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.681980 4982 scope.go:117] "RemoveContainer" containerID="940fad49c69ab5357125129a50ccbe45067625063a0b7020655fb7d30d389dc1"
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.682293 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"940fad49c69ab5357125129a50ccbe45067625063a0b7020655fb7d30d389dc1"} err="failed to get container status \"940fad49c69ab5357125129a50ccbe45067625063a0b7020655fb7d30d389dc1\": rpc error: code = NotFound desc = could not find container \"940fad49c69ab5357125129a50ccbe45067625063a0b7020655fb7d30d389dc1\": container with ID starting with 940fad49c69ab5357125129a50ccbe45067625063a0b7020655fb7d30d389dc1 not found: ID does not exist"
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.705066 4982 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a70b623-bbd3-4d52-be5d-916a140f5705-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.705097 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7mxbd\" (UniqueName: \"kubernetes.io/projected/3a70b623-bbd3-4d52-be5d-916a140f5705-kube-api-access-7mxbd\") on node \"crc\" DevicePath \"\""
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.705110 4982 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3a70b623-bbd3-4d52-be5d-916a140f5705-log-httpd\") on node \"crc\" DevicePath \"\""
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.705118 4982 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3a70b623-bbd3-4d52-be5d-916a140f5705-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.705128 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a70b623-bbd3-4d52-be5d-916a140f5705-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.706028 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a70b623-bbd3-4d52-be5d-916a140f5705-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3a70b623-bbd3-4d52-be5d-916a140f5705" (UID: "3a70b623-bbd3-4d52-be5d-916a140f5705"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
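
The "ContainerStatus from runtime service failed ... NotFound" / "DeleteContainer returned error" pairs above are benign: by the time a second cleanup pass asks CRI-O about a container, an earlier pass has already removed it, and NotFound effectively means the work is done. A hypothetical sketch of that idempotent-deletion check against any gRPC API (not the kubelet's own code; the alreadyGone helper is invented for illustration):

    package main

    import (
    	"fmt"

    	"google.golang.org/grpc/codes"
    	"google.golang.org/grpc/status"
    )

    // alreadyGone reports whether a call failed only because the object no
    // longer exists, which a deletion path can safely treat as success.
    func alreadyGone(err error) bool {
    	return status.Code(err) == codes.NotFound
    }

    func main() {
    	// Simulated runtime answer for a container a previous pass already removed.
    	err := status.Error(codes.NotFound,
    		`could not find container "61d8a78a...": ID does not exist`)
    	if alreadyGone(err) {
    		fmt.Println("container already gone; nothing left to do")
    	}
    }
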
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.744589 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a70b623-bbd3-4d52-be5d-916a140f5705-config-data" (OuterVolumeSpecName: "config-data") pod "3a70b623-bbd3-4d52-be5d-916a140f5705" (UID: "3a70b623-bbd3-4d52-be5d-916a140f5705"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.788320 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-fqrnq" podUID="f885c65c-ba3d-498c-a094-4abbc117efdc" containerName="registry-server" probeResult="failure" output=< Jan 22 06:09:28 crc kubenswrapper[4982]: timeout: failed to connect service ":50051" within 1s Jan 22 06:09:28 crc kubenswrapper[4982]: > Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.807343 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a70b623-bbd3-4d52-be5d-916a140f5705-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.807430 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a70b623-bbd3-4d52-be5d-916a140f5705-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.910428 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.920458 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.922307 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.944963 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:09:28 crc kubenswrapper[4982]: E0122 06:09:28.945553 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a70b623-bbd3-4d52-be5d-916a140f5705" containerName="proxy-httpd" Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.945618 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a70b623-bbd3-4d52-be5d-916a140f5705" containerName="proxy-httpd" Jan 22 06:09:28 crc kubenswrapper[4982]: E0122 06:09:28.945694 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a70b623-bbd3-4d52-be5d-916a140f5705" containerName="ceilometer-central-agent" Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.945746 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a70b623-bbd3-4d52-be5d-916a140f5705" containerName="ceilometer-central-agent" Jan 22 06:09:28 crc kubenswrapper[4982]: E0122 06:09:28.945813 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a70b623-bbd3-4d52-be5d-916a140f5705" containerName="sg-core" Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.945886 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a70b623-bbd3-4d52-be5d-916a140f5705" containerName="sg-core" Jan 22 06:09:28 crc kubenswrapper[4982]: E0122 06:09:28.945943 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a70b623-bbd3-4d52-be5d-916a140f5705" containerName="ceilometer-notification-agent" Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.946000 4982 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="3a70b623-bbd3-4d52-be5d-916a140f5705" containerName="ceilometer-notification-agent" Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.946220 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a70b623-bbd3-4d52-be5d-916a140f5705" containerName="sg-core" Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.946282 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a70b623-bbd3-4d52-be5d-916a140f5705" containerName="ceilometer-central-agent" Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.946348 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a70b623-bbd3-4d52-be5d-916a140f5705" containerName="ceilometer-notification-agent" Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.946414 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a70b623-bbd3-4d52-be5d-916a140f5705" containerName="proxy-httpd" Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.948011 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.951344 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.953079 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.953916 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 22 06:09:28 crc kubenswrapper[4982]: I0122 06:09:28.982821 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:09:29 crc kubenswrapper[4982]: I0122 06:09:29.011823 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-run-httpd\") pod \"ceilometer-0\" (UID: \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\") " pod="openstack/ceilometer-0" Jan 22 06:09:29 crc kubenswrapper[4982]: I0122 06:09:29.011890 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-scripts\") pod \"ceilometer-0\" (UID: \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\") " pod="openstack/ceilometer-0" Jan 22 06:09:29 crc kubenswrapper[4982]: I0122 06:09:29.011909 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rsbz6\" (UniqueName: \"kubernetes.io/projected/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-kube-api-access-rsbz6\") pod \"ceilometer-0\" (UID: \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\") " pod="openstack/ceilometer-0" Jan 22 06:09:29 crc kubenswrapper[4982]: I0122 06:09:29.011961 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\") " pod="openstack/ceilometer-0" Jan 22 06:09:29 crc kubenswrapper[4982]: I0122 06:09:29.012032 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-log-httpd\") pod \"ceilometer-0\" (UID: 
\"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\") " pod="openstack/ceilometer-0" Jan 22 06:09:29 crc kubenswrapper[4982]: I0122 06:09:29.012084 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-config-data\") pod \"ceilometer-0\" (UID: \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\") " pod="openstack/ceilometer-0" Jan 22 06:09:29 crc kubenswrapper[4982]: I0122 06:09:29.012152 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\") " pod="openstack/ceilometer-0" Jan 22 06:09:29 crc kubenswrapper[4982]: I0122 06:09:29.012279 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\") " pod="openstack/ceilometer-0" Jan 22 06:09:29 crc kubenswrapper[4982]: I0122 06:09:29.114321 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\") " pod="openstack/ceilometer-0" Jan 22 06:09:29 crc kubenswrapper[4982]: I0122 06:09:29.114419 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\") " pod="openstack/ceilometer-0" Jan 22 06:09:29 crc kubenswrapper[4982]: I0122 06:09:29.114455 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-run-httpd\") pod \"ceilometer-0\" (UID: \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\") " pod="openstack/ceilometer-0" Jan 22 06:09:29 crc kubenswrapper[4982]: I0122 06:09:29.114480 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-scripts\") pod \"ceilometer-0\" (UID: \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\") " pod="openstack/ceilometer-0" Jan 22 06:09:29 crc kubenswrapper[4982]: I0122 06:09:29.114496 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rsbz6\" (UniqueName: \"kubernetes.io/projected/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-kube-api-access-rsbz6\") pod \"ceilometer-0\" (UID: \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\") " pod="openstack/ceilometer-0" Jan 22 06:09:29 crc kubenswrapper[4982]: I0122 06:09:29.114527 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\") " pod="openstack/ceilometer-0" Jan 22 06:09:29 crc kubenswrapper[4982]: I0122 06:09:29.114562 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-log-httpd\") pod \"ceilometer-0\" (UID: \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\") " pod="openstack/ceilometer-0" Jan 22 06:09:29 crc kubenswrapper[4982]: I0122 06:09:29.114591 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-config-data\") pod \"ceilometer-0\" (UID: \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\") " pod="openstack/ceilometer-0" Jan 22 06:09:29 crc kubenswrapper[4982]: I0122 06:09:29.115643 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-run-httpd\") pod \"ceilometer-0\" (UID: \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\") " pod="openstack/ceilometer-0" Jan 22 06:09:29 crc kubenswrapper[4982]: I0122 06:09:29.115725 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-log-httpd\") pod \"ceilometer-0\" (UID: \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\") " pod="openstack/ceilometer-0" Jan 22 06:09:29 crc kubenswrapper[4982]: I0122 06:09:29.121421 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\") " pod="openstack/ceilometer-0" Jan 22 06:09:29 crc kubenswrapper[4982]: I0122 06:09:29.121736 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-config-data\") pod \"ceilometer-0\" (UID: \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\") " pod="openstack/ceilometer-0" Jan 22 06:09:29 crc kubenswrapper[4982]: I0122 06:09:29.121807 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\") " pod="openstack/ceilometer-0" Jan 22 06:09:29 crc kubenswrapper[4982]: I0122 06:09:29.121968 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\") " pod="openstack/ceilometer-0" Jan 22 06:09:29 crc kubenswrapper[4982]: I0122 06:09:29.122870 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-scripts\") pod \"ceilometer-0\" (UID: \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\") " pod="openstack/ceilometer-0" Jan 22 06:09:29 crc kubenswrapper[4982]: I0122 06:09:29.134646 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rsbz6\" (UniqueName: \"kubernetes.io/projected/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-kube-api-access-rsbz6\") pod \"ceilometer-0\" (UID: \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\") " pod="openstack/ceilometer-0" Jan 22 06:09:29 crc kubenswrapper[4982]: I0122 06:09:29.247635 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:09:29 crc kubenswrapper[4982]: I0122 06:09:29.248481 4982 util.go:30] "No sandbox for pod can 
Jan 22 06:09:29 crc kubenswrapper[4982]: I0122 06:09:29.676329 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Jan 22 06:09:29 crc kubenswrapper[4982]: I0122 06:09:29.729007 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a70b623-bbd3-4d52-be5d-916a140f5705" path="/var/lib/kubelet/pods/3a70b623-bbd3-4d52-be5d-916a140f5705/volumes"
Jan 22 06:09:30 crc kubenswrapper[4982]: I0122 06:09:30.624676 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d55e9b1f-cae3-4723-8230-fb22aaa6aed5","Type":"ContainerStarted","Data":"5b60d2e2a1d1289ffe0aaaf52a1f293fe69aaa614f406399bc38b2a1b58307f1"}
Jan 22 06:09:30 crc kubenswrapper[4982]: I0122 06:09:30.625083 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d55e9b1f-cae3-4723-8230-fb22aaa6aed5","Type":"ContainerStarted","Data":"c310bd457c35713d030fc922a1560b330a0368a0520f3e9e5b80bfe6262bbca2"}
Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.556469 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.645485 4982 generic.go:334] "Generic (PLEG): container finished" podID="8a89c209-8e3e-480d-af7c-fb721529fc5d" containerID="527c9c4f863991b42e715028f3b84be5fe277253a52d58a78406d7ef30d17e3c" exitCode=0
Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.645541 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8a89c209-8e3e-480d-af7c-fb721529fc5d","Type":"ContainerDied","Data":"527c9c4f863991b42e715028f3b84be5fe277253a52d58a78406d7ef30d17e3c"}
Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.645567 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8a89c209-8e3e-480d-af7c-fb721529fc5d","Type":"ContainerDied","Data":"1fe3357d0eff614c89d7739feee37f7d73c795fe0e27c635326f23bcedd22a01"}
Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.645585 4982 scope.go:117] "RemoveContainer" containerID="527c9c4f863991b42e715028f3b84be5fe277253a52d58a78406d7ef30d17e3c"
Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.645721 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.651974 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d55e9b1f-cae3-4723-8230-fb22aaa6aed5","Type":"ContainerStarted","Data":"6da460f48dade095de85e52ee5e115773b95fc39355f4be2d9312925a4711356"}
Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.658944 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d7bpj\" (UniqueName: \"kubernetes.io/projected/8a89c209-8e3e-480d-af7c-fb721529fc5d-kube-api-access-d7bpj\") pod \"8a89c209-8e3e-480d-af7c-fb721529fc5d\" (UID: \"8a89c209-8e3e-480d-af7c-fb721529fc5d\") "
Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.658992 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a89c209-8e3e-480d-af7c-fb721529fc5d-config-data\") pod \"8a89c209-8e3e-480d-af7c-fb721529fc5d\" (UID: \"8a89c209-8e3e-480d-af7c-fb721529fc5d\") "
Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.659046 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8a89c209-8e3e-480d-af7c-fb721529fc5d-logs\") pod \"8a89c209-8e3e-480d-af7c-fb721529fc5d\" (UID: \"8a89c209-8e3e-480d-af7c-fb721529fc5d\") "
Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.659200 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a89c209-8e3e-480d-af7c-fb721529fc5d-combined-ca-bundle\") pod \"8a89c209-8e3e-480d-af7c-fb721529fc5d\" (UID: \"8a89c209-8e3e-480d-af7c-fb721529fc5d\") "
Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.662903 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8a89c209-8e3e-480d-af7c-fb721529fc5d-logs" (OuterVolumeSpecName: "logs") pod "8a89c209-8e3e-480d-af7c-fb721529fc5d" (UID: "8a89c209-8e3e-480d-af7c-fb721529fc5d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.668129 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a89c209-8e3e-480d-af7c-fb721529fc5d-kube-api-access-d7bpj" (OuterVolumeSpecName: "kube-api-access-d7bpj") pod "8a89c209-8e3e-480d-af7c-fb721529fc5d" (UID: "8a89c209-8e3e-480d-af7c-fb721529fc5d"). InnerVolumeSpecName "kube-api-access-d7bpj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.676059 4982 scope.go:117] "RemoveContainer" containerID="9b34094b1dcb0225702177e8c9194fa3787c33edce73af1d24645c621623a068"
Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.697900 4982 scope.go:117] "RemoveContainer" containerID="527c9c4f863991b42e715028f3b84be5fe277253a52d58a78406d7ef30d17e3c"
Jan 22 06:09:31 crc kubenswrapper[4982]: E0122 06:09:31.698576 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"527c9c4f863991b42e715028f3b84be5fe277253a52d58a78406d7ef30d17e3c\": container with ID starting with 527c9c4f863991b42e715028f3b84be5fe277253a52d58a78406d7ef30d17e3c not found: ID does not exist" containerID="527c9c4f863991b42e715028f3b84be5fe277253a52d58a78406d7ef30d17e3c"
Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.698625 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"527c9c4f863991b42e715028f3b84be5fe277253a52d58a78406d7ef30d17e3c"} err="failed to get container status \"527c9c4f863991b42e715028f3b84be5fe277253a52d58a78406d7ef30d17e3c\": rpc error: code = NotFound desc = could not find container \"527c9c4f863991b42e715028f3b84be5fe277253a52d58a78406d7ef30d17e3c\": container with ID starting with 527c9c4f863991b42e715028f3b84be5fe277253a52d58a78406d7ef30d17e3c not found: ID does not exist"
Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.698652 4982 scope.go:117] "RemoveContainer" containerID="9b34094b1dcb0225702177e8c9194fa3787c33edce73af1d24645c621623a068"
Jan 22 06:09:31 crc kubenswrapper[4982]: E0122 06:09:31.699205 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b34094b1dcb0225702177e8c9194fa3787c33edce73af1d24645c621623a068\": container with ID starting with 9b34094b1dcb0225702177e8c9194fa3787c33edce73af1d24645c621623a068 not found: ID does not exist" containerID="9b34094b1dcb0225702177e8c9194fa3787c33edce73af1d24645c621623a068"
Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.699252 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b34094b1dcb0225702177e8c9194fa3787c33edce73af1d24645c621623a068"} err="failed to get container status \"9b34094b1dcb0225702177e8c9194fa3787c33edce73af1d24645c621623a068\": rpc error: code = NotFound desc = could not find container \"9b34094b1dcb0225702177e8c9194fa3787c33edce73af1d24645c621623a068\": container with ID starting with 9b34094b1dcb0225702177e8c9194fa3787c33edce73af1d24645c621623a068 not found: ID does not exist"
Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.700944 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a89c209-8e3e-480d-af7c-fb721529fc5d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8a89c209-8e3e-480d-af7c-fb721529fc5d" (UID: "8a89c209-8e3e-480d-af7c-fb721529fc5d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.710066 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a89c209-8e3e-480d-af7c-fb721529fc5d-config-data" (OuterVolumeSpecName: "config-data") pod "8a89c209-8e3e-480d-af7c-fb721529fc5d" (UID: "8a89c209-8e3e-480d-af7c-fb721529fc5d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.762409 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a89c209-8e3e-480d-af7c-fb721529fc5d-config-data\") on node \"crc\" DevicePath \"\""
Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.762438 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8a89c209-8e3e-480d-af7c-fb721529fc5d-logs\") on node \"crc\" DevicePath \"\""
Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.762447 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a89c209-8e3e-480d-af7c-fb721529fc5d-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.762460 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d7bpj\" (UniqueName: \"kubernetes.io/projected/8a89c209-8e3e-480d-af7c-fb721529fc5d-kube-api-access-d7bpj\") on node \"crc\" DevicePath \"\""
Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.964413 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.971677 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.992136 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Jan 22 06:09:31 crc kubenswrapper[4982]: E0122 06:09:31.992499 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a89c209-8e3e-480d-af7c-fb721529fc5d" containerName="nova-api-api"
Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.992516 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a89c209-8e3e-480d-af7c-fb721529fc5d" containerName="nova-api-api"
Jan 22 06:09:31 crc kubenswrapper[4982]: E0122 06:09:31.992535 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a89c209-8e3e-480d-af7c-fb721529fc5d" containerName="nova-api-log"
Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.992542 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a89c209-8e3e-480d-af7c-fb721529fc5d" containerName="nova-api-log"
Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.992721 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a89c209-8e3e-480d-af7c-fb721529fc5d" containerName="nova-api-log"
Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.992751 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a89c209-8e3e-480d-af7c-fb721529fc5d" containerName="nova-api-api"
Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.994444 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Need to start a new one" pod="openstack/nova-api-0" Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.997310 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.997421 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Jan 22 06:09:31 crc kubenswrapper[4982]: I0122 06:09:31.997863 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 22 06:09:32 crc kubenswrapper[4982]: I0122 06:09:32.014404 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 22 06:09:32 crc kubenswrapper[4982]: I0122 06:09:32.167588 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-logs\") pod \"nova-api-0\" (UID: \"7b4399a2-89c9-4b88-8e2e-e0673539ecf6\") " pod="openstack/nova-api-0" Jan 22 06:09:32 crc kubenswrapper[4982]: I0122 06:09:32.167644 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgvlk\" (UniqueName: \"kubernetes.io/projected/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-kube-api-access-sgvlk\") pod \"nova-api-0\" (UID: \"7b4399a2-89c9-4b88-8e2e-e0673539ecf6\") " pod="openstack/nova-api-0" Jan 22 06:09:32 crc kubenswrapper[4982]: I0122 06:09:32.167729 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7b4399a2-89c9-4b88-8e2e-e0673539ecf6\") " pod="openstack/nova-api-0" Jan 22 06:09:32 crc kubenswrapper[4982]: I0122 06:09:32.167760 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-public-tls-certs\") pod \"nova-api-0\" (UID: \"7b4399a2-89c9-4b88-8e2e-e0673539ecf6\") " pod="openstack/nova-api-0" Jan 22 06:09:32 crc kubenswrapper[4982]: I0122 06:09:32.167828 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-internal-tls-certs\") pod \"nova-api-0\" (UID: \"7b4399a2-89c9-4b88-8e2e-e0673539ecf6\") " pod="openstack/nova-api-0" Jan 22 06:09:32 crc kubenswrapper[4982]: I0122 06:09:32.167927 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-config-data\") pod \"nova-api-0\" (UID: \"7b4399a2-89c9-4b88-8e2e-e0673539ecf6\") " pod="openstack/nova-api-0" Jan 22 06:09:32 crc kubenswrapper[4982]: I0122 06:09:32.269789 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-config-data\") pod \"nova-api-0\" (UID: \"7b4399a2-89c9-4b88-8e2e-e0673539ecf6\") " pod="openstack/nova-api-0" Jan 22 06:09:32 crc kubenswrapper[4982]: I0122 06:09:32.269860 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-logs\") pod \"nova-api-0\" (UID: 
\"7b4399a2-89c9-4b88-8e2e-e0673539ecf6\") " pod="openstack/nova-api-0" Jan 22 06:09:32 crc kubenswrapper[4982]: I0122 06:09:32.269885 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgvlk\" (UniqueName: \"kubernetes.io/projected/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-kube-api-access-sgvlk\") pod \"nova-api-0\" (UID: \"7b4399a2-89c9-4b88-8e2e-e0673539ecf6\") " pod="openstack/nova-api-0" Jan 22 06:09:32 crc kubenswrapper[4982]: I0122 06:09:32.269940 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7b4399a2-89c9-4b88-8e2e-e0673539ecf6\") " pod="openstack/nova-api-0" Jan 22 06:09:32 crc kubenswrapper[4982]: I0122 06:09:32.269962 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-public-tls-certs\") pod \"nova-api-0\" (UID: \"7b4399a2-89c9-4b88-8e2e-e0673539ecf6\") " pod="openstack/nova-api-0" Jan 22 06:09:32 crc kubenswrapper[4982]: I0122 06:09:32.270009 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-internal-tls-certs\") pod \"nova-api-0\" (UID: \"7b4399a2-89c9-4b88-8e2e-e0673539ecf6\") " pod="openstack/nova-api-0" Jan 22 06:09:32 crc kubenswrapper[4982]: I0122 06:09:32.271427 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-logs\") pod \"nova-api-0\" (UID: \"7b4399a2-89c9-4b88-8e2e-e0673539ecf6\") " pod="openstack/nova-api-0" Jan 22 06:09:32 crc kubenswrapper[4982]: I0122 06:09:32.275618 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7b4399a2-89c9-4b88-8e2e-e0673539ecf6\") " pod="openstack/nova-api-0" Jan 22 06:09:32 crc kubenswrapper[4982]: I0122 06:09:32.276049 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-config-data\") pod \"nova-api-0\" (UID: \"7b4399a2-89c9-4b88-8e2e-e0673539ecf6\") " pod="openstack/nova-api-0" Jan 22 06:09:32 crc kubenswrapper[4982]: I0122 06:09:32.276529 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-internal-tls-certs\") pod \"nova-api-0\" (UID: \"7b4399a2-89c9-4b88-8e2e-e0673539ecf6\") " pod="openstack/nova-api-0" Jan 22 06:09:32 crc kubenswrapper[4982]: I0122 06:09:32.290865 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-public-tls-certs\") pod \"nova-api-0\" (UID: \"7b4399a2-89c9-4b88-8e2e-e0673539ecf6\") " pod="openstack/nova-api-0" Jan 22 06:09:32 crc kubenswrapper[4982]: I0122 06:09:32.291957 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgvlk\" (UniqueName: \"kubernetes.io/projected/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-kube-api-access-sgvlk\") pod \"nova-api-0\" (UID: \"7b4399a2-89c9-4b88-8e2e-e0673539ecf6\") " 
pod="openstack/nova-api-0" Jan 22 06:09:32 crc kubenswrapper[4982]: I0122 06:09:32.319566 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 22 06:09:32 crc kubenswrapper[4982]: I0122 06:09:32.665112 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d55e9b1f-cae3-4723-8230-fb22aaa6aed5","Type":"ContainerStarted","Data":"d19c9f2f1d96ed1cae27f3d329cca0fef6df0367d61d6776ff92c575e67d3386"} Jan 22 06:09:32 crc kubenswrapper[4982]: W0122 06:09:32.780449 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7b4399a2_89c9_4b88_8e2e_e0673539ecf6.slice/crio-c09e1e1e8fa9101fefd875cd7ad105ed02f64acc13214a5e6e9f2c89799471e6 WatchSource:0}: Error finding container c09e1e1e8fa9101fefd875cd7ad105ed02f64acc13214a5e6e9f2c89799471e6: Status 404 returned error can't find the container with id c09e1e1e8fa9101fefd875cd7ad105ed02f64acc13214a5e6e9f2c89799471e6 Jan 22 06:09:32 crc kubenswrapper[4982]: I0122 06:09:32.788742 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 22 06:09:33 crc kubenswrapper[4982]: I0122 06:09:33.675267 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7b4399a2-89c9-4b88-8e2e-e0673539ecf6","Type":"ContainerStarted","Data":"52fc67ea4981a2fc40d9fee820d2cc30a32d6182236631d0777c56500713c0b8"} Jan 22 06:09:33 crc kubenswrapper[4982]: I0122 06:09:33.675609 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7b4399a2-89c9-4b88-8e2e-e0673539ecf6","Type":"ContainerStarted","Data":"ae09bda4006f6c08ed050a5819535ae081dd99d7a95d5115ad4d83488b4c60ca"} Jan 22 06:09:33 crc kubenswrapper[4982]: I0122 06:09:33.675620 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7b4399a2-89c9-4b88-8e2e-e0673539ecf6","Type":"ContainerStarted","Data":"c09e1e1e8fa9101fefd875cd7ad105ed02f64acc13214a5e6e9f2c89799471e6"} Jan 22 06:09:33 crc kubenswrapper[4982]: I0122 06:09:33.677531 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d55e9b1f-cae3-4723-8230-fb22aaa6aed5","Type":"ContainerStarted","Data":"846dcac9568b27edafa0ec9c2c747d21c9625ae0f3f5309e075258d5a350f9da"} Jan 22 06:09:33 crc kubenswrapper[4982]: I0122 06:09:33.677789 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d55e9b1f-cae3-4723-8230-fb22aaa6aed5" containerName="ceilometer-central-agent" containerID="cri-o://5b60d2e2a1d1289ffe0aaaf52a1f293fe69aaa614f406399bc38b2a1b58307f1" gracePeriod=30 Jan 22 06:09:33 crc kubenswrapper[4982]: I0122 06:09:33.677894 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d55e9b1f-cae3-4723-8230-fb22aaa6aed5" containerName="proxy-httpd" containerID="cri-o://846dcac9568b27edafa0ec9c2c747d21c9625ae0f3f5309e075258d5a350f9da" gracePeriod=30 Jan 22 06:09:33 crc kubenswrapper[4982]: I0122 06:09:33.677834 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d55e9b1f-cae3-4723-8230-fb22aaa6aed5" containerName="sg-core" containerID="cri-o://d19c9f2f1d96ed1cae27f3d329cca0fef6df0367d61d6776ff92c575e67d3386" gracePeriod=30 Jan 22 06:09:33 crc kubenswrapper[4982]: I0122 06:09:33.677870 4982 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/ceilometer-0" podUID="d55e9b1f-cae3-4723-8230-fb22aaa6aed5" containerName="ceilometer-notification-agent" containerID="cri-o://6da460f48dade095de85e52ee5e115773b95fc39355f4be2d9312925a4711356" gracePeriod=30 Jan 22 06:09:33 crc kubenswrapper[4982]: I0122 06:09:33.677890 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 22 06:09:33 crc kubenswrapper[4982]: I0122 06:09:33.706385 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.706366809 podStartE2EDuration="2.706366809s" podCreationTimestamp="2026-01-22 06:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:09:33.697905896 +0000 UTC m=+1434.536543909" watchObservedRunningTime="2026-01-22 06:09:33.706366809 +0000 UTC m=+1434.545004812" Jan 22 06:09:33 crc kubenswrapper[4982]: I0122 06:09:33.724965 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.62025981 podStartE2EDuration="5.724945049s" podCreationTimestamp="2026-01-22 06:09:28 +0000 UTC" firstStartedPulling="2026-01-22 06:09:29.683039715 +0000 UTC m=+1430.521677718" lastFinishedPulling="2026-01-22 06:09:32.787724954 +0000 UTC m=+1433.626362957" observedRunningTime="2026-01-22 06:09:33.721009225 +0000 UTC m=+1434.559647218" watchObservedRunningTime="2026-01-22 06:09:33.724945049 +0000 UTC m=+1434.563583052" Jan 22 06:09:33 crc kubenswrapper[4982]: I0122 06:09:33.730259 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a89c209-8e3e-480d-af7c-fb721529fc5d" path="/var/lib/kubelet/pods/8a89c209-8e3e-480d-af7c-fb721529fc5d/volumes" Jan 22 06:09:33 crc kubenswrapper[4982]: I0122 06:09:33.922494 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:09:33 crc kubenswrapper[4982]: I0122 06:09:33.942731 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:09:34 crc kubenswrapper[4982]: I0122 06:09:34.686580 4982 generic.go:334] "Generic (PLEG): container finished" podID="d55e9b1f-cae3-4723-8230-fb22aaa6aed5" containerID="846dcac9568b27edafa0ec9c2c747d21c9625ae0f3f5309e075258d5a350f9da" exitCode=0 Jan 22 06:09:34 crc kubenswrapper[4982]: I0122 06:09:34.686862 4982 generic.go:334] "Generic (PLEG): container finished" podID="d55e9b1f-cae3-4723-8230-fb22aaa6aed5" containerID="d19c9f2f1d96ed1cae27f3d329cca0fef6df0367d61d6776ff92c575e67d3386" exitCode=2 Jan 22 06:09:34 crc kubenswrapper[4982]: I0122 06:09:34.686871 4982 generic.go:334] "Generic (PLEG): container finished" podID="d55e9b1f-cae3-4723-8230-fb22aaa6aed5" containerID="6da460f48dade095de85e52ee5e115773b95fc39355f4be2d9312925a4711356" exitCode=0 Jan 22 06:09:34 crc kubenswrapper[4982]: I0122 06:09:34.686675 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d55e9b1f-cae3-4723-8230-fb22aaa6aed5","Type":"ContainerDied","Data":"846dcac9568b27edafa0ec9c2c747d21c9625ae0f3f5309e075258d5a350f9da"} Jan 22 06:09:34 crc kubenswrapper[4982]: I0122 06:09:34.686979 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d55e9b1f-cae3-4723-8230-fb22aaa6aed5","Type":"ContainerDied","Data":"d19c9f2f1d96ed1cae27f3d329cca0fef6df0367d61d6776ff92c575e67d3386"} Jan 22 06:09:34 crc kubenswrapper[4982]: 
I0122 06:09:34.686996 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d55e9b1f-cae3-4723-8230-fb22aaa6aed5","Type":"ContainerDied","Data":"6da460f48dade095de85e52ee5e115773b95fc39355f4be2d9312925a4711356"} Jan 22 06:09:34 crc kubenswrapper[4982]: I0122 06:09:34.704179 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Jan 22 06:09:34 crc kubenswrapper[4982]: I0122 06:09:34.856695 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-vqgqq"] Jan 22 06:09:34 crc kubenswrapper[4982]: I0122 06:09:34.858827 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-vqgqq" Jan 22 06:09:34 crc kubenswrapper[4982]: I0122 06:09:34.864953 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Jan 22 06:09:34 crc kubenswrapper[4982]: I0122 06:09:34.867219 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Jan 22 06:09:34 crc kubenswrapper[4982]: I0122 06:09:34.879331 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-vqgqq"] Jan 22 06:09:34 crc kubenswrapper[4982]: I0122 06:09:34.924312 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/113886a4-e806-4fd8-b678-f8051fa8b3ba-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-vqgqq\" (UID: \"113886a4-e806-4fd8-b678-f8051fa8b3ba\") " pod="openstack/nova-cell1-cell-mapping-vqgqq" Jan 22 06:09:34 crc kubenswrapper[4982]: I0122 06:09:34.924446 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/113886a4-e806-4fd8-b678-f8051fa8b3ba-config-data\") pod \"nova-cell1-cell-mapping-vqgqq\" (UID: \"113886a4-e806-4fd8-b678-f8051fa8b3ba\") " pod="openstack/nova-cell1-cell-mapping-vqgqq" Jan 22 06:09:34 crc kubenswrapper[4982]: I0122 06:09:34.924478 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvggm\" (UniqueName: \"kubernetes.io/projected/113886a4-e806-4fd8-b678-f8051fa8b3ba-kube-api-access-vvggm\") pod \"nova-cell1-cell-mapping-vqgqq\" (UID: \"113886a4-e806-4fd8-b678-f8051fa8b3ba\") " pod="openstack/nova-cell1-cell-mapping-vqgqq" Jan 22 06:09:34 crc kubenswrapper[4982]: I0122 06:09:34.924619 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/113886a4-e806-4fd8-b678-f8051fa8b3ba-scripts\") pod \"nova-cell1-cell-mapping-vqgqq\" (UID: \"113886a4-e806-4fd8-b678-f8051fa8b3ba\") " pod="openstack/nova-cell1-cell-mapping-vqgqq" Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.026075 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/113886a4-e806-4fd8-b678-f8051fa8b3ba-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-vqgqq\" (UID: \"113886a4-e806-4fd8-b678-f8051fa8b3ba\") " pod="openstack/nova-cell1-cell-mapping-vqgqq" Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.026162 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
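
The pod_startup_latency_tracker entries above print their timestamps in Go's time.Time string form, with a monotonic-clock suffix ("m=+...") on the observed times. A sketch that re-derives podStartE2EDuration for nova-api-0 from the two timestamps quoted in that entry (the parsing layout is an assumption matched to the printed form):

    package main

    import (
    	"fmt"
    	"strings"
    	"time"
    )

    const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

    // parseKubeletTime drops the monotonic-clock suffix ("m=+1434.5..."),
    // which time.Parse does not accept, then parses the wall-clock part.
    func parseKubeletTime(s string) (time.Time, error) {
    	if i := strings.Index(s, " m="); i >= 0 {
    		s = s[:i]
    	}
    	return time.Parse(layout, s)
    }

    func main() {
    	created, err := parseKubeletTime("2026-01-22 06:09:31 +0000 UTC")
    	if err != nil {
    		panic(err)
    	}
    	running, err := parseKubeletTime("2026-01-22 06:09:33.706366809 +0000 UTC m=+1434.545004812")
    	if err != nil {
    		panic(err)
    	}
    	// Prints 2.706366809s, matching podStartE2EDuration="2.706366809s" above.
    	fmt.Println(running.Sub(created))
    }
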
\"kubernetes.io/secret/113886a4-e806-4fd8-b678-f8051fa8b3ba-config-data\") pod \"nova-cell1-cell-mapping-vqgqq\" (UID: \"113886a4-e806-4fd8-b678-f8051fa8b3ba\") " pod="openstack/nova-cell1-cell-mapping-vqgqq" Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.026182 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvggm\" (UniqueName: \"kubernetes.io/projected/113886a4-e806-4fd8-b678-f8051fa8b3ba-kube-api-access-vvggm\") pod \"nova-cell1-cell-mapping-vqgqq\" (UID: \"113886a4-e806-4fd8-b678-f8051fa8b3ba\") " pod="openstack/nova-cell1-cell-mapping-vqgqq" Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.026228 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/113886a4-e806-4fd8-b678-f8051fa8b3ba-scripts\") pod \"nova-cell1-cell-mapping-vqgqq\" (UID: \"113886a4-e806-4fd8-b678-f8051fa8b3ba\") " pod="openstack/nova-cell1-cell-mapping-vqgqq" Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.037507 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/113886a4-e806-4fd8-b678-f8051fa8b3ba-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-vqgqq\" (UID: \"113886a4-e806-4fd8-b678-f8051fa8b3ba\") " pod="openstack/nova-cell1-cell-mapping-vqgqq" Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.037953 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/113886a4-e806-4fd8-b678-f8051fa8b3ba-scripts\") pod \"nova-cell1-cell-mapping-vqgqq\" (UID: \"113886a4-e806-4fd8-b678-f8051fa8b3ba\") " pod="openstack/nova-cell1-cell-mapping-vqgqq" Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.038446 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/113886a4-e806-4fd8-b678-f8051fa8b3ba-config-data\") pod \"nova-cell1-cell-mapping-vqgqq\" (UID: \"113886a4-e806-4fd8-b678-f8051fa8b3ba\") " pod="openstack/nova-cell1-cell-mapping-vqgqq" Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.045143 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvggm\" (UniqueName: \"kubernetes.io/projected/113886a4-e806-4fd8-b678-f8051fa8b3ba-kube-api-access-vvggm\") pod \"nova-cell1-cell-mapping-vqgqq\" (UID: \"113886a4-e806-4fd8-b678-f8051fa8b3ba\") " pod="openstack/nova-cell1-cell-mapping-vqgqq" Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.071035 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-fcd6f8f8f-4kgmx" Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.136279 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-647df7b8c5-qklw7"] Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.136552 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-647df7b8c5-qklw7" podUID="9918bf38-2e78-4627-b996-05f42319fb53" containerName="dnsmasq-dns" containerID="cri-o://5dc16f0c54d8ae6a28c68a312b07df0a689510c87837e0a4b7e78b5b759f00ea" gracePeriod=10 Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.176531 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-vqgqq" Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.665426 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-vqgqq"] Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.698470 4982 generic.go:334] "Generic (PLEG): container finished" podID="9918bf38-2e78-4627-b996-05f42319fb53" containerID="5dc16f0c54d8ae6a28c68a312b07df0a689510c87837e0a4b7e78b5b759f00ea" exitCode=0 Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.698546 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-647df7b8c5-qklw7" event={"ID":"9918bf38-2e78-4627-b996-05f42319fb53","Type":"ContainerDied","Data":"5dc16f0c54d8ae6a28c68a312b07df0a689510c87837e0a4b7e78b5b759f00ea"} Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.698576 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-647df7b8c5-qklw7" event={"ID":"9918bf38-2e78-4627-b996-05f42319fb53","Type":"ContainerDied","Data":"c008dc238a63533c7abd6e70661e790286c6c0f9bdc45bcecda7850d99e0233b"} Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.698591 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c008dc238a63533c7abd6e70661e790286c6c0f9bdc45bcecda7850d99e0233b" Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.700583 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-vqgqq" event={"ID":"113886a4-e806-4fd8-b678-f8051fa8b3ba","Type":"ContainerStarted","Data":"074cfeb37e0979f7ba42891766fac5e5a9428faa37dbf2dd0554381c9d3e0aec"} Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.702364 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-647df7b8c5-qklw7" Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.842271 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9918bf38-2e78-4627-b996-05f42319fb53-dns-swift-storage-0\") pod \"9918bf38-2e78-4627-b996-05f42319fb53\" (UID: \"9918bf38-2e78-4627-b996-05f42319fb53\") " Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.842329 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9918bf38-2e78-4627-b996-05f42319fb53-ovsdbserver-sb\") pod \"9918bf38-2e78-4627-b996-05f42319fb53\" (UID: \"9918bf38-2e78-4627-b996-05f42319fb53\") " Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.842355 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9918bf38-2e78-4627-b996-05f42319fb53-dns-svc\") pod \"9918bf38-2e78-4627-b996-05f42319fb53\" (UID: \"9918bf38-2e78-4627-b996-05f42319fb53\") " Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.842445 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d79cp\" (UniqueName: \"kubernetes.io/projected/9918bf38-2e78-4627-b996-05f42319fb53-kube-api-access-d79cp\") pod \"9918bf38-2e78-4627-b996-05f42319fb53\" (UID: \"9918bf38-2e78-4627-b996-05f42319fb53\") " Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.842467 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9918bf38-2e78-4627-b996-05f42319fb53-config\") pod \"9918bf38-2e78-4627-b996-05f42319fb53\" 
(UID: \"9918bf38-2e78-4627-b996-05f42319fb53\") " Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.842546 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9918bf38-2e78-4627-b996-05f42319fb53-ovsdbserver-nb\") pod \"9918bf38-2e78-4627-b996-05f42319fb53\" (UID: \"9918bf38-2e78-4627-b996-05f42319fb53\") " Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.848090 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9918bf38-2e78-4627-b996-05f42319fb53-kube-api-access-d79cp" (OuterVolumeSpecName: "kube-api-access-d79cp") pod "9918bf38-2e78-4627-b996-05f42319fb53" (UID: "9918bf38-2e78-4627-b996-05f42319fb53"). InnerVolumeSpecName "kube-api-access-d79cp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.889596 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9918bf38-2e78-4627-b996-05f42319fb53-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9918bf38-2e78-4627-b996-05f42319fb53" (UID: "9918bf38-2e78-4627-b996-05f42319fb53"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.890488 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9918bf38-2e78-4627-b996-05f42319fb53-config" (OuterVolumeSpecName: "config") pod "9918bf38-2e78-4627-b996-05f42319fb53" (UID: "9918bf38-2e78-4627-b996-05f42319fb53"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.892178 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9918bf38-2e78-4627-b996-05f42319fb53-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "9918bf38-2e78-4627-b996-05f42319fb53" (UID: "9918bf38-2e78-4627-b996-05f42319fb53"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.895731 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9918bf38-2e78-4627-b996-05f42319fb53-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9918bf38-2e78-4627-b996-05f42319fb53" (UID: "9918bf38-2e78-4627-b996-05f42319fb53"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.898171 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9918bf38-2e78-4627-b996-05f42319fb53-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9918bf38-2e78-4627-b996-05f42319fb53" (UID: "9918bf38-2e78-4627-b996-05f42319fb53"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.945091 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9918bf38-2e78-4627-b996-05f42319fb53-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.945123 4982 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9918bf38-2e78-4627-b996-05f42319fb53-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.945132 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9918bf38-2e78-4627-b996-05f42319fb53-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.945141 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9918bf38-2e78-4627-b996-05f42319fb53-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.945149 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d79cp\" (UniqueName: \"kubernetes.io/projected/9918bf38-2e78-4627-b996-05f42319fb53-kube-api-access-d79cp\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:35 crc kubenswrapper[4982]: I0122 06:09:35.945158 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9918bf38-2e78-4627-b996-05f42319fb53-config\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:36 crc kubenswrapper[4982]: I0122 06:09:36.714665 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-647df7b8c5-qklw7" Jan 22 06:09:36 crc kubenswrapper[4982]: I0122 06:09:36.716367 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-vqgqq" event={"ID":"113886a4-e806-4fd8-b678-f8051fa8b3ba","Type":"ContainerStarted","Data":"ac4f72c1c8b619faa864b7a4128f74ea5d0ae3cb46983cd3fb19b4ca11cd328d"} Jan 22 06:09:36 crc kubenswrapper[4982]: I0122 06:09:36.800998 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-vqgqq" podStartSLOduration=2.800976743 podStartE2EDuration="2.800976743s" podCreationTimestamp="2026-01-22 06:09:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:09:36.747177497 +0000 UTC m=+1437.585815540" watchObservedRunningTime="2026-01-22 06:09:36.800976743 +0000 UTC m=+1437.639614766" Jan 22 06:09:36 crc kubenswrapper[4982]: I0122 06:09:36.801332 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-647df7b8c5-qklw7"] Jan 22 06:09:36 crc kubenswrapper[4982]: I0122 06:09:36.815899 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-647df7b8c5-qklw7"] Jan 22 06:09:37 crc kubenswrapper[4982]: I0122 06:09:37.764744 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9918bf38-2e78-4627-b996-05f42319fb53" path="/var/lib/kubelet/pods/9918bf38-2e78-4627-b996-05f42319fb53/volumes" Jan 22 06:09:37 crc kubenswrapper[4982]: I0122 06:09:37.769196 4982 generic.go:334] "Generic (PLEG): container finished" podID="d55e9b1f-cae3-4723-8230-fb22aaa6aed5" 
containerID="5b60d2e2a1d1289ffe0aaaf52a1f293fe69aaa614f406399bc38b2a1b58307f1" exitCode=0 Jan 22 06:09:37 crc kubenswrapper[4982]: I0122 06:09:37.770004 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d55e9b1f-cae3-4723-8230-fb22aaa6aed5","Type":"ContainerDied","Data":"5b60d2e2a1d1289ffe0aaaf52a1f293fe69aaa614f406399bc38b2a1b58307f1"} Jan 22 06:09:37 crc kubenswrapper[4982]: I0122 06:09:37.855199 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fqrnq" Jan 22 06:09:37 crc kubenswrapper[4982]: I0122 06:09:37.936996 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fqrnq" Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.113263 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fqrnq"] Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.158655 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.205118 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-run-httpd\") pod \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\" (UID: \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\") " Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.205585 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-scripts\") pod \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\" (UID: \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\") " Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.205625 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-combined-ca-bundle\") pod \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\" (UID: \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\") " Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.205645 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-sg-core-conf-yaml\") pod \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\" (UID: \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\") " Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.205696 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d55e9b1f-cae3-4723-8230-fb22aaa6aed5" (UID: "d55e9b1f-cae3-4723-8230-fb22aaa6aed5"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.208667 4982 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.218896 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-scripts" (OuterVolumeSpecName: "scripts") pod "d55e9b1f-cae3-4723-8230-fb22aaa6aed5" (UID: "d55e9b1f-cae3-4723-8230-fb22aaa6aed5"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.233664 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "d55e9b1f-cae3-4723-8230-fb22aaa6aed5" (UID: "d55e9b1f-cae3-4723-8230-fb22aaa6aed5"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.308070 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d55e9b1f-cae3-4723-8230-fb22aaa6aed5" (UID: "d55e9b1f-cae3-4723-8230-fb22aaa6aed5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.310010 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-config-data\") pod \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\" (UID: \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\") " Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.310120 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rsbz6\" (UniqueName: \"kubernetes.io/projected/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-kube-api-access-rsbz6\") pod \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\" (UID: \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\") " Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.310321 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-log-httpd\") pod \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\" (UID: \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\") " Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.310479 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-ceilometer-tls-certs\") pod \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\" (UID: \"d55e9b1f-cae3-4723-8230-fb22aaa6aed5\") " Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.311272 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.311302 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.311322 4982 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.313052 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d55e9b1f-cae3-4723-8230-fb22aaa6aed5" (UID: "d55e9b1f-cae3-4723-8230-fb22aaa6aed5"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.315135 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-kube-api-access-rsbz6" (OuterVolumeSpecName: "kube-api-access-rsbz6") pod "d55e9b1f-cae3-4723-8230-fb22aaa6aed5" (UID: "d55e9b1f-cae3-4723-8230-fb22aaa6aed5"). InnerVolumeSpecName "kube-api-access-rsbz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.380217 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "d55e9b1f-cae3-4723-8230-fb22aaa6aed5" (UID: "d55e9b1f-cae3-4723-8230-fb22aaa6aed5"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.399800 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-config-data" (OuterVolumeSpecName: "config-data") pod "d55e9b1f-cae3-4723-8230-fb22aaa6aed5" (UID: "d55e9b1f-cae3-4723-8230-fb22aaa6aed5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.412739 4982 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.412813 4982 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.412827 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.412836 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rsbz6\" (UniqueName: \"kubernetes.io/projected/d55e9b1f-cae3-4723-8230-fb22aaa6aed5-kube-api-access-rsbz6\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.778052 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.778681 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d55e9b1f-cae3-4723-8230-fb22aaa6aed5","Type":"ContainerDied","Data":"c310bd457c35713d030fc922a1560b330a0368a0520f3e9e5b80bfe6262bbca2"} Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.779177 4982 scope.go:117] "RemoveContainer" containerID="846dcac9568b27edafa0ec9c2c747d21c9625ae0f3f5309e075258d5a350f9da" Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.811611 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.818512 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.822607 4982 scope.go:117] "RemoveContainer" containerID="d19c9f2f1d96ed1cae27f3d329cca0fef6df0367d61d6776ff92c575e67d3386" Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.840321 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:09:38 crc kubenswrapper[4982]: E0122 06:09:38.840687 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d55e9b1f-cae3-4723-8230-fb22aaa6aed5" containerName="sg-core" Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.840704 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d55e9b1f-cae3-4723-8230-fb22aaa6aed5" containerName="sg-core" Jan 22 06:09:38 crc kubenswrapper[4982]: E0122 06:09:38.840722 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9918bf38-2e78-4627-b996-05f42319fb53" containerName="dnsmasq-dns" Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.840731 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="9918bf38-2e78-4627-b996-05f42319fb53" containerName="dnsmasq-dns" Jan 22 06:09:38 crc kubenswrapper[4982]: E0122 06:09:38.840755 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d55e9b1f-cae3-4723-8230-fb22aaa6aed5" containerName="proxy-httpd" Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.840764 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d55e9b1f-cae3-4723-8230-fb22aaa6aed5" containerName="proxy-httpd" Jan 22 06:09:38 crc kubenswrapper[4982]: E0122 06:09:38.840777 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d55e9b1f-cae3-4723-8230-fb22aaa6aed5" containerName="ceilometer-central-agent" Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.840783 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d55e9b1f-cae3-4723-8230-fb22aaa6aed5" containerName="ceilometer-central-agent" Jan 22 06:09:38 crc kubenswrapper[4982]: E0122 06:09:38.840796 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d55e9b1f-cae3-4723-8230-fb22aaa6aed5" containerName="ceilometer-notification-agent" Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.840802 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d55e9b1f-cae3-4723-8230-fb22aaa6aed5" containerName="ceilometer-notification-agent" Jan 22 06:09:38 crc kubenswrapper[4982]: E0122 06:09:38.840811 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9918bf38-2e78-4627-b996-05f42319fb53" containerName="init" Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.840817 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="9918bf38-2e78-4627-b996-05f42319fb53" containerName="init" Jan 22 06:09:38 crc 
Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.841016 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d55e9b1f-cae3-4723-8230-fb22aaa6aed5" containerName="ceilometer-central-agent"
Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.841034 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d55e9b1f-cae3-4723-8230-fb22aaa6aed5" containerName="sg-core"
Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.841048 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d55e9b1f-cae3-4723-8230-fb22aaa6aed5" containerName="proxy-httpd"
Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.841058 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="9918bf38-2e78-4627-b996-05f42319fb53" containerName="dnsmasq-dns"
Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.842594 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.844968 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.845076 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc"
Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.845147 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.846821 4982 scope.go:117] "RemoveContainer" containerID="6da460f48dade095de85e52ee5e115773b95fc39355f4be2d9312925a4711356"
Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.871199 4982 scope.go:117] "RemoveContainer" containerID="5b60d2e2a1d1289ffe0aaaf52a1f293fe69aaa614f406399bc38b2a1b58307f1"
Jan 22 06:09:38 crc kubenswrapper[4982]: I0122 06:09:38.872638 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Jan 22 06:09:39 crc kubenswrapper[4982]: I0122 06:09:39.025734 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/681a16ff-1468-4a9a-a692-5461230072bd-log-httpd\") pod \"ceilometer-0\" (UID: \"681a16ff-1468-4a9a-a692-5461230072bd\") " pod="openstack/ceilometer-0"
Jan 22 06:09:39 crc kubenswrapper[4982]: I0122 06:09:39.025835 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/681a16ff-1468-4a9a-a692-5461230072bd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"681a16ff-1468-4a9a-a692-5461230072bd\") " pod="openstack/ceilometer-0"
Jan 22 06:09:39 crc kubenswrapper[4982]: I0122 06:09:39.025917 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/681a16ff-1468-4a9a-a692-5461230072bd-run-httpd\") pod \"ceilometer-0\" (UID: \"681a16ff-1468-4a9a-a692-5461230072bd\") " pod="openstack/ceilometer-0"
Jan 22 06:09:39 crc kubenswrapper[4982]: I0122 06:09:39.025972 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/681a16ff-1468-4a9a-a692-5461230072bd-config-data\") pod \"ceilometer-0\" (UID: \"681a16ff-1468-4a9a-a692-5461230072bd\") " pod="openstack/ceilometer-0"
Jan 22 06:09:39 crc kubenswrapper[4982]: I0122 06:09:39.026005 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nnb89\" (UniqueName: \"kubernetes.io/projected/681a16ff-1468-4a9a-a692-5461230072bd-kube-api-access-nnb89\") pod \"ceilometer-0\" (UID: \"681a16ff-1468-4a9a-a692-5461230072bd\") " pod="openstack/ceilometer-0"
Jan 22 06:09:39 crc kubenswrapper[4982]: I0122 06:09:39.026054 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/681a16ff-1468-4a9a-a692-5461230072bd-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"681a16ff-1468-4a9a-a692-5461230072bd\") " pod="openstack/ceilometer-0"
Jan 22 06:09:39 crc kubenswrapper[4982]: I0122 06:09:39.026089 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/681a16ff-1468-4a9a-a692-5461230072bd-scripts\") pod \"ceilometer-0\" (UID: \"681a16ff-1468-4a9a-a692-5461230072bd\") " pod="openstack/ceilometer-0"
Jan 22 06:09:39 crc kubenswrapper[4982]: I0122 06:09:39.026146 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/681a16ff-1468-4a9a-a692-5461230072bd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"681a16ff-1468-4a9a-a692-5461230072bd\") " pod="openstack/ceilometer-0"
Jan 22 06:09:39 crc kubenswrapper[4982]: I0122 06:09:39.128075 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/681a16ff-1468-4a9a-a692-5461230072bd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"681a16ff-1468-4a9a-a692-5461230072bd\") " pod="openstack/ceilometer-0"
Jan 22 06:09:39 crc kubenswrapper[4982]: I0122 06:09:39.128151 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/681a16ff-1468-4a9a-a692-5461230072bd-run-httpd\") pod \"ceilometer-0\" (UID: \"681a16ff-1468-4a9a-a692-5461230072bd\") " pod="openstack/ceilometer-0"
Jan 22 06:09:39 crc kubenswrapper[4982]: I0122 06:09:39.128186 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/681a16ff-1468-4a9a-a692-5461230072bd-config-data\") pod \"ceilometer-0\" (UID: \"681a16ff-1468-4a9a-a692-5461230072bd\") " pod="openstack/ceilometer-0"
Jan 22 06:09:39 crc kubenswrapper[4982]: I0122 06:09:39.128207 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nnb89\" (UniqueName: \"kubernetes.io/projected/681a16ff-1468-4a9a-a692-5461230072bd-kube-api-access-nnb89\") pod \"ceilometer-0\" (UID: \"681a16ff-1468-4a9a-a692-5461230072bd\") " pod="openstack/ceilometer-0"
Jan 22 06:09:39 crc kubenswrapper[4982]: I0122 06:09:39.128245 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/681a16ff-1468-4a9a-a692-5461230072bd-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"681a16ff-1468-4a9a-a692-5461230072bd\") " pod="openstack/ceilometer-0"
Jan 22 06:09:39 crc kubenswrapper[4982]: I0122 06:09:39.128273 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/681a16ff-1468-4a9a-a692-5461230072bd-scripts\") pod \"ceilometer-0\" (UID: \"681a16ff-1468-4a9a-a692-5461230072bd\") " pod="openstack/ceilometer-0"
Jan 22 06:09:39 crc kubenswrapper[4982]: I0122 06:09:39.128315 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/681a16ff-1468-4a9a-a692-5461230072bd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"681a16ff-1468-4a9a-a692-5461230072bd\") " pod="openstack/ceilometer-0"
Jan 22 06:09:39 crc kubenswrapper[4982]: I0122 06:09:39.128390 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/681a16ff-1468-4a9a-a692-5461230072bd-log-httpd\") pod \"ceilometer-0\" (UID: \"681a16ff-1468-4a9a-a692-5461230072bd\") " pod="openstack/ceilometer-0"
Jan 22 06:09:39 crc kubenswrapper[4982]: I0122 06:09:39.129040 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/681a16ff-1468-4a9a-a692-5461230072bd-log-httpd\") pod \"ceilometer-0\" (UID: \"681a16ff-1468-4a9a-a692-5461230072bd\") " pod="openstack/ceilometer-0"
Jan 22 06:09:39 crc kubenswrapper[4982]: I0122 06:09:39.131254 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/681a16ff-1468-4a9a-a692-5461230072bd-run-httpd\") pod \"ceilometer-0\" (UID: \"681a16ff-1468-4a9a-a692-5461230072bd\") " pod="openstack/ceilometer-0"
Jan 22 06:09:39 crc kubenswrapper[4982]: I0122 06:09:39.133580 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/681a16ff-1468-4a9a-a692-5461230072bd-scripts\") pod \"ceilometer-0\" (UID: \"681a16ff-1468-4a9a-a692-5461230072bd\") " pod="openstack/ceilometer-0"
Jan 22 06:09:39 crc kubenswrapper[4982]: I0122 06:09:39.137077 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/681a16ff-1468-4a9a-a692-5461230072bd-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"681a16ff-1468-4a9a-a692-5461230072bd\") " pod="openstack/ceilometer-0"
Jan 22 06:09:39 crc kubenswrapper[4982]: I0122 06:09:39.138169 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/681a16ff-1468-4a9a-a692-5461230072bd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"681a16ff-1468-4a9a-a692-5461230072bd\") " pod="openstack/ceilometer-0"
Jan 22 06:09:39 crc kubenswrapper[4982]: I0122 06:09:39.138175 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/681a16ff-1468-4a9a-a692-5461230072bd-config-data\") pod \"ceilometer-0\" (UID: \"681a16ff-1468-4a9a-a692-5461230072bd\") " pod="openstack/ceilometer-0"
Jan 22 06:09:39 crc kubenswrapper[4982]: I0122 06:09:39.138968 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/681a16ff-1468-4a9a-a692-5461230072bd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"681a16ff-1468-4a9a-a692-5461230072bd\") " pod="openstack/ceilometer-0"
Jan 22 06:09:39 crc kubenswrapper[4982]: I0122 06:09:39.149576 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nnb89\" (UniqueName: \"kubernetes.io/projected/681a16ff-1468-4a9a-a692-5461230072bd-kube-api-access-nnb89\") pod \"ceilometer-0\" (UID: \"681a16ff-1468-4a9a-a692-5461230072bd\") " pod="openstack/ceilometer-0"
Jan 22 06:09:39 crc kubenswrapper[4982]: I0122 06:09:39.170691 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 22 06:09:39 crc kubenswrapper[4982]: I0122 06:09:39.639071 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Jan 22 06:09:39 crc kubenswrapper[4982]: I0122 06:09:39.738058 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d55e9b1f-cae3-4723-8230-fb22aaa6aed5" path="/var/lib/kubelet/pods/d55e9b1f-cae3-4723-8230-fb22aaa6aed5/volumes"
Jan 22 06:09:39 crc kubenswrapper[4982]: I0122 06:09:39.789183 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"681a16ff-1468-4a9a-a692-5461230072bd","Type":"ContainerStarted","Data":"d96692ab4a1d200bcbd8c10460467ca5d15d0c37b9022e28e18ea1cad13649e0"}
Jan 22 06:09:39 crc kubenswrapper[4982]: I0122 06:09:39.790947 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fqrnq" podUID="f885c65c-ba3d-498c-a094-4abbc117efdc" containerName="registry-server" containerID="cri-o://220f4b283b3f9a517a803b7ddf7eab12d76be773c8d7870ec22e070be16fddc8" gracePeriod=2
Jan 22 06:09:40 crc kubenswrapper[4982]: I0122 06:09:40.344373 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fqrnq"
Jan 22 06:09:40 crc kubenswrapper[4982]: I0122 06:09:40.455447 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f885c65c-ba3d-498c-a094-4abbc117efdc-catalog-content\") pod \"f885c65c-ba3d-498c-a094-4abbc117efdc\" (UID: \"f885c65c-ba3d-498c-a094-4abbc117efdc\") "
Jan 22 06:09:40 crc kubenswrapper[4982]: I0122 06:09:40.455610 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f885c65c-ba3d-498c-a094-4abbc117efdc-utilities\") pod \"f885c65c-ba3d-498c-a094-4abbc117efdc\" (UID: \"f885c65c-ba3d-498c-a094-4abbc117efdc\") "
Jan 22 06:09:40 crc kubenswrapper[4982]: I0122 06:09:40.455649 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dkzdw\" (UniqueName: \"kubernetes.io/projected/f885c65c-ba3d-498c-a094-4abbc117efdc-kube-api-access-dkzdw\") pod \"f885c65c-ba3d-498c-a094-4abbc117efdc\" (UID: \"f885c65c-ba3d-498c-a094-4abbc117efdc\") "
Jan 22 06:09:40 crc kubenswrapper[4982]: I0122 06:09:40.456579 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f885c65c-ba3d-498c-a094-4abbc117efdc-utilities" (OuterVolumeSpecName: "utilities") pod "f885c65c-ba3d-498c-a094-4abbc117efdc" (UID: "f885c65c-ba3d-498c-a094-4abbc117efdc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 06:09:40 crc kubenswrapper[4982]: I0122 06:09:40.463770 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f885c65c-ba3d-498c-a094-4abbc117efdc-kube-api-access-dkzdw" (OuterVolumeSpecName: "kube-api-access-dkzdw") pod "f885c65c-ba3d-498c-a094-4abbc117efdc" (UID: "f885c65c-ba3d-498c-a094-4abbc117efdc"). InnerVolumeSpecName "kube-api-access-dkzdw". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:09:40 crc kubenswrapper[4982]: I0122 06:09:40.557884 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f885c65c-ba3d-498c-a094-4abbc117efdc-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:40 crc kubenswrapper[4982]: I0122 06:09:40.558429 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dkzdw\" (UniqueName: \"kubernetes.io/projected/f885c65c-ba3d-498c-a094-4abbc117efdc-kube-api-access-dkzdw\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:40 crc kubenswrapper[4982]: I0122 06:09:40.577932 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f885c65c-ba3d-498c-a094-4abbc117efdc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f885c65c-ba3d-498c-a094-4abbc117efdc" (UID: "f885c65c-ba3d-498c-a094-4abbc117efdc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:09:40 crc kubenswrapper[4982]: I0122 06:09:40.660965 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f885c65c-ba3d-498c-a094-4abbc117efdc-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:40 crc kubenswrapper[4982]: I0122 06:09:40.808048 4982 generic.go:334] "Generic (PLEG): container finished" podID="f885c65c-ba3d-498c-a094-4abbc117efdc" containerID="220f4b283b3f9a517a803b7ddf7eab12d76be773c8d7870ec22e070be16fddc8" exitCode=0 Jan 22 06:09:40 crc kubenswrapper[4982]: I0122 06:09:40.811050 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fqrnq" Jan 22 06:09:40 crc kubenswrapper[4982]: I0122 06:09:40.811070 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fqrnq" event={"ID":"f885c65c-ba3d-498c-a094-4abbc117efdc","Type":"ContainerDied","Data":"220f4b283b3f9a517a803b7ddf7eab12d76be773c8d7870ec22e070be16fddc8"} Jan 22 06:09:40 crc kubenswrapper[4982]: I0122 06:09:40.811109 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fqrnq" event={"ID":"f885c65c-ba3d-498c-a094-4abbc117efdc","Type":"ContainerDied","Data":"3643f87b38e5a5c216b86137f5e34f8b69f86191550c34a15ee53ee196c33f03"} Jan 22 06:09:40 crc kubenswrapper[4982]: I0122 06:09:40.811128 4982 scope.go:117] "RemoveContainer" containerID="220f4b283b3f9a517a803b7ddf7eab12d76be773c8d7870ec22e070be16fddc8" Jan 22 06:09:40 crc kubenswrapper[4982]: I0122 06:09:40.830293 4982 generic.go:334] "Generic (PLEG): container finished" podID="113886a4-e806-4fd8-b678-f8051fa8b3ba" containerID="ac4f72c1c8b619faa864b7a4128f74ea5d0ae3cb46983cd3fb19b4ca11cd328d" exitCode=0 Jan 22 06:09:40 crc kubenswrapper[4982]: I0122 06:09:40.830395 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-vqgqq" event={"ID":"113886a4-e806-4fd8-b678-f8051fa8b3ba","Type":"ContainerDied","Data":"ac4f72c1c8b619faa864b7a4128f74ea5d0ae3cb46983cd3fb19b4ca11cd328d"} Jan 22 06:09:40 crc kubenswrapper[4982]: I0122 06:09:40.839504 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"681a16ff-1468-4a9a-a692-5461230072bd","Type":"ContainerStarted","Data":"7f2cbf5fe89ef897372c71697b853e4151f7bdaf7d9e22d86ada81c4c27538fd"} Jan 22 06:09:40 crc kubenswrapper[4982]: I0122 06:09:40.924016 4982 scope.go:117] "RemoveContainer" 
containerID="131adb6c781ce68e1766d68430c19ef8432eb18ef963f8f4025af704966f7693" Jan 22 06:09:40 crc kubenswrapper[4982]: I0122 06:09:40.924437 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fqrnq"] Jan 22 06:09:40 crc kubenswrapper[4982]: I0122 06:09:40.935281 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fqrnq"] Jan 22 06:09:40 crc kubenswrapper[4982]: I0122 06:09:40.994823 4982 scope.go:117] "RemoveContainer" containerID="124481cd899bded3e4740e2305a0c0d1adb8d057885a374b36c160e3f8fede08" Jan 22 06:09:41 crc kubenswrapper[4982]: I0122 06:09:41.032758 4982 scope.go:117] "RemoveContainer" containerID="220f4b283b3f9a517a803b7ddf7eab12d76be773c8d7870ec22e070be16fddc8" Jan 22 06:09:41 crc kubenswrapper[4982]: E0122 06:09:41.033832 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"220f4b283b3f9a517a803b7ddf7eab12d76be773c8d7870ec22e070be16fddc8\": container with ID starting with 220f4b283b3f9a517a803b7ddf7eab12d76be773c8d7870ec22e070be16fddc8 not found: ID does not exist" containerID="220f4b283b3f9a517a803b7ddf7eab12d76be773c8d7870ec22e070be16fddc8" Jan 22 06:09:41 crc kubenswrapper[4982]: I0122 06:09:41.033890 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"220f4b283b3f9a517a803b7ddf7eab12d76be773c8d7870ec22e070be16fddc8"} err="failed to get container status \"220f4b283b3f9a517a803b7ddf7eab12d76be773c8d7870ec22e070be16fddc8\": rpc error: code = NotFound desc = could not find container \"220f4b283b3f9a517a803b7ddf7eab12d76be773c8d7870ec22e070be16fddc8\": container with ID starting with 220f4b283b3f9a517a803b7ddf7eab12d76be773c8d7870ec22e070be16fddc8 not found: ID does not exist" Jan 22 06:09:41 crc kubenswrapper[4982]: I0122 06:09:41.033917 4982 scope.go:117] "RemoveContainer" containerID="131adb6c781ce68e1766d68430c19ef8432eb18ef963f8f4025af704966f7693" Jan 22 06:09:41 crc kubenswrapper[4982]: E0122 06:09:41.034253 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"131adb6c781ce68e1766d68430c19ef8432eb18ef963f8f4025af704966f7693\": container with ID starting with 131adb6c781ce68e1766d68430c19ef8432eb18ef963f8f4025af704966f7693 not found: ID does not exist" containerID="131adb6c781ce68e1766d68430c19ef8432eb18ef963f8f4025af704966f7693" Jan 22 06:09:41 crc kubenswrapper[4982]: I0122 06:09:41.034271 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"131adb6c781ce68e1766d68430c19ef8432eb18ef963f8f4025af704966f7693"} err="failed to get container status \"131adb6c781ce68e1766d68430c19ef8432eb18ef963f8f4025af704966f7693\": rpc error: code = NotFound desc = could not find container \"131adb6c781ce68e1766d68430c19ef8432eb18ef963f8f4025af704966f7693\": container with ID starting with 131adb6c781ce68e1766d68430c19ef8432eb18ef963f8f4025af704966f7693 not found: ID does not exist" Jan 22 06:09:41 crc kubenswrapper[4982]: I0122 06:09:41.034283 4982 scope.go:117] "RemoveContainer" containerID="124481cd899bded3e4740e2305a0c0d1adb8d057885a374b36c160e3f8fede08" Jan 22 06:09:41 crc kubenswrapper[4982]: E0122 06:09:41.034630 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"124481cd899bded3e4740e2305a0c0d1adb8d057885a374b36c160e3f8fede08\": container with ID starting with 
124481cd899bded3e4740e2305a0c0d1adb8d057885a374b36c160e3f8fede08 not found: ID does not exist" containerID="124481cd899bded3e4740e2305a0c0d1adb8d057885a374b36c160e3f8fede08" Jan 22 06:09:41 crc kubenswrapper[4982]: I0122 06:09:41.034681 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"124481cd899bded3e4740e2305a0c0d1adb8d057885a374b36c160e3f8fede08"} err="failed to get container status \"124481cd899bded3e4740e2305a0c0d1adb8d057885a374b36c160e3f8fede08\": rpc error: code = NotFound desc = could not find container \"124481cd899bded3e4740e2305a0c0d1adb8d057885a374b36c160e3f8fede08\": container with ID starting with 124481cd899bded3e4740e2305a0c0d1adb8d057885a374b36c160e3f8fede08 not found: ID does not exist" Jan 22 06:09:41 crc kubenswrapper[4982]: I0122 06:09:41.729095 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f885c65c-ba3d-498c-a094-4abbc117efdc" path="/var/lib/kubelet/pods/f885c65c-ba3d-498c-a094-4abbc117efdc/volumes" Jan 22 06:09:41 crc kubenswrapper[4982]: I0122 06:09:41.853020 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"681a16ff-1468-4a9a-a692-5461230072bd","Type":"ContainerStarted","Data":"140d8add2f50c3a875e31d6ba85286481b8d656646014149c174dd7fcd703594"} Jan 22 06:09:41 crc kubenswrapper[4982]: I0122 06:09:41.853079 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"681a16ff-1468-4a9a-a692-5461230072bd","Type":"ContainerStarted","Data":"18ecffa2d4497e293839524f6ed5f32c178f24e5d755feed0ffbc3b6d03460b6"} Jan 22 06:09:42 crc kubenswrapper[4982]: I0122 06:09:42.284041 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-vqgqq" Jan 22 06:09:42 crc kubenswrapper[4982]: I0122 06:09:42.311173 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/113886a4-e806-4fd8-b678-f8051fa8b3ba-scripts\") pod \"113886a4-e806-4fd8-b678-f8051fa8b3ba\" (UID: \"113886a4-e806-4fd8-b678-f8051fa8b3ba\") " Jan 22 06:09:42 crc kubenswrapper[4982]: I0122 06:09:42.311641 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vvggm\" (UniqueName: \"kubernetes.io/projected/113886a4-e806-4fd8-b678-f8051fa8b3ba-kube-api-access-vvggm\") pod \"113886a4-e806-4fd8-b678-f8051fa8b3ba\" (UID: \"113886a4-e806-4fd8-b678-f8051fa8b3ba\") " Jan 22 06:09:42 crc kubenswrapper[4982]: I0122 06:09:42.311988 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/113886a4-e806-4fd8-b678-f8051fa8b3ba-config-data\") pod \"113886a4-e806-4fd8-b678-f8051fa8b3ba\" (UID: \"113886a4-e806-4fd8-b678-f8051fa8b3ba\") " Jan 22 06:09:42 crc kubenswrapper[4982]: I0122 06:09:42.312486 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/113886a4-e806-4fd8-b678-f8051fa8b3ba-combined-ca-bundle\") pod \"113886a4-e806-4fd8-b678-f8051fa8b3ba\" (UID: \"113886a4-e806-4fd8-b678-f8051fa8b3ba\") " Jan 22 06:09:42 crc kubenswrapper[4982]: I0122 06:09:42.317070 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/113886a4-e806-4fd8-b678-f8051fa8b3ba-kube-api-access-vvggm" (OuterVolumeSpecName: "kube-api-access-vvggm") pod "113886a4-e806-4fd8-b678-f8051fa8b3ba" (UID: 
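Note on the RemoveContainer / "ContainerStatus from runtime service failed" exchange above: the runtime answering a gRPC NotFound just means the container is already gone, so the deletor can treat the error as success and move on. A minimal Go sketch of that idempotent-delete pattern using the real grpc-go status helpers; checkStatus is a hypothetical stand-in for a CRI ContainerStatus call:

```go
// Treat NotFound from the runtime as "already deleted".
package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

func checkStatus(id string) error {
	// stand-in for the CRI ContainerStatus RPC failing as in the log
	return status.Errorf(codes.NotFound, "could not find container %q", id)
}

func deleteContainer(id string) error {
	err := checkStatus(id)
	if status.Code(err) == codes.NotFound {
		fmt.Printf("container %s already removed, nothing to do\n", id)
		return nil // idempotent: absent is the desired end state
	}
	return err
}

func main() {
	if err := deleteContainer("220f4b283b3f"); err != nil {
		fmt.Println("delete failed:", err)
	}
}
```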
"113886a4-e806-4fd8-b678-f8051fa8b3ba"). InnerVolumeSpecName "kube-api-access-vvggm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:09:42 crc kubenswrapper[4982]: I0122 06:09:42.318039 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/113886a4-e806-4fd8-b678-f8051fa8b3ba-scripts" (OuterVolumeSpecName: "scripts") pod "113886a4-e806-4fd8-b678-f8051fa8b3ba" (UID: "113886a4-e806-4fd8-b678-f8051fa8b3ba"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:09:42 crc kubenswrapper[4982]: I0122 06:09:42.321505 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 22 06:09:42 crc kubenswrapper[4982]: I0122 06:09:42.322738 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 22 06:09:42 crc kubenswrapper[4982]: I0122 06:09:42.350550 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/113886a4-e806-4fd8-b678-f8051fa8b3ba-config-data" (OuterVolumeSpecName: "config-data") pod "113886a4-e806-4fd8-b678-f8051fa8b3ba" (UID: "113886a4-e806-4fd8-b678-f8051fa8b3ba"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:09:42 crc kubenswrapper[4982]: I0122 06:09:42.359912 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/113886a4-e806-4fd8-b678-f8051fa8b3ba-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "113886a4-e806-4fd8-b678-f8051fa8b3ba" (UID: "113886a4-e806-4fd8-b678-f8051fa8b3ba"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:09:42 crc kubenswrapper[4982]: I0122 06:09:42.413978 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vvggm\" (UniqueName: \"kubernetes.io/projected/113886a4-e806-4fd8-b678-f8051fa8b3ba-kube-api-access-vvggm\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:42 crc kubenswrapper[4982]: I0122 06:09:42.414013 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/113886a4-e806-4fd8-b678-f8051fa8b3ba-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:42 crc kubenswrapper[4982]: I0122 06:09:42.414024 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/113886a4-e806-4fd8-b678-f8051fa8b3ba-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:42 crc kubenswrapper[4982]: I0122 06:09:42.414032 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/113886a4-e806-4fd8-b678-f8051fa8b3ba-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:42 crc kubenswrapper[4982]: I0122 06:09:42.866930 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-vqgqq" Jan 22 06:09:42 crc kubenswrapper[4982]: I0122 06:09:42.866976 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-vqgqq" event={"ID":"113886a4-e806-4fd8-b678-f8051fa8b3ba","Type":"ContainerDied","Data":"074cfeb37e0979f7ba42891766fac5e5a9428faa37dbf2dd0554381c9d3e0aec"} Jan 22 06:09:42 crc kubenswrapper[4982]: I0122 06:09:42.867073 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="074cfeb37e0979f7ba42891766fac5e5a9428faa37dbf2dd0554381c9d3e0aec" Jan 22 06:09:43 crc kubenswrapper[4982]: I0122 06:09:43.078546 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 22 06:09:43 crc kubenswrapper[4982]: I0122 06:09:43.097952 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 06:09:43 crc kubenswrapper[4982]: I0122 06:09:43.098442 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="7c10441d-8606-4622-b83f-033f239987e8" containerName="nova-scheduler-scheduler" containerID="cri-o://9a19b6610b0b565ba4925432db7983dfff1232c3922a9e19709722655257528e" gracePeriod=30 Jan 22 06:09:43 crc kubenswrapper[4982]: I0122 06:09:43.135788 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 06:09:43 crc kubenswrapper[4982]: I0122 06:09:43.136068 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="3645e707-91be-42e5-a475-2eb6ec3fb433" containerName="nova-metadata-log" containerID="cri-o://e23ba4bce99c12647cb53c902ff74233634365cebd04a3984359e39265010a24" gracePeriod=30 Jan 22 06:09:43 crc kubenswrapper[4982]: I0122 06:09:43.136500 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="3645e707-91be-42e5-a475-2eb6ec3fb433" containerName="nova-metadata-metadata" containerID="cri-o://0c9ccdc23f43b1bf3c0d74a8692311a6076c288ef0848ddd60f988b930a58ba3" gracePeriod=30 Jan 22 06:09:43 crc kubenswrapper[4982]: I0122 06:09:43.342059 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="7b4399a2-89c9-4b88-8e2e-e0673539ecf6" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.202:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 22 06:09:43 crc kubenswrapper[4982]: I0122 06:09:43.342084 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="7b4399a2-89c9-4b88-8e2e-e0673539ecf6" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.202:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 22 06:09:43 crc kubenswrapper[4982]: I0122 06:09:43.875478 4982 generic.go:334] "Generic (PLEG): container finished" podID="3645e707-91be-42e5-a475-2eb6ec3fb433" containerID="e23ba4bce99c12647cb53c902ff74233634365cebd04a3984359e39265010a24" exitCode=143 Jan 22 06:09:43 crc kubenswrapper[4982]: I0122 06:09:43.875582 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3645e707-91be-42e5-a475-2eb6ec3fb433","Type":"ContainerDied","Data":"e23ba4bce99c12647cb53c902ff74233634365cebd04a3984359e39265010a24"} Jan 22 06:09:43 crc kubenswrapper[4982]: I0122 06:09:43.877835 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"681a16ff-1468-4a9a-a692-5461230072bd","Type":"ContainerStarted","Data":"fc3d99dc9caa4c46c23d4707378cbddbf32648bc1c82450d35d566fe03f67374"} Jan 22 06:09:43 crc kubenswrapper[4982]: I0122 06:09:43.877934 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="7b4399a2-89c9-4b88-8e2e-e0673539ecf6" containerName="nova-api-log" containerID="cri-o://ae09bda4006f6c08ed050a5819535ae081dd99d7a95d5115ad4d83488b4c60ca" gracePeriod=30 Jan 22 06:09:43 crc kubenswrapper[4982]: I0122 06:09:43.877987 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 22 06:09:43 crc kubenswrapper[4982]: I0122 06:09:43.878288 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="7b4399a2-89c9-4b88-8e2e-e0673539ecf6" containerName="nova-api-api" containerID="cri-o://52fc67ea4981a2fc40d9fee820d2cc30a32d6182236631d0777c56500713c0b8" gracePeriod=30 Jan 22 06:09:43 crc kubenswrapper[4982]: I0122 06:09:43.908124 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.651765333 podStartE2EDuration="5.908102357s" podCreationTimestamp="2026-01-22 06:09:38 +0000 UTC" firstStartedPulling="2026-01-22 06:09:39.648284174 +0000 UTC m=+1440.486922177" lastFinishedPulling="2026-01-22 06:09:42.904621188 +0000 UTC m=+1443.743259201" observedRunningTime="2026-01-22 06:09:43.905739885 +0000 UTC m=+1444.744377888" watchObservedRunningTime="2026-01-22 06:09:43.908102357 +0000 UTC m=+1444.746740390" Jan 22 06:09:43 crc kubenswrapper[4982]: E0122 06:09:43.985437 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9a19b6610b0b565ba4925432db7983dfff1232c3922a9e19709722655257528e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 22 06:09:43 crc kubenswrapper[4982]: E0122 06:09:43.986741 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9a19b6610b0b565ba4925432db7983dfff1232c3922a9e19709722655257528e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 22 06:09:43 crc kubenswrapper[4982]: E0122 06:09:43.989788 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9a19b6610b0b565ba4925432db7983dfff1232c3922a9e19709722655257528e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 22 06:09:43 crc kubenswrapper[4982]: E0122 06:09:43.989894 4982 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="7c10441d-8606-4622-b83f-033f239987e8" containerName="nova-scheduler-scheduler" Jan 22 06:09:44 crc kubenswrapper[4982]: I0122 06:09:44.889089 4982 generic.go:334] "Generic (PLEG): container finished" podID="7b4399a2-89c9-4b88-8e2e-e0673539ecf6" containerID="ae09bda4006f6c08ed050a5819535ae081dd99d7a95d5115ad4d83488b4c60ca" exitCode=143 Jan 22 06:09:44 crc kubenswrapper[4982]: I0122 06:09:44.889168 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-api-0" event={"ID":"7b4399a2-89c9-4b88-8e2e-e0673539ecf6","Type":"ContainerDied","Data":"ae09bda4006f6c08ed050a5819535ae081dd99d7a95d5115ad4d83488b4c60ca"} Jan 22 06:09:46 crc kubenswrapper[4982]: I0122 06:09:46.297666 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="3645e707-91be-42e5-a475-2eb6ec3fb433" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": read tcp 10.217.0.2:38920->10.217.0.193:8775: read: connection reset by peer" Jan 22 06:09:46 crc kubenswrapper[4982]: I0122 06:09:46.297730 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="3645e707-91be-42e5-a475-2eb6ec3fb433" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": read tcp 10.217.0.2:38916->10.217.0.193:8775: read: connection reset by peer" Jan 22 06:09:46 crc kubenswrapper[4982]: I0122 06:09:46.831995 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 06:09:46 crc kubenswrapper[4982]: I0122 06:09:46.894293 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3645e707-91be-42e5-a475-2eb6ec3fb433-nova-metadata-tls-certs\") pod \"3645e707-91be-42e5-a475-2eb6ec3fb433\" (UID: \"3645e707-91be-42e5-a475-2eb6ec3fb433\") " Jan 22 06:09:46 crc kubenswrapper[4982]: I0122 06:09:46.894402 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hf6hc\" (UniqueName: \"kubernetes.io/projected/3645e707-91be-42e5-a475-2eb6ec3fb433-kube-api-access-hf6hc\") pod \"3645e707-91be-42e5-a475-2eb6ec3fb433\" (UID: \"3645e707-91be-42e5-a475-2eb6ec3fb433\") " Jan 22 06:09:46 crc kubenswrapper[4982]: I0122 06:09:46.894452 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3645e707-91be-42e5-a475-2eb6ec3fb433-config-data\") pod \"3645e707-91be-42e5-a475-2eb6ec3fb433\" (UID: \"3645e707-91be-42e5-a475-2eb6ec3fb433\") " Jan 22 06:09:46 crc kubenswrapper[4982]: I0122 06:09:46.894515 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3645e707-91be-42e5-a475-2eb6ec3fb433-combined-ca-bundle\") pod \"3645e707-91be-42e5-a475-2eb6ec3fb433\" (UID: \"3645e707-91be-42e5-a475-2eb6ec3fb433\") " Jan 22 06:09:46 crc kubenswrapper[4982]: I0122 06:09:46.894576 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3645e707-91be-42e5-a475-2eb6ec3fb433-logs\") pod \"3645e707-91be-42e5-a475-2eb6ec3fb433\" (UID: \"3645e707-91be-42e5-a475-2eb6ec3fb433\") " Jan 22 06:09:46 crc kubenswrapper[4982]: I0122 06:09:46.895525 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3645e707-91be-42e5-a475-2eb6ec3fb433-logs" (OuterVolumeSpecName: "logs") pod "3645e707-91be-42e5-a475-2eb6ec3fb433" (UID: "3645e707-91be-42e5-a475-2eb6ec3fb433"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:09:46 crc kubenswrapper[4982]: I0122 06:09:46.907887 4982 generic.go:334] "Generic (PLEG): container finished" podID="3645e707-91be-42e5-a475-2eb6ec3fb433" containerID="0c9ccdc23f43b1bf3c0d74a8692311a6076c288ef0848ddd60f988b930a58ba3" exitCode=0 Jan 22 06:09:46 crc kubenswrapper[4982]: I0122 06:09:46.908245 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3645e707-91be-42e5-a475-2eb6ec3fb433","Type":"ContainerDied","Data":"0c9ccdc23f43b1bf3c0d74a8692311a6076c288ef0848ddd60f988b930a58ba3"} Jan 22 06:09:46 crc kubenswrapper[4982]: I0122 06:09:46.908278 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3645e707-91be-42e5-a475-2eb6ec3fb433","Type":"ContainerDied","Data":"b9de301df8546493e24acddc1699f9edcd6f6d28a84cbdc3c46bfa4f2405b086"} Jan 22 06:09:46 crc kubenswrapper[4982]: I0122 06:09:46.908299 4982 scope.go:117] "RemoveContainer" containerID="0c9ccdc23f43b1bf3c0d74a8692311a6076c288ef0848ddd60f988b930a58ba3" Jan 22 06:09:46 crc kubenswrapper[4982]: I0122 06:09:46.908438 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 06:09:46 crc kubenswrapper[4982]: I0122 06:09:46.939028 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3645e707-91be-42e5-a475-2eb6ec3fb433-kube-api-access-hf6hc" (OuterVolumeSpecName: "kube-api-access-hf6hc") pod "3645e707-91be-42e5-a475-2eb6ec3fb433" (UID: "3645e707-91be-42e5-a475-2eb6ec3fb433"). InnerVolumeSpecName "kube-api-access-hf6hc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:09:46 crc kubenswrapper[4982]: I0122 06:09:46.941176 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3645e707-91be-42e5-a475-2eb6ec3fb433-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3645e707-91be-42e5-a475-2eb6ec3fb433" (UID: "3645e707-91be-42e5-a475-2eb6ec3fb433"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:09:46 crc kubenswrapper[4982]: I0122 06:09:46.943071 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3645e707-91be-42e5-a475-2eb6ec3fb433-config-data" (OuterVolumeSpecName: "config-data") pod "3645e707-91be-42e5-a475-2eb6ec3fb433" (UID: "3645e707-91be-42e5-a475-2eb6ec3fb433"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:09:46 crc kubenswrapper[4982]: I0122 06:09:46.964835 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3645e707-91be-42e5-a475-2eb6ec3fb433-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "3645e707-91be-42e5-a475-2eb6ec3fb433" (UID: "3645e707-91be-42e5-a475-2eb6ec3fb433"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:09:46 crc kubenswrapper[4982]: I0122 06:09:46.996116 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3645e707-91be-42e5-a475-2eb6ec3fb433-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:46 crc kubenswrapper[4982]: I0122 06:09:46.996149 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3645e707-91be-42e5-a475-2eb6ec3fb433-logs\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:46 crc kubenswrapper[4982]: I0122 06:09:46.996161 4982 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3645e707-91be-42e5-a475-2eb6ec3fb433-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:46 crc kubenswrapper[4982]: I0122 06:09:46.996173 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hf6hc\" (UniqueName: \"kubernetes.io/projected/3645e707-91be-42e5-a475-2eb6ec3fb433-kube-api-access-hf6hc\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:46 crc kubenswrapper[4982]: I0122 06:09:46.996184 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3645e707-91be-42e5-a475-2eb6ec3fb433-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.010520 4982 scope.go:117] "RemoveContainer" containerID="e23ba4bce99c12647cb53c902ff74233634365cebd04a3984359e39265010a24" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.044521 4982 scope.go:117] "RemoveContainer" containerID="0c9ccdc23f43b1bf3c0d74a8692311a6076c288ef0848ddd60f988b930a58ba3" Jan 22 06:09:47 crc kubenswrapper[4982]: E0122 06:09:47.044958 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c9ccdc23f43b1bf3c0d74a8692311a6076c288ef0848ddd60f988b930a58ba3\": container with ID starting with 0c9ccdc23f43b1bf3c0d74a8692311a6076c288ef0848ddd60f988b930a58ba3 not found: ID does not exist" containerID="0c9ccdc23f43b1bf3c0d74a8692311a6076c288ef0848ddd60f988b930a58ba3" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.044989 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c9ccdc23f43b1bf3c0d74a8692311a6076c288ef0848ddd60f988b930a58ba3"} err="failed to get container status \"0c9ccdc23f43b1bf3c0d74a8692311a6076c288ef0848ddd60f988b930a58ba3\": rpc error: code = NotFound desc = could not find container \"0c9ccdc23f43b1bf3c0d74a8692311a6076c288ef0848ddd60f988b930a58ba3\": container with ID starting with 0c9ccdc23f43b1bf3c0d74a8692311a6076c288ef0848ddd60f988b930a58ba3 not found: ID does not exist" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.045009 4982 scope.go:117] "RemoveContainer" containerID="e23ba4bce99c12647cb53c902ff74233634365cebd04a3984359e39265010a24" Jan 22 06:09:47 crc kubenswrapper[4982]: E0122 06:09:47.045253 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e23ba4bce99c12647cb53c902ff74233634365cebd04a3984359e39265010a24\": container with ID starting with e23ba4bce99c12647cb53c902ff74233634365cebd04a3984359e39265010a24 not found: ID does not exist" containerID="e23ba4bce99c12647cb53c902ff74233634365cebd04a3984359e39265010a24" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.045281 4982 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e23ba4bce99c12647cb53c902ff74233634365cebd04a3984359e39265010a24"} err="failed to get container status \"e23ba4bce99c12647cb53c902ff74233634365cebd04a3984359e39265010a24\": rpc error: code = NotFound desc = could not find container \"e23ba4bce99c12647cb53c902ff74233634365cebd04a3984359e39265010a24\": container with ID starting with e23ba4bce99c12647cb53c902ff74233634365cebd04a3984359e39265010a24 not found: ID does not exist" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.244321 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.252209 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.272116 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 22 06:09:47 crc kubenswrapper[4982]: E0122 06:09:47.272524 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="113886a4-e806-4fd8-b678-f8051fa8b3ba" containerName="nova-manage" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.272540 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="113886a4-e806-4fd8-b678-f8051fa8b3ba" containerName="nova-manage" Jan 22 06:09:47 crc kubenswrapper[4982]: E0122 06:09:47.272560 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f885c65c-ba3d-498c-a094-4abbc117efdc" containerName="registry-server" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.272566 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f885c65c-ba3d-498c-a094-4abbc117efdc" containerName="registry-server" Jan 22 06:09:47 crc kubenswrapper[4982]: E0122 06:09:47.272578 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3645e707-91be-42e5-a475-2eb6ec3fb433" containerName="nova-metadata-metadata" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.272587 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="3645e707-91be-42e5-a475-2eb6ec3fb433" containerName="nova-metadata-metadata" Jan 22 06:09:47 crc kubenswrapper[4982]: E0122 06:09:47.272599 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f885c65c-ba3d-498c-a094-4abbc117efdc" containerName="extract-content" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.272605 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f885c65c-ba3d-498c-a094-4abbc117efdc" containerName="extract-content" Jan 22 06:09:47 crc kubenswrapper[4982]: E0122 06:09:47.272620 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f885c65c-ba3d-498c-a094-4abbc117efdc" containerName="extract-utilities" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.272626 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f885c65c-ba3d-498c-a094-4abbc117efdc" containerName="extract-utilities" Jan 22 06:09:47 crc kubenswrapper[4982]: E0122 06:09:47.272633 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3645e707-91be-42e5-a475-2eb6ec3fb433" containerName="nova-metadata-log" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.272641 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="3645e707-91be-42e5-a475-2eb6ec3fb433" containerName="nova-metadata-log" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.272798 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="3645e707-91be-42e5-a475-2eb6ec3fb433" containerName="nova-metadata-log" Jan 22 
06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.272820 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="3645e707-91be-42e5-a475-2eb6ec3fb433" containerName="nova-metadata-metadata" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.272827 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="113886a4-e806-4fd8-b678-f8051fa8b3ba" containerName="nova-manage" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.272842 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f885c65c-ba3d-498c-a094-4abbc117efdc" containerName="registry-server" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.273739 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.277027 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.278285 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.289254 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.300242 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/53013384-1492-4c2e-9c7a-cd81d4d07018-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"53013384-1492-4c2e-9c7a-cd81d4d07018\") " pod="openstack/nova-metadata-0" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.300284 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53013384-1492-4c2e-9c7a-cd81d4d07018-logs\") pod \"nova-metadata-0\" (UID: \"53013384-1492-4c2e-9c7a-cd81d4d07018\") " pod="openstack/nova-metadata-0" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.300343 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53013384-1492-4c2e-9c7a-cd81d4d07018-config-data\") pod \"nova-metadata-0\" (UID: \"53013384-1492-4c2e-9c7a-cd81d4d07018\") " pod="openstack/nova-metadata-0" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.300425 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53013384-1492-4c2e-9c7a-cd81d4d07018-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"53013384-1492-4c2e-9c7a-cd81d4d07018\") " pod="openstack/nova-metadata-0" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.300485 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2g6c\" (UniqueName: \"kubernetes.io/projected/53013384-1492-4c2e-9c7a-cd81d4d07018-kube-api-access-z2g6c\") pod \"nova-metadata-0\" (UID: \"53013384-1492-4c2e-9c7a-cd81d4d07018\") " pod="openstack/nova-metadata-0" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.402190 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53013384-1492-4c2e-9c7a-cd81d4d07018-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"53013384-1492-4c2e-9c7a-cd81d4d07018\") " 
pod="openstack/nova-metadata-0" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.402293 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2g6c\" (UniqueName: \"kubernetes.io/projected/53013384-1492-4c2e-9c7a-cd81d4d07018-kube-api-access-z2g6c\") pod \"nova-metadata-0\" (UID: \"53013384-1492-4c2e-9c7a-cd81d4d07018\") " pod="openstack/nova-metadata-0" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.402339 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/53013384-1492-4c2e-9c7a-cd81d4d07018-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"53013384-1492-4c2e-9c7a-cd81d4d07018\") " pod="openstack/nova-metadata-0" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.402365 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53013384-1492-4c2e-9c7a-cd81d4d07018-logs\") pod \"nova-metadata-0\" (UID: \"53013384-1492-4c2e-9c7a-cd81d4d07018\") " pod="openstack/nova-metadata-0" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.402412 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53013384-1492-4c2e-9c7a-cd81d4d07018-config-data\") pod \"nova-metadata-0\" (UID: \"53013384-1492-4c2e-9c7a-cd81d4d07018\") " pod="openstack/nova-metadata-0" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.402832 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53013384-1492-4c2e-9c7a-cd81d4d07018-logs\") pod \"nova-metadata-0\" (UID: \"53013384-1492-4c2e-9c7a-cd81d4d07018\") " pod="openstack/nova-metadata-0" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.405940 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53013384-1492-4c2e-9c7a-cd81d4d07018-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"53013384-1492-4c2e-9c7a-cd81d4d07018\") " pod="openstack/nova-metadata-0" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.406000 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53013384-1492-4c2e-9c7a-cd81d4d07018-config-data\") pod \"nova-metadata-0\" (UID: \"53013384-1492-4c2e-9c7a-cd81d4d07018\") " pod="openstack/nova-metadata-0" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.406397 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/53013384-1492-4c2e-9c7a-cd81d4d07018-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"53013384-1492-4c2e-9c7a-cd81d4d07018\") " pod="openstack/nova-metadata-0" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.423588 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2g6c\" (UniqueName: \"kubernetes.io/projected/53013384-1492-4c2e-9c7a-cd81d4d07018-kube-api-access-z2g6c\") pod \"nova-metadata-0\" (UID: \"53013384-1492-4c2e-9c7a-cd81d4d07018\") " pod="openstack/nova-metadata-0" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.590158 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 06:09:47 crc kubenswrapper[4982]: I0122 06:09:47.758388 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3645e707-91be-42e5-a475-2eb6ec3fb433" path="/var/lib/kubelet/pods/3645e707-91be-42e5-a475-2eb6ec3fb433/volumes" Jan 22 06:09:48 crc kubenswrapper[4982]: W0122 06:09:48.091751 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod53013384_1492_4c2e_9c7a_cd81d4d07018.slice/crio-999b2f976f3b75a88d1743b252bb3a98cab63017dcb130885c70d48f61d72de6 WatchSource:0}: Error finding container 999b2f976f3b75a88d1743b252bb3a98cab63017dcb130885c70d48f61d72de6: Status 404 returned error can't find the container with id 999b2f976f3b75a88d1743b252bb3a98cab63017dcb130885c70d48f61d72de6 Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.091874 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.541152 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.657638 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c10441d-8606-4622-b83f-033f239987e8-config-data\") pod \"7c10441d-8606-4622-b83f-033f239987e8\" (UID: \"7c10441d-8606-4622-b83f-033f239987e8\") " Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.658136 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c10441d-8606-4622-b83f-033f239987e8-combined-ca-bundle\") pod \"7c10441d-8606-4622-b83f-033f239987e8\" (UID: \"7c10441d-8606-4622-b83f-033f239987e8\") " Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.658191 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d8qw9\" (UniqueName: \"kubernetes.io/projected/7c10441d-8606-4622-b83f-033f239987e8-kube-api-access-d8qw9\") pod \"7c10441d-8606-4622-b83f-033f239987e8\" (UID: \"7c10441d-8606-4622-b83f-033f239987e8\") " Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.666089 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c10441d-8606-4622-b83f-033f239987e8-kube-api-access-d8qw9" (OuterVolumeSpecName: "kube-api-access-d8qw9") pod "7c10441d-8606-4622-b83f-033f239987e8" (UID: "7c10441d-8606-4622-b83f-033f239987e8"). InnerVolumeSpecName "kube-api-access-d8qw9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.690905 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c10441d-8606-4622-b83f-033f239987e8-config-data" (OuterVolumeSpecName: "config-data") pod "7c10441d-8606-4622-b83f-033f239987e8" (UID: "7c10441d-8606-4622-b83f-033f239987e8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.694731 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c10441d-8606-4622-b83f-033f239987e8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7c10441d-8606-4622-b83f-033f239987e8" (UID: "7c10441d-8606-4622-b83f-033f239987e8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.760884 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c10441d-8606-4622-b83f-033f239987e8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.760917 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d8qw9\" (UniqueName: \"kubernetes.io/projected/7c10441d-8606-4622-b83f-033f239987e8-kube-api-access-d8qw9\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.760931 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c10441d-8606-4622-b83f-033f239987e8-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.785654 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.862254 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sgvlk\" (UniqueName: \"kubernetes.io/projected/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-kube-api-access-sgvlk\") pod \"7b4399a2-89c9-4b88-8e2e-e0673539ecf6\" (UID: \"7b4399a2-89c9-4b88-8e2e-e0673539ecf6\") " Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.862293 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-internal-tls-certs\") pod \"7b4399a2-89c9-4b88-8e2e-e0673539ecf6\" (UID: \"7b4399a2-89c9-4b88-8e2e-e0673539ecf6\") " Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.862376 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-logs\") pod \"7b4399a2-89c9-4b88-8e2e-e0673539ecf6\" (UID: \"7b4399a2-89c9-4b88-8e2e-e0673539ecf6\") " Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.862416 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-combined-ca-bundle\") pod \"7b4399a2-89c9-4b88-8e2e-e0673539ecf6\" (UID: \"7b4399a2-89c9-4b88-8e2e-e0673539ecf6\") " Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.862432 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-public-tls-certs\") pod \"7b4399a2-89c9-4b88-8e2e-e0673539ecf6\" (UID: \"7b4399a2-89c9-4b88-8e2e-e0673539ecf6\") " Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.862484 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-config-data\") pod \"7b4399a2-89c9-4b88-8e2e-e0673539ecf6\" (UID: \"7b4399a2-89c9-4b88-8e2e-e0673539ecf6\") " Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.863152 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-logs" (OuterVolumeSpecName: "logs") pod "7b4399a2-89c9-4b88-8e2e-e0673539ecf6" (UID: "7b4399a2-89c9-4b88-8e2e-e0673539ecf6"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.871118 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-kube-api-access-sgvlk" (OuterVolumeSpecName: "kube-api-access-sgvlk") pod "7b4399a2-89c9-4b88-8e2e-e0673539ecf6" (UID: "7b4399a2-89c9-4b88-8e2e-e0673539ecf6"). InnerVolumeSpecName "kube-api-access-sgvlk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.905071 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7b4399a2-89c9-4b88-8e2e-e0673539ecf6" (UID: "7b4399a2-89c9-4b88-8e2e-e0673539ecf6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.916871 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-config-data" (OuterVolumeSpecName: "config-data") pod "7b4399a2-89c9-4b88-8e2e-e0673539ecf6" (UID: "7b4399a2-89c9-4b88-8e2e-e0673539ecf6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.929140 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "7b4399a2-89c9-4b88-8e2e-e0673539ecf6" (UID: "7b4399a2-89c9-4b88-8e2e-e0673539ecf6"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.938452 4982 generic.go:334] "Generic (PLEG): container finished" podID="7c10441d-8606-4622-b83f-033f239987e8" containerID="9a19b6610b0b565ba4925432db7983dfff1232c3922a9e19709722655257528e" exitCode=0 Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.938547 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7c10441d-8606-4622-b83f-033f239987e8","Type":"ContainerDied","Data":"9a19b6610b0b565ba4925432db7983dfff1232c3922a9e19709722655257528e"} Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.938608 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7c10441d-8606-4622-b83f-033f239987e8","Type":"ContainerDied","Data":"e8881cb077d9a8065f100c2883ab3dca45770bdbdd334db9e40a6e91a4156ac5"} Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.938630 4982 scope.go:117] "RemoveContainer" containerID="9a19b6610b0b565ba4925432db7983dfff1232c3922a9e19709722655257528e" Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.938798 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.944216 4982 generic.go:334] "Generic (PLEG): container finished" podID="7b4399a2-89c9-4b88-8e2e-e0673539ecf6" containerID="52fc67ea4981a2fc40d9fee820d2cc30a32d6182236631d0777c56500713c0b8" exitCode=0 Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.944292 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7b4399a2-89c9-4b88-8e2e-e0673539ecf6","Type":"ContainerDied","Data":"52fc67ea4981a2fc40d9fee820d2cc30a32d6182236631d0777c56500713c0b8"} Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.944311 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.944319 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7b4399a2-89c9-4b88-8e2e-e0673539ecf6","Type":"ContainerDied","Data":"c09e1e1e8fa9101fefd875cd7ad105ed02f64acc13214a5e6e9f2c89799471e6"} Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.949604 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"53013384-1492-4c2e-9c7a-cd81d4d07018","Type":"ContainerStarted","Data":"a49fa6161cdc0ad0eb335a0a25470144a700d11c752945b8fa21ab70997554c3"} Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.949631 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"53013384-1492-4c2e-9c7a-cd81d4d07018","Type":"ContainerStarted","Data":"acd9ebde3d0e0f8fe4696b287db22cd21891b45ba0969952821a0ef3dc3ecee6"} Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.949642 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"53013384-1492-4c2e-9c7a-cd81d4d07018","Type":"ContainerStarted","Data":"999b2f976f3b75a88d1743b252bb3a98cab63017dcb130885c70d48f61d72de6"} Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.959974 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "7b4399a2-89c9-4b88-8e2e-e0673539ecf6" (UID: "7b4399a2-89c9-4b88-8e2e-e0673539ecf6"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.964576 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.964613 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sgvlk\" (UniqueName: \"kubernetes.io/projected/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-kube-api-access-sgvlk\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.964627 4982 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.964641 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-logs\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.964651 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.964662 4982 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b4399a2-89c9-4b88-8e2e-e0673539ecf6-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.970456 4982 scope.go:117] "RemoveContainer" containerID="9a19b6610b0b565ba4925432db7983dfff1232c3922a9e19709722655257528e" Jan 22 06:09:48 crc kubenswrapper[4982]: E0122 06:09:48.971261 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a19b6610b0b565ba4925432db7983dfff1232c3922a9e19709722655257528e\": container with ID starting with 9a19b6610b0b565ba4925432db7983dfff1232c3922a9e19709722655257528e not found: ID does not exist" containerID="9a19b6610b0b565ba4925432db7983dfff1232c3922a9e19709722655257528e" Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.971296 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a19b6610b0b565ba4925432db7983dfff1232c3922a9e19709722655257528e"} err="failed to get container status \"9a19b6610b0b565ba4925432db7983dfff1232c3922a9e19709722655257528e\": rpc error: code = NotFound desc = could not find container \"9a19b6610b0b565ba4925432db7983dfff1232c3922a9e19709722655257528e\": container with ID starting with 9a19b6610b0b565ba4925432db7983dfff1232c3922a9e19709722655257528e not found: ID does not exist" Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.971319 4982 scope.go:117] "RemoveContainer" containerID="52fc67ea4981a2fc40d9fee820d2cc30a32d6182236631d0777c56500713c0b8" Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.973733 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.973778 4982 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.980337 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=1.980313997 podStartE2EDuration="1.980313997s" podCreationTimestamp="2026-01-22 06:09:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:09:48.963103363 +0000 UTC m=+1449.801741366" watchObservedRunningTime="2026-01-22 06:09:48.980313997 +0000 UTC m=+1449.818952000" Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.995139 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 06:09:48 crc kubenswrapper[4982]: I0122 06:09:48.999458 4982 scope.go:117] "RemoveContainer" containerID="ae09bda4006f6c08ed050a5819535ae081dd99d7a95d5115ad4d83488b4c60ca" Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.003947 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.015238 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 06:09:49 crc kubenswrapper[4982]: E0122 06:09:49.015630 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b4399a2-89c9-4b88-8e2e-e0673539ecf6" containerName="nova-api-log" Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.015643 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b4399a2-89c9-4b88-8e2e-e0673539ecf6" containerName="nova-api-log" Jan 22 06:09:49 crc kubenswrapper[4982]: E0122 06:09:49.015666 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b4399a2-89c9-4b88-8e2e-e0673539ecf6" containerName="nova-api-api" Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.015672 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b4399a2-89c9-4b88-8e2e-e0673539ecf6" containerName="nova-api-api" Jan 22 06:09:49 crc kubenswrapper[4982]: E0122 06:09:49.015696 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c10441d-8606-4622-b83f-033f239987e8" containerName="nova-scheduler-scheduler" Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.015704 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c10441d-8606-4622-b83f-033f239987e8" containerName="nova-scheduler-scheduler" Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.015889 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b4399a2-89c9-4b88-8e2e-e0673539ecf6" containerName="nova-api-api" Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.015920 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c10441d-8606-4622-b83f-033f239987e8" containerName="nova-scheduler-scheduler" Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.015930 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b4399a2-89c9-4b88-8e2e-e0673539ecf6" containerName="nova-api-log" Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.016515 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.019423 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.025552 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.041511 4982 scope.go:117] "RemoveContainer" containerID="52fc67ea4981a2fc40d9fee820d2cc30a32d6182236631d0777c56500713c0b8" Jan 22 06:09:49 crc kubenswrapper[4982]: E0122 06:09:49.041996 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"52fc67ea4981a2fc40d9fee820d2cc30a32d6182236631d0777c56500713c0b8\": container with ID starting with 52fc67ea4981a2fc40d9fee820d2cc30a32d6182236631d0777c56500713c0b8 not found: ID does not exist" containerID="52fc67ea4981a2fc40d9fee820d2cc30a32d6182236631d0777c56500713c0b8" Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.042024 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52fc67ea4981a2fc40d9fee820d2cc30a32d6182236631d0777c56500713c0b8"} err="failed to get container status \"52fc67ea4981a2fc40d9fee820d2cc30a32d6182236631d0777c56500713c0b8\": rpc error: code = NotFound desc = could not find container \"52fc67ea4981a2fc40d9fee820d2cc30a32d6182236631d0777c56500713c0b8\": container with ID starting with 52fc67ea4981a2fc40d9fee820d2cc30a32d6182236631d0777c56500713c0b8 not found: ID does not exist" Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.042045 4982 scope.go:117] "RemoveContainer" containerID="ae09bda4006f6c08ed050a5819535ae081dd99d7a95d5115ad4d83488b4c60ca" Jan 22 06:09:49 crc kubenswrapper[4982]: E0122 06:09:49.042336 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae09bda4006f6c08ed050a5819535ae081dd99d7a95d5115ad4d83488b4c60ca\": container with ID starting with ae09bda4006f6c08ed050a5819535ae081dd99d7a95d5115ad4d83488b4c60ca not found: ID does not exist" containerID="ae09bda4006f6c08ed050a5819535ae081dd99d7a95d5115ad4d83488b4c60ca" Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.042363 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae09bda4006f6c08ed050a5819535ae081dd99d7a95d5115ad4d83488b4c60ca"} err="failed to get container status \"ae09bda4006f6c08ed050a5819535ae081dd99d7a95d5115ad4d83488b4c60ca\": rpc error: code = NotFound desc = could not find container \"ae09bda4006f6c08ed050a5819535ae081dd99d7a95d5115ad4d83488b4c60ca\": container with ID starting with ae09bda4006f6c08ed050a5819535ae081dd99d7a95d5115ad4d83488b4c60ca not found: ID does not exist" Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.066775 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e2e1a17-8278-4fe9-a83e-aeb0a61c69df-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0e2e1a17-8278-4fe9-a83e-aeb0a61c69df\") " pod="openstack/nova-scheduler-0" Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.067014 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qsnfh\" (UniqueName: \"kubernetes.io/projected/0e2e1a17-8278-4fe9-a83e-aeb0a61c69df-kube-api-access-qsnfh\") pod 
\"nova-scheduler-0\" (UID: \"0e2e1a17-8278-4fe9-a83e-aeb0a61c69df\") " pod="openstack/nova-scheduler-0" Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.067137 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e2e1a17-8278-4fe9-a83e-aeb0a61c69df-config-data\") pod \"nova-scheduler-0\" (UID: \"0e2e1a17-8278-4fe9-a83e-aeb0a61c69df\") " pod="openstack/nova-scheduler-0" Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.169083 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qsnfh\" (UniqueName: \"kubernetes.io/projected/0e2e1a17-8278-4fe9-a83e-aeb0a61c69df-kube-api-access-qsnfh\") pod \"nova-scheduler-0\" (UID: \"0e2e1a17-8278-4fe9-a83e-aeb0a61c69df\") " pod="openstack/nova-scheduler-0" Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.169127 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e2e1a17-8278-4fe9-a83e-aeb0a61c69df-config-data\") pod \"nova-scheduler-0\" (UID: \"0e2e1a17-8278-4fe9-a83e-aeb0a61c69df\") " pod="openstack/nova-scheduler-0" Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.169275 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e2e1a17-8278-4fe9-a83e-aeb0a61c69df-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0e2e1a17-8278-4fe9-a83e-aeb0a61c69df\") " pod="openstack/nova-scheduler-0" Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.174070 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e2e1a17-8278-4fe9-a83e-aeb0a61c69df-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0e2e1a17-8278-4fe9-a83e-aeb0a61c69df\") " pod="openstack/nova-scheduler-0" Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.177067 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e2e1a17-8278-4fe9-a83e-aeb0a61c69df-config-data\") pod \"nova-scheduler-0\" (UID: \"0e2e1a17-8278-4fe9-a83e-aeb0a61c69df\") " pod="openstack/nova-scheduler-0" Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.192598 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qsnfh\" (UniqueName: \"kubernetes.io/projected/0e2e1a17-8278-4fe9-a83e-aeb0a61c69df-kube-api-access-qsnfh\") pod \"nova-scheduler-0\" (UID: \"0e2e1a17-8278-4fe9-a83e-aeb0a61c69df\") " pod="openstack/nova-scheduler-0" Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.278701 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.288815 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.300614 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.302608 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0"
Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.305578 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc"
Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.305919 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.307808 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc"
Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.311072 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.337472 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.373482 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2r6w\" (UniqueName: \"kubernetes.io/projected/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-kube-api-access-w2r6w\") pod \"nova-api-0\" (UID: \"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc\") " pod="openstack/nova-api-0"
Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.373557 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-logs\") pod \"nova-api-0\" (UID: \"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc\") " pod="openstack/nova-api-0"
Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.373584 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-config-data\") pod \"nova-api-0\" (UID: \"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc\") " pod="openstack/nova-api-0"
Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.373642 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc\") " pod="openstack/nova-api-0"
Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.373710 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-public-tls-certs\") pod \"nova-api-0\" (UID: \"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc\") " pod="openstack/nova-api-0"
Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.373775 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-internal-tls-certs\") pod \"nova-api-0\" (UID: \"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc\") " pod="openstack/nova-api-0"
Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.476079 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-internal-tls-certs\") pod \"nova-api-0\" (UID: \"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc\") " pod="openstack/nova-api-0"
Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.476247 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2r6w\" (UniqueName: \"kubernetes.io/projected/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-kube-api-access-w2r6w\") pod \"nova-api-0\" (UID: \"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc\") " pod="openstack/nova-api-0"
Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.476347 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-logs\") pod \"nova-api-0\" (UID: \"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc\") " pod="openstack/nova-api-0"
Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.476408 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-config-data\") pod \"nova-api-0\" (UID: \"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc\") " pod="openstack/nova-api-0"
Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.476530 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc\") " pod="openstack/nova-api-0"
Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.476718 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-public-tls-certs\") pod \"nova-api-0\" (UID: \"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc\") " pod="openstack/nova-api-0"
Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.477417 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-logs\") pod \"nova-api-0\" (UID: \"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc\") " pod="openstack/nova-api-0"
Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.480643 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-config-data\") pod \"nova-api-0\" (UID: \"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc\") " pod="openstack/nova-api-0"
Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.480830 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-public-tls-certs\") pod \"nova-api-0\" (UID: \"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc\") " pod="openstack/nova-api-0"
Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.481036 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-internal-tls-certs\") pod \"nova-api-0\" (UID: \"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc\") " pod="openstack/nova-api-0"
Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.482349 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc\") " pod="openstack/nova-api-0"
Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.492923 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2r6w\" (UniqueName: \"kubernetes.io/projected/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-kube-api-access-w2r6w\") pod \"nova-api-0\" (UID: \"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc\") " pod="openstack/nova-api-0"
Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.620813 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.731096 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b4399a2-89c9-4b88-8e2e-e0673539ecf6" path="/var/lib/kubelet/pods/7b4399a2-89c9-4b88-8e2e-e0673539ecf6/volumes"
Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.732065 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c10441d-8606-4622-b83f-033f239987e8" path="/var/lib/kubelet/pods/7c10441d-8606-4622-b83f-033f239987e8/volumes"
Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.810871 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 22 06:09:49 crc kubenswrapper[4982]: W0122 06:09:49.820171 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0e2e1a17_8278_4fe9_a83e_aeb0a61c69df.slice/crio-12a2afa7d6de99f0270cfee379971747ac5d3b5cf3711176012880b96a95c85d WatchSource:0}: Error finding container 12a2afa7d6de99f0270cfee379971747ac5d3b5cf3711176012880b96a95c85d: Status 404 returned error can't find the container with id 12a2afa7d6de99f0270cfee379971747ac5d3b5cf3711176012880b96a95c85d
Jan 22 06:09:49 crc kubenswrapper[4982]: I0122 06:09:49.972962 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0e2e1a17-8278-4fe9-a83e-aeb0a61c69df","Type":"ContainerStarted","Data":"12a2afa7d6de99f0270cfee379971747ac5d3b5cf3711176012880b96a95c85d"}
Jan 22 06:09:50 crc kubenswrapper[4982]: I0122 06:09:50.247045 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Jan 22 06:09:50 crc kubenswrapper[4982]: I0122 06:09:50.990746 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0e2e1a17-8278-4fe9-a83e-aeb0a61c69df","Type":"ContainerStarted","Data":"98cf6e03c88eac06c166fdc5c9a1486e6fc1f8e13aa0eff354dcd9303c5e399f"}
Jan 22 06:09:50 crc kubenswrapper[4982]: I0122 06:09:50.996082 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc","Type":"ContainerStarted","Data":"cefbf932aefddcd806c99682cdfc21a0e80875bc3e02c87761c6e4e7b2e2288e"}
Jan 22 06:09:50 crc kubenswrapper[4982]: I0122 06:09:50.996119 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc","Type":"ContainerStarted","Data":"261f0791c2831229e062f5721d4579eac13d766cc88596aa36038f23c84e7149"}
Jan 22 06:09:50 crc kubenswrapper[4982]: I0122 06:09:50.996129 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc","Type":"ContainerStarted","Data":"c98db23ad978aec75df3796c99eca0103d2811f3f463ff7edded7cbaca2205c9"}
Jan 22 06:09:51 crc kubenswrapper[4982]: I0122 06:09:51.025953 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.025934183 podStartE2EDuration="3.025934183s" podCreationTimestamp="2026-01-22 06:09:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:09:51.005613659 +0000 UTC m=+1451.844251662" watchObservedRunningTime="2026-01-22 06:09:51.025934183 +0000 UTC m=+1451.864572186"
Jan 22 06:09:51 crc kubenswrapper[4982]: I0122 06:09:51.037669 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.037647962 podStartE2EDuration="2.037647962s" podCreationTimestamp="2026-01-22 06:09:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:09:51.026105108 +0000 UTC m=+1451.864743111" watchObservedRunningTime="2026-01-22 06:09:51.037647962 +0000 UTC m=+1451.876285965"
Jan 22 06:09:52 crc kubenswrapper[4982]: I0122 06:09:52.590939 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Jan 22 06:09:52 crc kubenswrapper[4982]: I0122 06:09:52.591281 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Jan 22 06:09:54 crc kubenswrapper[4982]: I0122 06:09:54.338037 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Jan 22 06:09:57 crc kubenswrapper[4982]: I0122 06:09:57.591115 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Jan 22 06:09:57 crc kubenswrapper[4982]: I0122 06:09:57.591769 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Jan 22 06:09:58 crc kubenswrapper[4982]: I0122 06:09:58.603986 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="53013384-1492-4c2e-9c7a-cd81d4d07018" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Jan 22 06:09:58 crc kubenswrapper[4982]: I0122 06:09:58.604070 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="53013384-1492-4c2e-9c7a-cd81d4d07018" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Jan 22 06:09:59 crc kubenswrapper[4982]: I0122 06:09:59.338612 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Jan 22 06:09:59 crc kubenswrapper[4982]: I0122 06:09:59.362108 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Jan 22 06:09:59 crc kubenswrapper[4982]: I0122 06:09:59.841342 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Jan 22 06:09:59 crc kubenswrapper[4982]: I0122 06:09:59.841374 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Jan 22 06:10:00 crc kubenswrapper[4982]: I0122 06:10:00.140246 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Jan 22 06:10:00 crc kubenswrapper[4982]: I0122 06:10:00.850090 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="1669f2c1-5f4c-4a3e-ae64-5d754ec522bc" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.207:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Jan 22 06:10:00 crc kubenswrapper[4982]: I0122 06:10:00.850120 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="1669f2c1-5f4c-4a3e-ae64-5d754ec522bc" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.207:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Jan 22 06:10:07 crc kubenswrapper[4982]: I0122 06:10:07.599376 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Jan 22 06:10:07 crc kubenswrapper[4982]: I0122 06:10:07.600068 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Jan 22 06:10:07 crc kubenswrapper[4982]: I0122 06:10:07.613336 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Jan 22 06:10:07 crc kubenswrapper[4982]: I0122 06:10:07.614078 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Jan 22 06:10:09 crc kubenswrapper[4982]: I0122 06:10:09.185527 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Jan 22 06:10:09 crc kubenswrapper[4982]: I0122 06:10:09.637116 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Jan 22 06:10:09 crc kubenswrapper[4982]: I0122 06:10:09.638464 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Jan 22 06:10:09 crc kubenswrapper[4982]: I0122 06:10:09.645614 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Jan 22 06:10:09 crc kubenswrapper[4982]: I0122 06:10:09.651623 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Jan 22 06:10:10 crc kubenswrapper[4982]: I0122 06:10:10.211444 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Jan 22 06:10:10 crc kubenswrapper[4982]: I0122 06:10:10.216573 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Jan 22 06:10:18 crc kubenswrapper[4982]: I0122 06:10:18.973732 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 06:10:18 crc kubenswrapper[4982]: I0122 06:10:18.974432 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 06:10:18 crc kubenswrapper[4982]: I0122 06:10:18.974497 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx"
Jan 22 06:10:18 crc kubenswrapper[4982]: I0122 06:10:18.975521 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7453d7468d19fce4345431851e8d2b69d7c8b233d592ee6ece86707b120d7771"} pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 22 06:10:18 crc kubenswrapper[4982]: I0122 06:10:18.975683 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" containerID="cri-o://7453d7468d19fce4345431851e8d2b69d7c8b233d592ee6ece86707b120d7771" gracePeriod=600
Jan 22 06:10:19 crc kubenswrapper[4982]: I0122 06:10:19.307065 4982 generic.go:334] "Generic (PLEG): container finished" podID="2829369e-72ba-4637-853b-88f5cf242a0e" containerID="7453d7468d19fce4345431851e8d2b69d7c8b233d592ee6ece86707b120d7771" exitCode=0
Jan 22 06:10:19 crc kubenswrapper[4982]: I0122 06:10:19.307148 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerDied","Data":"7453d7468d19fce4345431851e8d2b69d7c8b233d592ee6ece86707b120d7771"}
Jan 22 06:10:19 crc kubenswrapper[4982]: I0122 06:10:19.307640 4982 scope.go:117] "RemoveContainer" containerID="e9300e314dc7d0c41bd73af6871e2c53b183fc3419e142f570e6c588d56f45bc"
Jan 22 06:10:20 crc kubenswrapper[4982]: I0122 06:10:20.315773 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"bd26b2fce69a0bbe494a0b167df0474522baf189df9031f3cbf4529bc59d8ad2"}
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.102032 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"]
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.102949 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="46bc1b22-5f45-461f-b090-9c13dd126a6f" containerName="openstackclient" containerID="cri-o://c5436ddb9b05921d7b248ae7c45b009c65f9ad0e3d87b2a6e20de5649be61f53" gracePeriod=2
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.131468 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"]
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.242618 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.292913 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.293746 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="abef47d7-89c0-4c16-8474-8db3d2d127e7" containerName="openstack-network-exporter" containerID="cri-o://7ba38d343082ea5a0fb5e591849b42a158f94d9fd61d91a4c98690174b1433f4" gracePeriod=300
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.314914 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-4mft6"]
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.339068 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-4mft6"]
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.356919 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"]
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.357181 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="0ce93171-ff81-4a46-9813-2807930a945c" containerName="ovn-northd" containerID="cri-o://752892d037f338ef999fab998236c266729937268421b318ed21e35e0b563dde" gracePeriod=30
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.357324 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="0ce93171-ff81-4a46-9813-2807930a945c" containerName="openstack-network-exporter" containerID="cri-o://a14282bce1f209b3f1b5d12076473a67dc4c3e837318dad27ea3002e390683ec" gracePeriod=30
Jan 22 06:10:32 crc kubenswrapper[4982]: E0122 06:10:32.374847 4982 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found
Jan 22 06:10:32 crc kubenswrapper[4982]: E0122 06:10:32.374918 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7623a66b-412d-4202-bd05-58fba1c6a3d3-config-data podName:7623a66b-412d-4202-bd05-58fba1c6a3d3 nodeName:}" failed. No retries permitted until 2026-01-22 06:10:32.874897611 +0000 UTC m=+1493.713535604 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/7623a66b-412d-4202-bd05-58fba1c6a3d3-config-data") pod "rabbitmq-server-0" (UID: "7623a66b-412d-4202-bd05-58fba1c6a3d3") : configmap "rabbitmq-config-data" not found
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.375471 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-b7g85"]
Jan 22 06:10:32 crc kubenswrapper[4982]: E0122 06:10:32.375786 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46bc1b22-5f45-461f-b090-9c13dd126a6f" containerName="openstackclient"
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.375801 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="46bc1b22-5f45-461f-b090-9c13dd126a6f" containerName="openstackclient"
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.376014 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="46bc1b22-5f45-461f-b090-9c13dd126a6f" containerName="openstackclient"
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.376555 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-b7g85"
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.383270 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret"
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.413214 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-b7g85"]
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.444254 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-a8f0-account-create-update-nclgk"]
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.445637 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-a8f0-account-create-update-nclgk"
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.451054 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret"
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.476610 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bff36695-f9e7-4506-95b2-a1c99296fe75-operator-scripts\") pod \"barbican-a8f0-account-create-update-nclgk\" (UID: \"bff36695-f9e7-4506-95b2-a1c99296fe75\") " pod="openstack/barbican-a8f0-account-create-update-nclgk"
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.476930 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wbmf\" (UniqueName: \"kubernetes.io/projected/bff36695-f9e7-4506-95b2-a1c99296fe75-kube-api-access-4wbmf\") pod \"barbican-a8f0-account-create-update-nclgk\" (UID: \"bff36695-f9e7-4506-95b2-a1c99296fe75\") " pod="openstack/barbican-a8f0-account-create-update-nclgk"
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.477023 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-962fk\" (UniqueName: \"kubernetes.io/projected/9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21-kube-api-access-962fk\") pod \"root-account-create-update-b7g85\" (UID: \"9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21\") " pod="openstack/root-account-create-update-b7g85"
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.477059 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21-operator-scripts\") pod \"root-account-create-update-b7g85\" (UID: \"9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21\") " pod="openstack/root-account-create-update-b7g85"
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.486662 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-a8f0-account-create-update-nclgk"]
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.591020 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="abef47d7-89c0-4c16-8474-8db3d2d127e7" containerName="ovsdbserver-sb" containerID="cri-o://44180703ae3b87e41617dc3bc742fc8099ef79e67de1a6b50b2b21f0fdc9f2bc" gracePeriod=300
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.629406 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bff36695-f9e7-4506-95b2-a1c99296fe75-operator-scripts\") pod \"barbican-a8f0-account-create-update-nclgk\" (UID: \"bff36695-f9e7-4506-95b2-a1c99296fe75\") " pod="openstack/barbican-a8f0-account-create-update-nclgk"
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.629487 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wbmf\" (UniqueName: \"kubernetes.io/projected/bff36695-f9e7-4506-95b2-a1c99296fe75-kube-api-access-4wbmf\") pod \"barbican-a8f0-account-create-update-nclgk\" (UID: \"bff36695-f9e7-4506-95b2-a1c99296fe75\") " pod="openstack/barbican-a8f0-account-create-update-nclgk"
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.629736 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-962fk\" (UniqueName: \"kubernetes.io/projected/9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21-kube-api-access-962fk\") pod \"root-account-create-update-b7g85\" (UID: \"9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21\") " pod="openstack/root-account-create-update-b7g85"
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.629814 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21-operator-scripts\") pod \"root-account-create-update-b7g85\" (UID: \"9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21\") " pod="openstack/root-account-create-update-b7g85"
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.630681 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21-operator-scripts\") pod \"root-account-create-update-b7g85\" (UID: \"9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21\") " pod="openstack/root-account-create-update-b7g85"
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.632328 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bff36695-f9e7-4506-95b2-a1c99296fe75-operator-scripts\") pod \"barbican-a8f0-account-create-update-nclgk\" (UID: \"bff36695-f9e7-4506-95b2-a1c99296fe75\") " pod="openstack/barbican-a8f0-account-create-update-nclgk"
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.660695 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-a8f0-account-create-update-2nvzk"]
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.669490 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-962fk\" (UniqueName: \"kubernetes.io/projected/9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21-kube-api-access-962fk\") pod \"root-account-create-update-b7g85\" (UID: \"9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21\") " pod="openstack/root-account-create-update-b7g85"
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.671294 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wbmf\" (UniqueName: \"kubernetes.io/projected/bff36695-f9e7-4506-95b2-a1c99296fe75-kube-api-access-4wbmf\") pod \"barbican-a8f0-account-create-update-nclgk\" (UID: \"bff36695-f9e7-4506-95b2-a1c99296fe75\") " pod="openstack/barbican-a8f0-account-create-update-nclgk"
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.728071 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-a8f0-account-create-update-2nvzk"]
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.750348 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.766928 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-a886-account-create-update-l7kpg"]
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.768569 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-a886-account-create-update-l7kpg"
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.770240 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-a886-account-create-update-l7kpg"]
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.782636 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-b7g85"
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.784909 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-vtfmh"]
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.793461 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-4641-account-create-update-dtcq4"]
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.811967 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret"
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.840693 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-4641-account-create-update-dtcq4"]
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.880026 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-777f-account-create-update-ssrcg"]
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.881682 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-777f-account-create-update-ssrcg"
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.899479 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret"
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.914330 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-a8f0-account-create-update-nclgk"
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.918596 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-vtfmh"]
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.935512 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-885d6\" (UniqueName: \"kubernetes.io/projected/66a74d17-3793-4757-b423-503ee60fc4d5-kube-api-access-885d6\") pod \"nova-api-a886-account-create-update-l7kpg\" (UID: \"66a74d17-3793-4757-b423-503ee60fc4d5\") " pod="openstack/nova-api-a886-account-create-update-l7kpg"
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.935551 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66a74d17-3793-4757-b423-503ee60fc4d5-operator-scripts\") pod \"nova-api-a886-account-create-update-l7kpg\" (UID: \"66a74d17-3793-4757-b423-503ee60fc4d5\") " pod="openstack/nova-api-a886-account-create-update-l7kpg"
Jan 22 06:10:32 crc kubenswrapper[4982]: E0122 06:10:32.940677 4982 configmap.go:193] Couldn't get configMap openstack/ovncontroller-metrics-config: configmap "ovncontroller-metrics-config" not found
Jan 22 06:10:32 crc kubenswrapper[4982]: E0122 06:10:32.940740 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/98d29241-70c5-488d-826a-c37971689e5c-config podName:98d29241-70c5-488d-826a-c37971689e5c nodeName:}" failed. No retries permitted until 2026-01-22 06:10:33.440723723 +0000 UTC m=+1494.279361726 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/98d29241-70c5-488d-826a-c37971689e5c-config") pod "ovn-controller-metrics-w7b5x" (UID: "98d29241-70c5-488d-826a-c37971689e5c") : configmap "ovncontroller-metrics-config" not found
Jan 22 06:10:32 crc kubenswrapper[4982]: E0122 06:10:32.942440 4982 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found
Jan 22 06:10:32 crc kubenswrapper[4982]: E0122 06:10:32.942474 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/bb198f24-94e6-4569-be12-9ee57000a3e3-config-data podName:bb198f24-94e6-4569-be12-9ee57000a3e3 nodeName:}" failed. No retries permitted until 2026-01-22 06:10:33.44246602 +0000 UTC m=+1494.281104023 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/bb198f24-94e6-4569-be12-9ee57000a3e3-config-data") pod "rabbitmq-cell1-server-0" (UID: "bb198f24-94e6-4569-be12-9ee57000a3e3") : configmap "rabbitmq-cell1-config-data" not found
Jan 22 06:10:32 crc kubenswrapper[4982]: E0122 06:10:32.942508 4982 configmap.go:193] Couldn't get configMap openstack/ovndbcluster-nb-scripts: configmap "ovndbcluster-nb-scripts" not found
Jan 22 06:10:32 crc kubenswrapper[4982]: E0122 06:10:32.942524 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ef18a198-b47b-4e19-a56b-7179e76a1e12-scripts podName:ef18a198-b47b-4e19-a56b-7179e76a1e12 nodeName:}" failed. No retries permitted until 2026-01-22 06:10:33.442519001 +0000 UTC m=+1494.281156994 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/configmap/ef18a198-b47b-4e19-a56b-7179e76a1e12-scripts") pod "ovsdbserver-nb-0" (UID: "ef18a198-b47b-4e19-a56b-7179e76a1e12") : configmap "ovndbcluster-nb-scripts" not found
Jan 22 06:10:32 crc kubenswrapper[4982]: E0122 06:10:32.942549 4982 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found
Jan 22 06:10:32 crc kubenswrapper[4982]: E0122 06:10:32.942565 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7623a66b-412d-4202-bd05-58fba1c6a3d3-config-data podName:7623a66b-412d-4202-bd05-58fba1c6a3d3 nodeName:}" failed. No retries permitted until 2026-01-22 06:10:33.942560592 +0000 UTC m=+1494.781198605 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/7623a66b-412d-4202-bd05-58fba1c6a3d3-config-data") pod "rabbitmq-server-0" (UID: "7623a66b-412d-4202-bd05-58fba1c6a3d3") : configmap "rabbitmq-config-data" not found
Jan 22 06:10:32 crc kubenswrapper[4982]: E0122 06:10:32.942885 4982 configmap.go:193] Couldn't get configMap openstack/ovndbcluster-nb-config: configmap "ovndbcluster-nb-config" not found
Jan 22 06:10:32 crc kubenswrapper[4982]: E0122 06:10:32.942953 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ef18a198-b47b-4e19-a56b-7179e76a1e12-config podName:ef18a198-b47b-4e19-a56b-7179e76a1e12 nodeName:}" failed. No retries permitted until 2026-01-22 06:10:33.442934902 +0000 UTC m=+1494.281572975 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/ef18a198-b47b-4e19-a56b-7179e76a1e12-config") pod "ovsdbserver-nb-0" (UID: "ef18a198-b47b-4e19-a56b-7179e76a1e12") : configmap "ovndbcluster-nb-config" not found
Jan 22 06:10:32 crc kubenswrapper[4982]: I0122 06:10:32.979223 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-j4fnn"]
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.014934 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-777f-account-create-update-ssrcg"]
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.053327 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-j4fnn"]
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.054482 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmbp6\" (UniqueName: \"kubernetes.io/projected/dffeeb11-fe93-4cb1-ad5c-e77fbe664a70-kube-api-access-fmbp6\") pod \"nova-cell0-777f-account-create-update-ssrcg\" (UID: \"dffeeb11-fe93-4cb1-ad5c-e77fbe664a70\") " pod="openstack/nova-cell0-777f-account-create-update-ssrcg"
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.054532 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-885d6\" (UniqueName: \"kubernetes.io/projected/66a74d17-3793-4757-b423-503ee60fc4d5-kube-api-access-885d6\") pod \"nova-api-a886-account-create-update-l7kpg\" (UID: \"66a74d17-3793-4757-b423-503ee60fc4d5\") " pod="openstack/nova-api-a886-account-create-update-l7kpg"
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.054568 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66a74d17-3793-4757-b423-503ee60fc4d5-operator-scripts\") pod \"nova-api-a886-account-create-update-l7kpg\" (UID: \"66a74d17-3793-4757-b423-503ee60fc4d5\") " pod="openstack/nova-api-a886-account-create-update-l7kpg"
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.054611 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dffeeb11-fe93-4cb1-ad5c-e77fbe664a70-operator-scripts\") pod \"nova-cell0-777f-account-create-update-ssrcg\" (UID: \"dffeeb11-fe93-4cb1-ad5c-e77fbe664a70\") " pod="openstack/nova-cell0-777f-account-create-update-ssrcg"
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.055761 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66a74d17-3793-4757-b423-503ee60fc4d5-operator-scripts\") pod \"nova-api-a886-account-create-update-l7kpg\" (UID: \"66a74d17-3793-4757-b423-503ee60fc4d5\") " pod="openstack/nova-api-a886-account-create-update-l7kpg"
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.098477 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-a9e6-account-create-update-nmnpj"]
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.115469 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-885d6\" (UniqueName: \"kubernetes.io/projected/66a74d17-3793-4757-b423-503ee60fc4d5-kube-api-access-885d6\") pod \"nova-api-a886-account-create-update-l7kpg\" (UID: \"66a74d17-3793-4757-b423-503ee60fc4d5\") " pod="openstack/nova-api-a886-account-create-update-l7kpg"
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.117048 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-w7b5x"]
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.127394 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-a9e6-account-create-update-nmnpj"]
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.130640 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-a886-account-create-update-l7kpg"
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.161480 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-4z48g"]
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.176929 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-74zx8"]
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.182821 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dffeeb11-fe93-4cb1-ad5c-e77fbe664a70-operator-scripts\") pod \"nova-cell0-777f-account-create-update-ssrcg\" (UID: \"dffeeb11-fe93-4cb1-ad5c-e77fbe664a70\") " pod="openstack/nova-cell0-777f-account-create-update-ssrcg"
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.183152 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmbp6\" (UniqueName: \"kubernetes.io/projected/dffeeb11-fe93-4cb1-ad5c-e77fbe664a70-kube-api-access-fmbp6\") pod \"nova-cell0-777f-account-create-update-ssrcg\" (UID: \"dffeeb11-fe93-4cb1-ad5c-e77fbe664a70\") " pod="openstack/nova-cell0-777f-account-create-update-ssrcg"
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.184144 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dffeeb11-fe93-4cb1-ad5c-e77fbe664a70-operator-scripts\") pod \"nova-cell0-777f-account-create-update-ssrcg\" (UID: \"dffeeb11-fe93-4cb1-ad5c-e77fbe664a70\") " pod="openstack/nova-cell0-777f-account-create-update-ssrcg"
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.250924 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fmbp6\" (UniqueName: \"kubernetes.io/projected/dffeeb11-fe93-4cb1-ad5c-e77fbe664a70-kube-api-access-fmbp6\") pod \"nova-cell0-777f-account-create-update-ssrcg\" (UID: \"dffeeb11-fe93-4cb1-ad5c-e77fbe664a70\") " pod="openstack/nova-cell0-777f-account-create-update-ssrcg"
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.264916 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fcd6f8f8f-4kgmx"]
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.265196 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-fcd6f8f8f-4kgmx" podUID="b3aeab92-5988-4e7b-9fcf-62c13e364f98" containerName="dnsmasq-dns" containerID="cri-o://c57a66ba5563b4184480525b497fbf6412201b1822a40a8b542aa66de3ec6598" gracePeriod=10
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.292835 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-bda6-account-create-update-jwx8g"]
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.307016 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-bda6-account-create-update-jwx8g"]
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.307106 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-bda6-account-create-update-jwx8g"
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.320618 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret"
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.322319 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-a886-account-create-update-5tlsd"]
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.367065 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-a886-account-create-update-5tlsd"]
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.404551 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de93226c-14b2-42b2-a553-2535096d7b81-operator-scripts\") pod \"nova-cell1-bda6-account-create-update-jwx8g\" (UID: \"de93226c-14b2-42b2-a553-2535096d7b81\") " pod="openstack/nova-cell1-bda6-account-create-update-jwx8g"
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.404828 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n78hz\" (UniqueName: \"kubernetes.io/projected/de93226c-14b2-42b2-a553-2535096d7b81-kube-api-access-n78hz\") pod \"nova-cell1-bda6-account-create-update-jwx8g\" (UID: \"de93226c-14b2-42b2-a553-2535096d7b81\") " pod="openstack/nova-cell1-bda6-account-create-update-jwx8g"
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.405040 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-z7b8c"]
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.497434 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-z7b8c"]
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.506129 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n78hz\" (UniqueName: \"kubernetes.io/projected/de93226c-14b2-42b2-a553-2535096d7b81-kube-api-access-n78hz\") pod \"nova-cell1-bda6-account-create-update-jwx8g\" (UID: \"de93226c-14b2-42b2-a553-2535096d7b81\") " pod="openstack/nova-cell1-bda6-account-create-update-jwx8g"
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.506341 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de93226c-14b2-42b2-a553-2535096d7b81-operator-scripts\") pod \"nova-cell1-bda6-account-create-update-jwx8g\" (UID: \"de93226c-14b2-42b2-a553-2535096d7b81\") " pod="openstack/nova-cell1-bda6-account-create-update-jwx8g"
Jan 22 06:10:33 crc kubenswrapper[4982]: E0122 06:10:33.506425 4982 configmap.go:193] Couldn't get configMap openstack/ovncontroller-metrics-config: configmap "ovncontroller-metrics-config" not found
Jan 22 06:10:33 crc kubenswrapper[4982]: E0122 06:10:33.506461 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/98d29241-70c5-488d-826a-c37971689e5c-config podName:98d29241-70c5-488d-826a-c37971689e5c nodeName:}" failed. No retries permitted until 2026-01-22 06:10:34.506448303 +0000 UTC m=+1495.345086306 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/98d29241-70c5-488d-826a-c37971689e5c-config") pod "ovn-controller-metrics-w7b5x" (UID: "98d29241-70c5-488d-826a-c37971689e5c") : configmap "ovncontroller-metrics-config" not found
Jan 22 06:10:33 crc kubenswrapper[4982]: E0122 06:10:33.506476 4982 configmap.go:193] Couldn't get configMap openstack/ovndbcluster-nb-scripts: configmap "ovndbcluster-nb-scripts" not found
Jan 22 06:10:33 crc kubenswrapper[4982]: E0122 06:10:33.506505 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ef18a198-b47b-4e19-a56b-7179e76a1e12-scripts podName:ef18a198-b47b-4e19-a56b-7179e76a1e12 nodeName:}" failed. No retries permitted until 2026-01-22 06:10:34.506495815 +0000 UTC m=+1495.345133818 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/configmap/ef18a198-b47b-4e19-a56b-7179e76a1e12-scripts") pod "ovsdbserver-nb-0" (UID: "ef18a198-b47b-4e19-a56b-7179e76a1e12") : configmap "ovndbcluster-nb-scripts" not found
Jan 22 06:10:33 crc kubenswrapper[4982]: E0122 06:10:33.506531 4982 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found
Jan 22 06:10:33 crc kubenswrapper[4982]: E0122 06:10:33.506549 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/bb198f24-94e6-4569-be12-9ee57000a3e3-config-data podName:bb198f24-94e6-4569-be12-9ee57000a3e3 nodeName:}" failed. No retries permitted until 2026-01-22 06:10:34.506542926 +0000 UTC m=+1495.345180919 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/bb198f24-94e6-4569-be12-9ee57000a3e3-config-data") pod "rabbitmq-cell1-server-0" (UID: "bb198f24-94e6-4569-be12-9ee57000a3e3") : configmap "rabbitmq-cell1-config-data" not found
Jan 22 06:10:33 crc kubenswrapper[4982]: E0122 06:10:33.506676 4982 configmap.go:193] Couldn't get configMap openstack/ovndbcluster-nb-config: configmap "ovndbcluster-nb-config" not found
Jan 22 06:10:33 crc kubenswrapper[4982]: E0122 06:10:33.506697 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ef18a198-b47b-4e19-a56b-7179e76a1e12-config podName:ef18a198-b47b-4e19-a56b-7179e76a1e12 nodeName:}" failed. No retries permitted until 2026-01-22 06:10:34.50669022 +0000 UTC m=+1495.345328223 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/ef18a198-b47b-4e19-a56b-7179e76a1e12-config") pod "ovsdbserver-nb-0" (UID: "ef18a198-b47b-4e19-a56b-7179e76a1e12") : configmap "ovndbcluster-nb-config" not found
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.507240 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de93226c-14b2-42b2-a553-2535096d7b81-operator-scripts\") pod \"nova-cell1-bda6-account-create-update-jwx8g\" (UID: \"de93226c-14b2-42b2-a553-2535096d7b81\") " pod="openstack/nova-cell1-bda6-account-create-update-jwx8g"
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.529450 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_abef47d7-89c0-4c16-8474-8db3d2d127e7/ovsdbserver-sb/0.log"
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.529492 4982 generic.go:334] "Generic (PLEG): container finished" podID="abef47d7-89c0-4c16-8474-8db3d2d127e7" containerID="7ba38d343082ea5a0fb5e591849b42a158f94d9fd61d91a4c98690174b1433f4" exitCode=2
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.529506 4982 generic.go:334] "Generic (PLEG): container finished" podID="abef47d7-89c0-4c16-8474-8db3d2d127e7" containerID="44180703ae3b87e41617dc3bc742fc8099ef79e67de1a6b50b2b21f0fdc9f2bc" exitCode=143
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.529546 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"abef47d7-89c0-4c16-8474-8db3d2d127e7","Type":"ContainerDied","Data":"7ba38d343082ea5a0fb5e591849b42a158f94d9fd61d91a4c98690174b1433f4"}
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.529571 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"abef47d7-89c0-4c16-8474-8db3d2d127e7","Type":"ContainerDied","Data":"44180703ae3b87e41617dc3bc742fc8099ef79e67de1a6b50b2b21f0fdc9f2bc"}
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.538234 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-777f-account-create-update-ssrcg"
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.559808 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n78hz\" (UniqueName: \"kubernetes.io/projected/de93226c-14b2-42b2-a553-2535096d7b81-kube-api-access-n78hz\") pod \"nova-cell1-bda6-account-create-update-jwx8g\" (UID: \"de93226c-14b2-42b2-a553-2535096d7b81\") " pod="openstack/nova-cell1-bda6-account-create-update-jwx8g"
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.562127 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.582175 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-777f-account-create-update-r5m24"]
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.588712 4982 generic.go:334] "Generic (PLEG): container finished" podID="0ce93171-ff81-4a46-9813-2807930a945c" containerID="a14282bce1f209b3f1b5d12076473a67dc4c3e837318dad27ea3002e390683ec" exitCode=2
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.589015 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"0ce93171-ff81-4a46-9813-2807930a945c","Type":"ContainerDied","Data":"a14282bce1f209b3f1b5d12076473a67dc4c3e837318dad27ea3002e390683ec"}
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.589113 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-metrics-w7b5x" podUID="98d29241-70c5-488d-826a-c37971689e5c" containerName="openstack-network-exporter" containerID="cri-o://62e9a0e84f780bc9d5109a47a1a3b4b18ffc53274df69cc65efda0d894c70ec0" gracePeriod=30
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.589447 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="ef18a198-b47b-4e19-a56b-7179e76a1e12" containerName="openstack-network-exporter" containerID="cri-o://2e5d780276258df492d4681170a8192ba89d1859402687fc4d4b1699560bb858" gracePeriod=300
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.613645 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-777f-account-create-update-r5m24"]
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.640426 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-gms7h"]
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.652620 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-bda6-account-create-update-jwx8g"
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.656895 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-gms7h"]
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.673013 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-bda6-account-create-update-p5xg6"]
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.740645 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-bda6-account-create-update-p5xg6"]
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.742040 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="ef18a198-b47b-4e19-a56b-7179e76a1e12" containerName="ovsdbserver-nb" containerID="cri-o://44293f881aeb45c5c60b9c080fb0120c9f3a061e77ce519b40e24281772cc8b3" gracePeriod=300
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.837670 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f97eee4-4c10-4f1e-8173-2f8d1c955049" path="/var/lib/kubelet/pods/1f97eee4-4c10-4f1e-8173-2f8d1c955049/volumes"
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.838823 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f867733-84f1-4d24-8b1d-a46f5f9a6ff9" path="/var/lib/kubelet/pods/2f867733-84f1-4d24-8b1d-a46f5f9a6ff9/volumes"
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.880294 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c49756b-5f06-470b-9bc5-281b5bfbb198" path="/var/lib/kubelet/pods/4c49756b-5f06-470b-9bc5-281b5bfbb198/volumes"
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.881239 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4cae5a46-3cc2-4b42-bf1b-4c429c489f81" path="/var/lib/kubelet/pods/4cae5a46-3cc2-4b42-bf1b-4c429c489f81/volumes"
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.881726 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5263bf07-e4f4-41e6-8304-748716faf10c" path="/var/lib/kubelet/pods/5263bf07-e4f4-41e6-8304-748716faf10c/volumes"
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.901315 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59ce449e-073f-4c66-ade6-fa8448573827" path="/var/lib/kubelet/pods/59ce449e-073f-4c66-ade6-fa8448573827/volumes"
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.901879 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6279d28e-2c9c-4e83-9db9-db105f664da4" path="/var/lib/kubelet/pods/6279d28e-2c9c-4e83-9db9-db105f664da4/volumes"
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.902393 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="889acc8d-10d5-490f-b9b7-d0514b16e62f" path="/var/lib/kubelet/pods/889acc8d-10d5-490f-b9b7-d0514b16e62f/volumes"
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.919001 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae2b03b5-a7a9-4da1-bf23-8b589c508d26" path="/var/lib/kubelet/pods/ae2b03b5-a7a9-4da1-bf23-8b589c508d26/volumes"
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.919552 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4ab262f-f627-4891-9fd7-efc5e0e8a2eb" path="/var/lib/kubelet/pods/d4ab262f-f627-4891-9fd7-efc5e0e8a2eb/volumes"
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.920144 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f985360b-8866-457a-953d-2d15a1e2d20e" path="/var/lib/kubelet/pods/f985360b-8866-457a-953d-2d15a1e2d20e/volumes"
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.920651 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-rsp2k"]
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.920676 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-rsp2k"]
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.920722 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-664cd4b9c4-ccktp"]
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.920735 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"]
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.920950 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="fd1c632c-d2c0-4f55-9727-af2ffbe1feef" containerName="cinder-scheduler" containerID="cri-o://f9572435d6750a8ad99f0d2024d200987deec76a44ffc144a2134979ec9cd427" gracePeriod=30
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.921432 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="fd1c632c-d2c0-4f55-9727-af2ffbe1feef" containerName="probe" containerID="cri-o://a568fc05e6d1bc9ef6cd2254326be3e07d0040651777d6881228c612c03c305d" gracePeriod=30
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.935115 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-664cd4b9c4-ccktp" podUID="7ba39683-f142-405a-a1c8-83841c5b2cd0" containerName="placement-log" containerID="cri-o://c7f03d3fdd742f290077c8c859e03cbb33c8046802e0420e28b5720902b21897" gracePeriod=30
Jan 22 06:10:33 crc kubenswrapper[4982]: I0122 06:10:33.935266 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-664cd4b9c4-ccktp" podUID="7ba39683-f142-405a-a1c8-83841c5b2cd0" containerName="placement-api" containerID="cri-o://eef7e59c675432bec99f0f98a0e39c55d8cace43d4af135404063d40d0eaba03" gracePeriod=30
Jan 22 06:10:33 crc kubenswrapper[4982]: E0122 06:10:33.982040 4982 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found
Jan 22 06:10:33 crc kubenswrapper[4982]: E0122 06:10:33.982117 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7623a66b-412d-4202-bd05-58fba1c6a3d3-config-data podName:7623a66b-412d-4202-bd05-58fba1c6a3d3 nodeName:}" failed. No retries permitted until 2026-01-22 06:10:35.982084931 +0000 UTC m=+1496.820722934 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/7623a66b-412d-4202-bd05-58fba1c6a3d3-config-data") pod "rabbitmq-server-0" (UID: "7623a66b-412d-4202-bd05-58fba1c6a3d3") : configmap "rabbitmq-config-data" not found
Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.007077 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.035223 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-r66tz"]
Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.035514 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="0be45e9e-2ce7-4c9b-abee-2e0249e6e53e" containerName="cinder-api-log" containerID="cri-o://08893a5df354c2f32c7b9ca210a4a1f261a5ebf3545f01c8f59952a61d346ecb" gracePeriod=30
Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.036032 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="0be45e9e-2ce7-4c9b-abee-2e0249e6e53e" containerName="cinder-api" containerID="cri-o://9e43b1e70c6df504b6660b1a17c284b8c0388a6042530649a62f0a956e4e4d73" gracePeriod=30
Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.057668 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-r66tz"]
Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.112735 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"]
Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.120460 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="account-server" containerID="cri-o://78cb51cdcbb32c7c566dd440fe3eddf657c1d8fa7d725c7170c6f86381854c75" gracePeriod=30
Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.121287 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="swift-recon-cron" containerID="cri-o://21acccb66fe5a7601f47f21bf74eb39ca16b5d4458bb059e53d0b376d47e4e6e" gracePeriod=30
Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.121392 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="rsync" containerID="cri-o://b878ca638cdba06e768d1e1407882909e1a9532997c7390f2d2cc87458ce86aa" gracePeriod=30
Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.121437 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="object-expirer" containerID="cri-o://310a52e8c276d358ad53746988709b191603600640992af9703c7c83f46a3101" gracePeriod=30
Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.121470 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="object-updater" containerID="cri-o://a386b99b03239bfd525b9cda2c117bcba26367449801201fb1b1a6f2e03e42aa" gracePeriod=30
Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.121558 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="object-auditor" containerID="cri-o://45257d2dda5df21dd5c33ca01b27b1606dde8ba4b98ce40f6c5bf2094829e0de" gracePeriod=30
Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.121624 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="object-replicator" containerID="cri-o://2d157d8183c5ba1a3484d5097ce18a1c0be897ffeadde7df10cfd7adde58b171" gracePeriod=30
Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.121689 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="object-server" containerID="cri-o://e4748ea7b210414126713179ef219e82a0abc3f56fde6ec484724799d53bea45" gracePeriod=30
Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.124591 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="container-server" containerID="cri-o://f0e4c628f7ebdabca21ce9e2c5341af6fc155bfe4b2913c68c56bd267c6a4629" gracePeriod=30
Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.124723 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="container-updater" containerID="cri-o://426ef7c3195021e87134941727b96e87ac5a055405603c85838becb9891299c1" gracePeriod=30
Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.124760 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="container-auditor" containerID="cri-o://f4db4555825c7a98050d3fa64f127578b1f2415604cf41cb3cb729a79fc23f72" gracePeriod=30
Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.124790 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="container-replicator" containerID="cri-o://be63c0d0c6adf69366e7b0baa98c3881df8d90e7905a24b623cae778f04a2eed" gracePeriod=30
Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.124833 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="account-auditor" containerID="cri-o://95bd78bf0085807f031d31a6e3f8ecd58ff306054033420b516a89b6c828e39b" gracePeriod=30
Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.124883 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="account-reaper" containerID="cri-o://0c83088039377322228a75312f73aef354b452dc32b72f3e7422b93e15270c5a" gracePeriod=30
Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.124919 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="account-replicator" containerID="cri-o://4a16824db8797178b4f0e9143a8c7fd2a4c2b43cd6bdf358987351b835a9dfa3" gracePeriod=30
Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.286921 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-cpdrg"]
Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.316422 4982 kubelet.go:2431] "SyncLoop
REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-cpdrg"] Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.319715 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_abef47d7-89c0-4c16-8474-8db3d2d127e7/ovsdbserver-sb/0.log" Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.319795 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.347438 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-vqgqq"] Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.358040 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-vqgqq"] Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.366521 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-8a3c-account-create-update-q6nqg"] Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.385526 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-8a3c-account-create-update-q6nqg"] Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.396401 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/abef47d7-89c0-4c16-8474-8db3d2d127e7-scripts\") pod \"abef47d7-89c0-4c16-8474-8db3d2d127e7\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") " Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.396474 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"abef47d7-89c0-4c16-8474-8db3d2d127e7\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") " Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.396522 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abef47d7-89c0-4c16-8474-8db3d2d127e7-combined-ca-bundle\") pod \"abef47d7-89c0-4c16-8474-8db3d2d127e7\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") " Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.396562 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/abef47d7-89c0-4c16-8474-8db3d2d127e7-config\") pod \"abef47d7-89c0-4c16-8474-8db3d2d127e7\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") " Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.396644 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ms8wf\" (UniqueName: \"kubernetes.io/projected/abef47d7-89c0-4c16-8474-8db3d2d127e7-kube-api-access-ms8wf\") pod \"abef47d7-89c0-4c16-8474-8db3d2d127e7\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") " Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.396664 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/abef47d7-89c0-4c16-8474-8db3d2d127e7-ovsdbserver-sb-tls-certs\") pod \"abef47d7-89c0-4c16-8474-8db3d2d127e7\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") " Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.396683 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/abef47d7-89c0-4c16-8474-8db3d2d127e7-metrics-certs-tls-certs\") pod 
\"abef47d7-89c0-4c16-8474-8db3d2d127e7\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") " Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.396719 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/abef47d7-89c0-4c16-8474-8db3d2d127e7-ovsdb-rundir\") pod \"abef47d7-89c0-4c16-8474-8db3d2d127e7\" (UID: \"abef47d7-89c0-4c16-8474-8db3d2d127e7\") " Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.402327 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abef47d7-89c0-4c16-8474-8db3d2d127e7-scripts" (OuterVolumeSpecName: "scripts") pod "abef47d7-89c0-4c16-8474-8db3d2d127e7" (UID: "abef47d7-89c0-4c16-8474-8db3d2d127e7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.403148 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-74zx8" podUID="e8478549-adf2-4e04-b404-f4882ed405d5" containerName="ovs-vswitchd" containerID="cri-o://d07fbec73b82d46cf6bca3ce65be1c95f30a0ec4db97b397ade4d734c12b4c66" gracePeriod=29 Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.407033 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abef47d7-89c0-4c16-8474-8db3d2d127e7-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "abef47d7-89c0-4c16-8474-8db3d2d127e7" (UID: "abef47d7-89c0-4c16-8474-8db3d2d127e7"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.407406 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.407622 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="8d77f6f1-5c7d-45e5-92e5-8e333d91c020" containerName="glance-log" containerID="cri-o://2402516dd72b79b7045a0fea1c29ce7e8868f7e5288ff8c3baf2331b568a9fc7" gracePeriod=30 Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.408100 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abef47d7-89c0-4c16-8474-8db3d2d127e7-config" (OuterVolumeSpecName: "config") pod "abef47d7-89c0-4c16-8474-8db3d2d127e7" (UID: "abef47d7-89c0-4c16-8474-8db3d2d127e7"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.408114 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="8d77f6f1-5c7d-45e5-92e5-8e333d91c020" containerName="glance-httpd" containerID="cri-o://92aa96879b03afd4dc777ec25243c37e4ffc13dea924e62623d2262a8332c39f" gracePeriod=30 Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.453955 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.454513 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="ac0dc6e4-35d9-4d06-93af-a8758648aa13" containerName="glance-log" containerID="cri-o://c3a1f31f2494aee180667a0f03331fe892eadeaa196337253d864cc858d0d3f1" gracePeriod=30 Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.454838 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="ac0dc6e4-35d9-4d06-93af-a8758648aa13" containerName="glance-httpd" containerID="cri-o://33fb07736ff02fa3a7d27e62002c81a800f65f43ed61da9bdbe44cbe612b4ebe" gracePeriod=30 Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.475051 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "abef47d7-89c0-4c16-8474-8db3d2d127e7" (UID: "abef47d7-89c0-4c16-8474-8db3d2d127e7"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.479276 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-7vm6q"] Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.501732 4982 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.501766 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/abef47d7-89c0-4c16-8474-8db3d2d127e7-config\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.501776 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/abef47d7-89c0-4c16-8474-8db3d2d127e7-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.501786 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/abef47d7-89c0-4c16-8474-8db3d2d127e7-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.510019 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-7vm6q"] Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.518480 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abef47d7-89c0-4c16-8474-8db3d2d127e7-kube-api-access-ms8wf" (OuterVolumeSpecName: "kube-api-access-ms8wf") pod "abef47d7-89c0-4c16-8474-8db3d2d127e7" (UID: "abef47d7-89c0-4c16-8474-8db3d2d127e7"). InnerVolumeSpecName "kube-api-access-ms8wf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.538987 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-4zhph"] Jan 22 06:10:34 crc kubenswrapper[4982]: W0122 06:10:34.563458 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9fb512c6_b4d3_4b1e_93ad_b991e2b9dd21.slice/crio-0a3d0fd591175de11974115910232896e7d5b8a77f3abb830b042319dde5c9bf WatchSource:0}: Error finding container 0a3d0fd591175de11974115910232896e7d5b8a77f3abb830b042319dde5c9bf: Status 404 returned error can't find the container with id 0a3d0fd591175de11974115910232896e7d5b8a77f3abb830b042319dde5c9bf Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.565696 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-4zhph"] Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.576473 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-49e2-account-create-update-jrl9m"] Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.584344 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-49e2-account-create-update-jrl9m"] Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.591147 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.595574 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-f9754c45-wzfk5"] Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.595723 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-f9754c45-wzfk5" podUID="b6593811-6583-4900-b402-5af9db3887b3" containerName="neutron-api" containerID="cri-o://52c43ed6ae44082331dda538371f368b195ae17eec22b655bead980b3261aae5" gracePeriod=30 Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.595792 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-f9754c45-wzfk5" podUID="b6593811-6583-4900-b402-5af9db3887b3" containerName="neutron-httpd" containerID="cri-o://f1b86ad014feaae9b9be0659bf2c7dfe329f6b9ea2485bf696dc7ce0d69d2fd1" gracePeriod=30 Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.603832 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ms8wf\" (UniqueName: \"kubernetes.io/projected/abef47d7-89c0-4c16-8474-8db3d2d127e7-kube-api-access-ms8wf\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:34 crc kubenswrapper[4982]: E0122 06:10:34.603935 4982 configmap.go:193] Couldn't get configMap openstack/ovncontroller-metrics-config: configmap "ovncontroller-metrics-config" not found Jan 22 06:10:34 crc kubenswrapper[4982]: E0122 06:10:34.604029 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/98d29241-70c5-488d-826a-c37971689e5c-config podName:98d29241-70c5-488d-826a-c37971689e5c nodeName:}" failed. No retries permitted until 2026-01-22 06:10:36.604015731 +0000 UTC m=+1497.442653734 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/98d29241-70c5-488d-826a-c37971689e5c-config") pod "ovn-controller-metrics-w7b5x" (UID: "98d29241-70c5-488d-826a-c37971689e5c") : configmap "ovncontroller-metrics-config" not found Jan 22 06:10:34 crc kubenswrapper[4982]: E0122 06:10:34.604060 4982 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 22 06:10:34 crc kubenswrapper[4982]: E0122 06:10:34.604079 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/bb198f24-94e6-4569-be12-9ee57000a3e3-config-data podName:bb198f24-94e6-4569-be12-9ee57000a3e3 nodeName:}" failed. No retries permitted until 2026-01-22 06:10:36.604073582 +0000 UTC m=+1497.442711585 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/bb198f24-94e6-4569-be12-9ee57000a3e3-config-data") pod "rabbitmq-cell1-server-0" (UID: "bb198f24-94e6-4569-be12-9ee57000a3e3") : configmap "rabbitmq-cell1-config-data" not found Jan 22 06:10:34 crc kubenswrapper[4982]: E0122 06:10:34.604101 4982 configmap.go:193] Couldn't get configMap openstack/ovndbcluster-nb-scripts: configmap "ovndbcluster-nb-scripts" not found Jan 22 06:10:34 crc kubenswrapper[4982]: E0122 06:10:34.604118 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ef18a198-b47b-4e19-a56b-7179e76a1e12-scripts podName:ef18a198-b47b-4e19-a56b-7179e76a1e12 nodeName:}" failed. No retries permitted until 2026-01-22 06:10:36.604112163 +0000 UTC m=+1497.442750166 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/configmap/ef18a198-b47b-4e19-a56b-7179e76a1e12-scripts") pod "ovsdbserver-nb-0" (UID: "ef18a198-b47b-4e19-a56b-7179e76a1e12") : configmap "ovndbcluster-nb-scripts" not found Jan 22 06:10:34 crc kubenswrapper[4982]: E0122 06:10:34.604142 4982 configmap.go:193] Couldn't get configMap openstack/ovndbcluster-nb-config: configmap "ovndbcluster-nb-config" not found Jan 22 06:10:34 crc kubenswrapper[4982]: E0122 06:10:34.604157 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ef18a198-b47b-4e19-a56b-7179e76a1e12-config podName:ef18a198-b47b-4e19-a56b-7179e76a1e12 nodeName:}" failed. No retries permitted until 2026-01-22 06:10:36.604152494 +0000 UTC m=+1497.442790497 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/ef18a198-b47b-4e19-a56b-7179e76a1e12-config") pod "ovsdbserver-nb-0" (UID: "ef18a198-b47b-4e19-a56b-7179e76a1e12") : configmap "ovndbcluster-nb-config" not found Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.636278 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-x2w8g"] Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.657136 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-lkcwv"] Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.664869 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-x2w8g"] Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.666486 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_ef18a198-b47b-4e19-a56b-7179e76a1e12/ovsdbserver-nb/0.log" Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.666528 4982 generic.go:334] "Generic (PLEG): container finished" podID="ef18a198-b47b-4e19-a56b-7179e76a1e12" containerID="2e5d780276258df492d4681170a8192ba89d1859402687fc4d4b1699560bb858" exitCode=2 Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.666542 4982 generic.go:334] "Generic (PLEG): container finished" podID="ef18a198-b47b-4e19-a56b-7179e76a1e12" containerID="44293f881aeb45c5c60b9c080fb0120c9f3a061e77ce519b40e24281772cc8b3" exitCode=143 Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.666578 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"ef18a198-b47b-4e19-a56b-7179e76a1e12","Type":"ContainerDied","Data":"2e5d780276258df492d4681170a8192ba89d1859402687fc4d4b1699560bb858"} Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.666603 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"ef18a198-b47b-4e19-a56b-7179e76a1e12","Type":"ContainerDied","Data":"44293f881aeb45c5c60b9c080fb0120c9f3a061e77ce519b40e24281772cc8b3"} Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.668401 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-lkcwv"] Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.686378 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-nfq8n"] Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.686583 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-nfq8n"] Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.686586 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_abef47d7-89c0-4c16-8474-8db3d2d127e7/ovsdbserver-sb/0.log" Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.686837 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"abef47d7-89c0-4c16-8474-8db3d2d127e7","Type":"ContainerDied","Data":"7bc65f0839a686acb37f047307ab2839c7a8c568493c8cfa3c724b41c04d6baa"} Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.686937 4982 scope.go:117] "RemoveContainer" containerID="7ba38d343082ea5a0fb5e591849b42a158f94d9fd61d91a4c98690174b1433f4" Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.686900 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.696610 4982 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.696970 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-a8f0-account-create-update-nclgk"] Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.697004 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-a886-account-create-update-l7kpg"] Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.700831 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-777f-account-create-update-ssrcg"] Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.707235 4982 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.710432 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.741175 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-bda6-account-create-update-jwx8g"] Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.741526 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-b7g85"] Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.750918 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abef47d7-89c0-4c16-8474-8db3d2d127e7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "abef47d7-89c0-4c16-8474-8db3d2d127e7" (UID: "abef47d7-89c0-4c16-8474-8db3d2d127e7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.795240 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="7623a66b-412d-4202-bd05-58fba1c6a3d3" containerName="rabbitmq" containerID="cri-o://a183af96beefaed029587f49bdd19bf06abd4b4c476f917c768fe13137f46727" gracePeriod=604800 Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.805726 4982 generic.go:334] "Generic (PLEG): container finished" podID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerID="310a52e8c276d358ad53746988709b191603600640992af9703c7c83f46a3101" exitCode=0 Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.805756 4982 generic.go:334] "Generic (PLEG): container finished" podID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerID="a386b99b03239bfd525b9cda2c117bcba26367449801201fb1b1a6f2e03e42aa" exitCode=0 Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.805766 4982 generic.go:334] "Generic (PLEG): container finished" podID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerID="45257d2dda5df21dd5c33ca01b27b1606dde8ba4b98ce40f6c5bf2094829e0de" exitCode=0 Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.805772 4982 generic.go:334] "Generic (PLEG): container finished" podID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerID="2d157d8183c5ba1a3484d5097ce18a1c0be897ffeadde7df10cfd7adde58b171" exitCode=0 Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.805779 4982 generic.go:334] "Generic (PLEG): container finished" podID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerID="426ef7c3195021e87134941727b96e87ac5a055405603c85838becb9891299c1" exitCode=0 Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.805787 4982 generic.go:334] "Generic (PLEG): container finished" podID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerID="f4db4555825c7a98050d3fa64f127578b1f2415604cf41cb3cb729a79fc23f72" exitCode=0 Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.805802 4982 generic.go:334] "Generic (PLEG): container finished" podID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerID="be63c0d0c6adf69366e7b0baa98c3881df8d90e7905a24b623cae778f04a2eed" exitCode=0 Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.805809 4982 generic.go:334] "Generic (PLEG): container finished" podID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerID="0c83088039377322228a75312f73aef354b452dc32b72f3e7422b93e15270c5a" exitCode=0 Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.805815 4982 generic.go:334] "Generic (PLEG): container finished" podID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerID="95bd78bf0085807f031d31a6e3f8ecd58ff306054033420b516a89b6c828e39b" exitCode=0 Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.805821 4982 generic.go:334] "Generic (PLEG): container finished" podID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerID="4a16824db8797178b4f0e9143a8c7fd2a4c2b43cd6bdf358987351b835a9dfa3" exitCode=0 Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.805879 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0e66de65-9639-4397-bc39-dfcf0c325dff","Type":"ContainerDied","Data":"310a52e8c276d358ad53746988709b191603600640992af9703c7c83f46a3101"} Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.805912 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"0e66de65-9639-4397-bc39-dfcf0c325dff","Type":"ContainerDied","Data":"a386b99b03239bfd525b9cda2c117bcba26367449801201fb1b1a6f2e03e42aa"} Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.805923 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0e66de65-9639-4397-bc39-dfcf0c325dff","Type":"ContainerDied","Data":"45257d2dda5df21dd5c33ca01b27b1606dde8ba4b98ce40f6c5bf2094829e0de"} Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.805931 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0e66de65-9639-4397-bc39-dfcf0c325dff","Type":"ContainerDied","Data":"2d157d8183c5ba1a3484d5097ce18a1c0be897ffeadde7df10cfd7adde58b171"} Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.805947 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0e66de65-9639-4397-bc39-dfcf0c325dff","Type":"ContainerDied","Data":"426ef7c3195021e87134941727b96e87ac5a055405603c85838becb9891299c1"} Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.805956 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0e66de65-9639-4397-bc39-dfcf0c325dff","Type":"ContainerDied","Data":"f4db4555825c7a98050d3fa64f127578b1f2415604cf41cb3cb729a79fc23f72"} Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.805965 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0e66de65-9639-4397-bc39-dfcf0c325dff","Type":"ContainerDied","Data":"be63c0d0c6adf69366e7b0baa98c3881df8d90e7905a24b623cae778f04a2eed"} Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.805973 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0e66de65-9639-4397-bc39-dfcf0c325dff","Type":"ContainerDied","Data":"0c83088039377322228a75312f73aef354b452dc32b72f3e7422b93e15270c5a"} Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.805981 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0e66de65-9639-4397-bc39-dfcf0c325dff","Type":"ContainerDied","Data":"95bd78bf0085807f031d31a6e3f8ecd58ff306054033420b516a89b6c828e39b"} Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.805991 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0e66de65-9639-4397-bc39-dfcf0c325dff","Type":"ContainerDied","Data":"4a16824db8797178b4f0e9143a8c7fd2a4c2b43cd6bdf358987351b835a9dfa3"} Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.807760 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-b7g85" event={"ID":"9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21","Type":"ContainerStarted","Data":"0a3d0fd591175de11974115910232896e7d5b8a77f3abb830b042319dde5c9bf"} Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.809147 4982 generic.go:334] "Generic (PLEG): container finished" podID="0be45e9e-2ce7-4c9b-abee-2e0249e6e53e" containerID="08893a5df354c2f32c7b9ca210a4a1f261a5ebf3545f01c8f59952a61d346ecb" exitCode=143 Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.809197 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e","Type":"ContainerDied","Data":"08893a5df354c2f32c7b9ca210a4a1f261a5ebf3545f01c8f59952a61d346ecb"} Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.810722 4982 generic.go:334] "Generic (PLEG): container finished" 
podID="7ba39683-f142-405a-a1c8-83841c5b2cd0" containerID="c7f03d3fdd742f290077c8c859e03cbb33c8046802e0420e28b5720902b21897" exitCode=143 Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.810769 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-664cd4b9c4-ccktp" event={"ID":"7ba39683-f142-405a-a1c8-83841c5b2cd0","Type":"ContainerDied","Data":"c7f03d3fdd742f290077c8c859e03cbb33c8046802e0420e28b5720902b21897"} Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.813240 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abef47d7-89c0-4c16-8474-8db3d2d127e7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.815341 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-w7b5x_98d29241-70c5-488d-826a-c37971689e5c/openstack-network-exporter/0.log" Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.815380 4982 generic.go:334] "Generic (PLEG): container finished" podID="98d29241-70c5-488d-826a-c37971689e5c" containerID="62e9a0e84f780bc9d5109a47a1a3b4b18ffc53274df69cc65efda0d894c70ec0" exitCode=2 Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.815447 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-w7b5x" event={"ID":"98d29241-70c5-488d-826a-c37971689e5c","Type":"ContainerDied","Data":"62e9a0e84f780bc9d5109a47a1a3b4b18ffc53274df69cc65efda0d894c70ec0"} Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.836085 4982 generic.go:334] "Generic (PLEG): container finished" podID="8d77f6f1-5c7d-45e5-92e5-8e333d91c020" containerID="2402516dd72b79b7045a0fea1c29ce7e8868f7e5288ff8c3baf2331b568a9fc7" exitCode=143 Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.836239 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8d77f6f1-5c7d-45e5-92e5-8e333d91c020","Type":"ContainerDied","Data":"2402516dd72b79b7045a0fea1c29ce7e8868f7e5288ff8c3baf2331b568a9fc7"} Jan 22 06:10:34 crc kubenswrapper[4982]: E0122 06:10:34.838209 4982 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Jan 22 06:10:34 crc kubenswrapper[4982]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Jan 22 06:10:34 crc kubenswrapper[4982]: + source /usr/local/bin/container-scripts/functions Jan 22 06:10:34 crc kubenswrapper[4982]: ++ OVNBridge=br-int Jan 22 06:10:34 crc kubenswrapper[4982]: ++ OVNRemote=tcp:localhost:6642 Jan 22 06:10:34 crc kubenswrapper[4982]: ++ OVNEncapType=geneve Jan 22 06:10:34 crc kubenswrapper[4982]: ++ OVNAvailabilityZones= Jan 22 06:10:34 crc kubenswrapper[4982]: ++ EnableChassisAsGateway=true Jan 22 06:10:34 crc kubenswrapper[4982]: ++ PhysicalNetworks= Jan 22 06:10:34 crc kubenswrapper[4982]: ++ OVNHostName= Jan 22 06:10:34 crc kubenswrapper[4982]: ++ DB_FILE=/etc/openvswitch/conf.db Jan 22 06:10:34 crc kubenswrapper[4982]: ++ ovs_dir=/var/lib/openvswitch Jan 22 06:10:34 crc kubenswrapper[4982]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Jan 22 06:10:34 crc kubenswrapper[4982]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Jan 22 06:10:34 crc kubenswrapper[4982]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 22 06:10:34 crc kubenswrapper[4982]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 22 06:10:34 crc kubenswrapper[4982]: + sleep 0.5 Jan 22 06:10:34 crc kubenswrapper[4982]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 22 06:10:34 crc kubenswrapper[4982]: + sleep 0.5 Jan 22 06:10:34 crc kubenswrapper[4982]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 22 06:10:34 crc kubenswrapper[4982]: + cleanup_ovsdb_server_semaphore Jan 22 06:10:34 crc kubenswrapper[4982]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 22 06:10:34 crc kubenswrapper[4982]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Jan 22 06:10:34 crc kubenswrapper[4982]: > execCommand=["/usr/local/bin/container-scripts/stop-ovsdb-server.sh"] containerName="ovsdb-server" pod="openstack/ovn-controller-ovs-74zx8" message=< Jan 22 06:10:34 crc kubenswrapper[4982]: Exiting ovsdb-server (5) [ OK ] Jan 22 06:10:34 crc kubenswrapper[4982]: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Jan 22 06:10:34 crc kubenswrapper[4982]: + source /usr/local/bin/container-scripts/functions Jan 22 06:10:34 crc kubenswrapper[4982]: ++ OVNBridge=br-int Jan 22 06:10:34 crc kubenswrapper[4982]: ++ OVNRemote=tcp:localhost:6642 Jan 22 06:10:34 crc kubenswrapper[4982]: ++ OVNEncapType=geneve Jan 22 06:10:34 crc kubenswrapper[4982]: ++ OVNAvailabilityZones= Jan 22 06:10:34 crc kubenswrapper[4982]: ++ EnableChassisAsGateway=true Jan 22 06:10:34 crc kubenswrapper[4982]: ++ PhysicalNetworks= Jan 22 06:10:34 crc kubenswrapper[4982]: ++ OVNHostName= Jan 22 06:10:34 crc kubenswrapper[4982]: ++ DB_FILE=/etc/openvswitch/conf.db Jan 22 06:10:34 crc kubenswrapper[4982]: ++ ovs_dir=/var/lib/openvswitch Jan 22 06:10:34 crc kubenswrapper[4982]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Jan 22 06:10:34 crc kubenswrapper[4982]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Jan 22 06:10:34 crc kubenswrapper[4982]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 22 06:10:34 crc kubenswrapper[4982]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 22 06:10:34 crc kubenswrapper[4982]: + sleep 0.5 Jan 22 06:10:34 crc kubenswrapper[4982]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 22 06:10:34 crc kubenswrapper[4982]: + sleep 0.5 Jan 22 06:10:34 crc kubenswrapper[4982]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 22 06:10:34 crc kubenswrapper[4982]: + cleanup_ovsdb_server_semaphore Jan 22 06:10:34 crc kubenswrapper[4982]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 22 06:10:34 crc kubenswrapper[4982]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Jan 22 06:10:34 crc kubenswrapper[4982]: > Jan 22 06:10:34 crc kubenswrapper[4982]: E0122 06:10:34.838267 4982 kuberuntime_container.go:691] "PreStop hook failed" err=< Jan 22 06:10:34 crc kubenswrapper[4982]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Jan 22 06:10:34 crc kubenswrapper[4982]: + source /usr/local/bin/container-scripts/functions Jan 22 06:10:34 crc kubenswrapper[4982]: ++ OVNBridge=br-int Jan 22 06:10:34 crc kubenswrapper[4982]: ++ OVNRemote=tcp:localhost:6642 Jan 22 06:10:34 crc kubenswrapper[4982]: ++ OVNEncapType=geneve Jan 22 06:10:34 crc kubenswrapper[4982]: ++ OVNAvailabilityZones= Jan 22 06:10:34 crc kubenswrapper[4982]: ++ EnableChassisAsGateway=true Jan 22 06:10:34 crc kubenswrapper[4982]: ++ PhysicalNetworks= Jan 22 06:10:34 crc kubenswrapper[4982]: ++ OVNHostName= Jan 22 06:10:34 crc kubenswrapper[4982]: ++ DB_FILE=/etc/openvswitch/conf.db Jan 22 06:10:34 crc kubenswrapper[4982]: ++ ovs_dir=/var/lib/openvswitch Jan 22 06:10:34 crc kubenswrapper[4982]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Jan 22 06:10:34 crc kubenswrapper[4982]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Jan 22 06:10:34 crc kubenswrapper[4982]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 22 06:10:34 crc kubenswrapper[4982]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 22 06:10:34 crc kubenswrapper[4982]: + sleep 0.5 Jan 22 06:10:34 crc kubenswrapper[4982]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 22 06:10:34 crc kubenswrapper[4982]: + sleep 0.5 Jan 22 06:10:34 crc kubenswrapper[4982]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 22 06:10:34 crc kubenswrapper[4982]: + cleanup_ovsdb_server_semaphore Jan 22 06:10:34 crc kubenswrapper[4982]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 22 06:10:34 crc kubenswrapper[4982]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Jan 22 06:10:34 crc kubenswrapper[4982]: > pod="openstack/ovn-controller-ovs-74zx8" podUID="e8478549-adf2-4e04-b404-f4882ed405d5" containerName="ovsdb-server" containerID="cri-o://8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b" Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.838335 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-74zx8" podUID="e8478549-adf2-4e04-b404-f4882ed405d5" containerName="ovsdb-server" containerID="cri-o://8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b" gracePeriod=29 Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.854158 4982 generic.go:334] "Generic (PLEG): container finished" podID="46bc1b22-5f45-461f-b090-9c13dd126a6f" containerID="c5436ddb9b05921d7b248ae7c45b009c65f9ad0e3d87b2a6e20de5649be61f53" exitCode=137 Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.865597 4982 generic.go:334] "Generic (PLEG): container finished" podID="b3aeab92-5988-4e7b-9fcf-62c13e364f98" containerID="c57a66ba5563b4184480525b497fbf6412201b1822a40a8b542aa66de3ec6598" exitCode=0 Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.865639 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcd6f8f8f-4kgmx" event={"ID":"b3aeab92-5988-4e7b-9fcf-62c13e364f98","Type":"ContainerDied","Data":"c57a66ba5563b4184480525b497fbf6412201b1822a40a8b542aa66de3ec6598"} Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.884571 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abef47d7-89c0-4c16-8474-8db3d2d127e7-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "abef47d7-89c0-4c16-8474-8db3d2d127e7" (UID: "abef47d7-89c0-4c16-8474-8db3d2d127e7"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.893122 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.911014 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abef47d7-89c0-4c16-8474-8db3d2d127e7-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "abef47d7-89c0-4c16-8474-8db3d2d127e7" (UID: "abef47d7-89c0-4c16-8474-8db3d2d127e7"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.914682 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/abef47d7-89c0-4c16-8474-8db3d2d127e7-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.914705 4982 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/abef47d7-89c0-4c16-8474-8db3d2d127e7-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.922237 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-7pgw9"] Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.938821 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-7pgw9"] Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.953778 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.954517 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="53013384-1492-4c2e-9c7a-cd81d4d07018" containerName="nova-metadata-log" containerID="cri-o://acd9ebde3d0e0f8fe4696b287db22cd21891b45ba0969952821a0ef3dc3ecee6" gracePeriod=30 Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.955402 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="53013384-1492-4c2e-9c7a-cd81d4d07018" containerName="nova-metadata-metadata" containerID="cri-o://a49fa6161cdc0ad0eb335a0a25470144a700d11c752945b8fa21ab70997554c3" gracePeriod=30 Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.967677 4982 scope.go:117] "RemoveContainer" containerID="44180703ae3b87e41617dc3bc742fc8099ef79e67de1a6b50b2b21f0fdc9f2bc" Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.967783 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.968007 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="1669f2c1-5f4c-4a3e-ae64-5d754ec522bc" containerName="nova-api-log" containerID="cri-o://261f0791c2831229e062f5721d4579eac13d766cc88596aa36038f23c84e7149" gracePeriod=30 Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.968318 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="1669f2c1-5f4c-4a3e-ae64-5d754ec522bc" containerName="nova-api-api" containerID="cri-o://cefbf932aefddcd806c99682cdfc21a0e80875bc3e02c87761c6e4e7b2e2288e" gracePeriod=30 Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.973918 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-w7b5x_98d29241-70c5-488d-826a-c37971689e5c/openstack-network-exporter/0.log" Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.973991 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-w7b5x" Jan 22 06:10:34 crc kubenswrapper[4982]: I0122 06:10:34.974442 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="bb198f24-94e6-4569-be12-9ee57000a3e3" containerName="rabbitmq" containerID="cri-o://5e0201502839733468893c507dd6c3694d211c79ab29f93898d777e9823bb132" gracePeriod=604800 Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.005038 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fcd6f8f8f-4kgmx" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.017164 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-kx85p"] Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.025969 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-z4p9q"] Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.037775 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-kx85p"] Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.045914 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_ef18a198-b47b-4e19-a56b-7179e76a1e12/ovsdbserver-nb/0.log" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.046014 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.048924 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.059337 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-z4p9q"] Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.065749 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-59458f7b58-qd4fn"] Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.066071 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-59458f7b58-qd4fn" podUID="4f54d316-aa4d-4c56-8681-3fa9816a1b80" containerName="barbican-keystone-listener-log" containerID="cri-o://56701f933f64ebb2c234cb9d8a246c283c1bc8d3adb5dc9723fc632714881b98" gracePeriod=30 Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.066760 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-59458f7b58-qd4fn" podUID="4f54d316-aa4d-4c56-8681-3fa9816a1b80" containerName="barbican-keystone-listener" containerID="cri-o://b99c9484adad7dd532b67e3b233b444b504c8b16be567d270be5129f88c9b4ff" gracePeriod=30 Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.071966 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-cell1-galera-0" podUID="536304e2-bf6d-4c4a-a617-69be7bf2931e" containerName="galera" containerID="cri-o://3287373bce9781dc69b47f25c25358cb2a575449c8b3fc92c2478acccb6376eb" gracePeriod=30 Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.073524 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-68c6f69c56-rbr89"] Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.073766 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-68c6f69c56-rbr89" podUID="2b565c71-916f-40d6-aa91-de5fd3a323d6" 
containerName="barbican-api-log" containerID="cri-o://6bdbcca209318e7de23c54a80c39908eea30839974039d5c924f9f3adf9c2b3d" gracePeriod=30 Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.073890 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-68c6f69c56-rbr89" podUID="2b565c71-916f-40d6-aa91-de5fd3a323d6" containerName="barbican-api" containerID="cri-o://3ebd5e2b4a01d8d16123bc5c692c42b9c4f85f654043c5e5f979425f4bc1aaba" gracePeriod=30 Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.078840 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-796946d7c7-z4qnr"] Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.079067 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-796946d7c7-z4qnr" podUID="8c1843cf-e6eb-400b-84ca-5e9d209a23ce" containerName="barbican-worker-log" containerID="cri-o://b0d30b764842fa8c34eb34bdaa21132a3490627329e40e68bdfc09e720158ae1" gracePeriod=30 Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.079183 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-796946d7c7-z4qnr" podUID="8c1843cf-e6eb-400b-84ca-5e9d209a23ce" containerName="barbican-worker" containerID="cri-o://ed8c2ad87b2789213c37313187684e228958de9f203b39f44ef8e21f33b5470e" gracePeriod=30 Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.086212 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.086468 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="eff35669-e559-41ee-9a8b-114907cd6ab0" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://5cdd5203835aa0d86d98aea0ae6ed7270810a51fcce1c980a5665cd105b86e96" gracePeriod=30 Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.098339 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-2jrzv"] Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.111892 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-2jrzv"] Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.118267 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.118498 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="3d1d97fa-17f8-45ed-9881-5d3896c48708" containerName="nova-cell0-conductor-conductor" containerID="cri-o://d529c70b3892ffd04ffa9e4ea7c7bd75cded5f79ec42a50c31cd18dce68669fd" gracePeriod=30 Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.120316 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ef18a198-b47b-4e19-a56b-7179e76a1e12\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") " Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.120355 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3aeab92-5988-4e7b-9fcf-62c13e364f98-config\") pod \"b3aeab92-5988-4e7b-9fcf-62c13e364f98\" (UID: \"b3aeab92-5988-4e7b-9fcf-62c13e364f98\") " Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.120381 4982 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/98d29241-70c5-488d-826a-c37971689e5c-metrics-certs-tls-certs\") pod \"98d29241-70c5-488d-826a-c37971689e5c\" (UID: \"98d29241-70c5-488d-826a-c37971689e5c\") " Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.120420 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ef18a198-b47b-4e19-a56b-7179e76a1e12-ovsdb-rundir\") pod \"ef18a198-b47b-4e19-a56b-7179e76a1e12\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") " Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.120469 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef18a198-b47b-4e19-a56b-7179e76a1e12-metrics-certs-tls-certs\") pod \"ef18a198-b47b-4e19-a56b-7179e76a1e12\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") " Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.120491 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/46bc1b22-5f45-461f-b090-9c13dd126a6f-openstack-config\") pod \"46bc1b22-5f45-461f-b090-9c13dd126a6f\" (UID: \"46bc1b22-5f45-461f-b090-9c13dd126a6f\") " Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.120516 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kn4hg\" (UniqueName: \"kubernetes.io/projected/98d29241-70c5-488d-826a-c37971689e5c-kube-api-access-kn4hg\") pod \"98d29241-70c5-488d-826a-c37971689e5c\" (UID: \"98d29241-70c5-488d-826a-c37971689e5c\") " Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.120532 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/98d29241-70c5-488d-826a-c37971689e5c-ovn-rundir\") pod \"98d29241-70c5-488d-826a-c37971689e5c\" (UID: \"98d29241-70c5-488d-826a-c37971689e5c\") " Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.120555 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b3aeab92-5988-4e7b-9fcf-62c13e364f98-dns-swift-storage-0\") pod \"b3aeab92-5988-4e7b-9fcf-62c13e364f98\" (UID: \"b3aeab92-5988-4e7b-9fcf-62c13e364f98\") " Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.120580 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef18a198-b47b-4e19-a56b-7179e76a1e12-config\") pod \"ef18a198-b47b-4e19-a56b-7179e76a1e12\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") " Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.120595 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/98d29241-70c5-488d-826a-c37971689e5c-ovs-rundir\") pod \"98d29241-70c5-488d-826a-c37971689e5c\" (UID: \"98d29241-70c5-488d-826a-c37971689e5c\") " Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.120612 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b3aeab92-5988-4e7b-9fcf-62c13e364f98-ovsdbserver-sb\") pod \"b3aeab92-5988-4e7b-9fcf-62c13e364f98\" (UID: \"b3aeab92-5988-4e7b-9fcf-62c13e364f98\") " Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 
06:10:35.120628 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b3aeab92-5988-4e7b-9fcf-62c13e364f98-ovsdbserver-nb\") pod \"b3aeab92-5988-4e7b-9fcf-62c13e364f98\" (UID: \"b3aeab92-5988-4e7b-9fcf-62c13e364f98\") " Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.120651 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/46bc1b22-5f45-461f-b090-9c13dd126a6f-openstack-config-secret\") pod \"46bc1b22-5f45-461f-b090-9c13dd126a6f\" (UID: \"46bc1b22-5f45-461f-b090-9c13dd126a6f\") " Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.120681 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dwvjc\" (UniqueName: \"kubernetes.io/projected/b3aeab92-5988-4e7b-9fcf-62c13e364f98-kube-api-access-dwvjc\") pod \"b3aeab92-5988-4e7b-9fcf-62c13e364f98\" (UID: \"b3aeab92-5988-4e7b-9fcf-62c13e364f98\") " Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.120700 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d94xx\" (UniqueName: \"kubernetes.io/projected/ef18a198-b47b-4e19-a56b-7179e76a1e12-kube-api-access-d94xx\") pod \"ef18a198-b47b-4e19-a56b-7179e76a1e12\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") " Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.120719 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/98d29241-70c5-488d-826a-c37971689e5c-config\") pod \"98d29241-70c5-488d-826a-c37971689e5c\" (UID: \"98d29241-70c5-488d-826a-c37971689e5c\") " Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.120739 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46bc1b22-5f45-461f-b090-9c13dd126a6f-combined-ca-bundle\") pod \"46bc1b22-5f45-461f-b090-9c13dd126a6f\" (UID: \"46bc1b22-5f45-461f-b090-9c13dd126a6f\") " Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.120759 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef18a198-b47b-4e19-a56b-7179e76a1e12-ovsdbserver-nb-tls-certs\") pod \"ef18a198-b47b-4e19-a56b-7179e76a1e12\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") " Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.120778 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef18a198-b47b-4e19-a56b-7179e76a1e12-combined-ca-bundle\") pod \"ef18a198-b47b-4e19-a56b-7179e76a1e12\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") " Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.120803 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98d29241-70c5-488d-826a-c37971689e5c-combined-ca-bundle\") pod \"98d29241-70c5-488d-826a-c37971689e5c\" (UID: \"98d29241-70c5-488d-826a-c37971689e5c\") " Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.120820 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b3aeab92-5988-4e7b-9fcf-62c13e364f98-dns-svc\") pod \"b3aeab92-5988-4e7b-9fcf-62c13e364f98\" (UID: \"b3aeab92-5988-4e7b-9fcf-62c13e364f98\") " Jan 22 06:10:35 
crc kubenswrapper[4982]: I0122 06:10:35.120838 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rpnnd\" (UniqueName: \"kubernetes.io/projected/46bc1b22-5f45-461f-b090-9c13dd126a6f-kube-api-access-rpnnd\") pod \"46bc1b22-5f45-461f-b090-9c13dd126a6f\" (UID: \"46bc1b22-5f45-461f-b090-9c13dd126a6f\") " Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.120871 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ef18a198-b47b-4e19-a56b-7179e76a1e12-scripts\") pod \"ef18a198-b47b-4e19-a56b-7179e76a1e12\" (UID: \"ef18a198-b47b-4e19-a56b-7179e76a1e12\") " Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.122793 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef18a198-b47b-4e19-a56b-7179e76a1e12-scripts" (OuterVolumeSpecName: "scripts") pod "ef18a198-b47b-4e19-a56b-7179e76a1e12" (UID: "ef18a198-b47b-4e19-a56b-7179e76a1e12"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.124983 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef18a198-b47b-4e19-a56b-7179e76a1e12-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "ef18a198-b47b-4e19-a56b-7179e76a1e12" (UID: "ef18a198-b47b-4e19-a56b-7179e76a1e12"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.125705 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef18a198-b47b-4e19-a56b-7179e76a1e12-config" (OuterVolumeSpecName: "config") pod "ef18a198-b47b-4e19-a56b-7179e76a1e12" (UID: "ef18a198-b47b-4e19-a56b-7179e76a1e12"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.125753 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/98d29241-70c5-488d-826a-c37971689e5c-ovs-rundir" (OuterVolumeSpecName: "ovs-rundir") pod "98d29241-70c5-488d-826a-c37971689e5c" (UID: "98d29241-70c5-488d-826a-c37971689e5c"). InnerVolumeSpecName "ovs-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.126436 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/98d29241-70c5-488d-826a-c37971689e5c-config" (OuterVolumeSpecName: "config") pod "98d29241-70c5-488d-826a-c37971689e5c" (UID: "98d29241-70c5-488d-826a-c37971689e5c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.126903 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/98d29241-70c5-488d-826a-c37971689e5c-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "98d29241-70c5-488d-826a-c37971689e5c" (UID: "98d29241-70c5-488d-826a-c37971689e5c"). InnerVolumeSpecName "ovn-rundir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.128836 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.129097 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="bcc9f070-463f-4fef-8eb0-fd0cbe567f70" containerName="nova-cell1-conductor-conductor" containerID="cri-o://cdcccc69427ee2d1728f8e7d6f59420bebf9be65e0a9061fee307038b83d9cb6" gracePeriod=30 Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.136177 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-r9qxz"] Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.142151 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-r9qxz"] Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.147672 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.147891 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="0e2e1a17-8278-4fe9-a83e-aeb0a61c69df" containerName="nova-scheduler-scheduler" containerID="cri-o://98cf6e03c88eac06c166fdc5c9a1486e6fc1f8e13aa0eff354dcd9303c5e399f" gracePeriod=30 Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.149133 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "ef18a198-b47b-4e19-a56b-7179e76a1e12" (UID: "ef18a198-b47b-4e19-a56b-7179e76a1e12"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.155802 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef18a198-b47b-4e19-a56b-7179e76a1e12-kube-api-access-d94xx" (OuterVolumeSpecName: "kube-api-access-d94xx") pod "ef18a198-b47b-4e19-a56b-7179e76a1e12" (UID: "ef18a198-b47b-4e19-a56b-7179e76a1e12"). InnerVolumeSpecName "kube-api-access-d94xx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.161079 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46bc1b22-5f45-461f-b090-9c13dd126a6f-kube-api-access-rpnnd" (OuterVolumeSpecName: "kube-api-access-rpnnd") pod "46bc1b22-5f45-461f-b090-9c13dd126a6f" (UID: "46bc1b22-5f45-461f-b090-9c13dd126a6f"). InnerVolumeSpecName "kube-api-access-rpnnd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.164988 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98d29241-70c5-488d-826a-c37971689e5c-kube-api-access-kn4hg" (OuterVolumeSpecName: "kube-api-access-kn4hg") pod "98d29241-70c5-488d-826a-c37971689e5c" (UID: "98d29241-70c5-488d-826a-c37971689e5c"). InnerVolumeSpecName "kube-api-access-kn4hg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.165947 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3aeab92-5988-4e7b-9fcf-62c13e364f98-kube-api-access-dwvjc" (OuterVolumeSpecName: "kube-api-access-dwvjc") pod "b3aeab92-5988-4e7b-9fcf-62c13e364f98" (UID: "b3aeab92-5988-4e7b-9fcf-62c13e364f98"). InnerVolumeSpecName "kube-api-access-dwvjc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.169932 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.177540 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.227673 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rpnnd\" (UniqueName: \"kubernetes.io/projected/46bc1b22-5f45-461f-b090-9c13dd126a6f-kube-api-access-rpnnd\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.227708 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ef18a198-b47b-4e19-a56b-7179e76a1e12-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.227734 4982 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.227745 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ef18a198-b47b-4e19-a56b-7179e76a1e12-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.227756 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kn4hg\" (UniqueName: \"kubernetes.io/projected/98d29241-70c5-488d-826a-c37971689e5c-kube-api-access-kn4hg\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.227765 4982 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/98d29241-70c5-488d-826a-c37971689e5c-ovn-rundir\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.227774 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef18a198-b47b-4e19-a56b-7179e76a1e12-config\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.227782 4982 reconciler_common.go:293] "Volume detached for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/98d29241-70c5-488d-826a-c37971689e5c-ovs-rundir\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.227791 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dwvjc\" (UniqueName: \"kubernetes.io/projected/b3aeab92-5988-4e7b-9fcf-62c13e364f98-kube-api-access-dwvjc\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.227799 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d94xx\" (UniqueName: \"kubernetes.io/projected/ef18a198-b47b-4e19-a56b-7179e76a1e12-kube-api-access-d94xx\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 
06:10:35.227808 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/98d29241-70c5-488d-826a-c37971689e5c-config\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.288336 4982 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.292284 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef18a198-b47b-4e19-a56b-7179e76a1e12-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ef18a198-b47b-4e19-a56b-7179e76a1e12" (UID: "ef18a198-b47b-4e19-a56b-7179e76a1e12"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.307184 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46bc1b22-5f45-461f-b090-9c13dd126a6f-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "46bc1b22-5f45-461f-b090-9c13dd126a6f" (UID: "46bc1b22-5f45-461f-b090-9c13dd126a6f"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.332309 4982 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/46bc1b22-5f45-461f-b090-9c13dd126a6f-openstack-config\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.332333 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef18a198-b47b-4e19-a56b-7179e76a1e12-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.332342 4982 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.333044 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3aeab92-5988-4e7b-9fcf-62c13e364f98-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "b3aeab92-5988-4e7b-9fcf-62c13e364f98" (UID: "b3aeab92-5988-4e7b-9fcf-62c13e364f98"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.359881 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46bc1b22-5f45-461f-b090-9c13dd126a6f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "46bc1b22-5f45-461f-b090-9c13dd126a6f" (UID: "46bc1b22-5f45-461f-b090-9c13dd126a6f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.368206 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-a886-account-create-update-l7kpg"] Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.376775 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3aeab92-5988-4e7b-9fcf-62c13e364f98-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b3aeab92-5988-4e7b-9fcf-62c13e364f98" (UID: "b3aeab92-5988-4e7b-9fcf-62c13e364f98"). 
InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:10:35 crc kubenswrapper[4982]: W0122 06:10:35.387233 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod66a74d17_3793_4757_b423_503ee60fc4d5.slice/crio-ae0547f33ed4b4d4d2103d05432f859ce01dff5562f415ed88ee2ea930a122f8 WatchSource:0}: Error finding container ae0547f33ed4b4d4d2103d05432f859ce01dff5562f415ed88ee2ea930a122f8: Status 404 returned error can't find the container with id ae0547f33ed4b4d4d2103d05432f859ce01dff5562f415ed88ee2ea930a122f8 Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.388798 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-bda6-account-create-update-jwx8g"] Jan 22 06:10:35 crc kubenswrapper[4982]: E0122 06:10:35.393539 4982 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 22 06:10:35 crc kubenswrapper[4982]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 22 06:10:35 crc kubenswrapper[4982]: Jan 22 06:10:35 crc kubenswrapper[4982]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 22 06:10:35 crc kubenswrapper[4982]: Jan 22 06:10:35 crc kubenswrapper[4982]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 22 06:10:35 crc kubenswrapper[4982]: Jan 22 06:10:35 crc kubenswrapper[4982]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 22 06:10:35 crc kubenswrapper[4982]: Jan 22 06:10:35 crc kubenswrapper[4982]: if [ -n "nova_api" ]; then Jan 22 06:10:35 crc kubenswrapper[4982]: GRANT_DATABASE="nova_api" Jan 22 06:10:35 crc kubenswrapper[4982]: else Jan 22 06:10:35 crc kubenswrapper[4982]: GRANT_DATABASE="*" Jan 22 06:10:35 crc kubenswrapper[4982]: fi Jan 22 06:10:35 crc kubenswrapper[4982]: Jan 22 06:10:35 crc kubenswrapper[4982]: # going for maximum compatibility here: Jan 22 06:10:35 crc kubenswrapper[4982]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 22 06:10:35 crc kubenswrapper[4982]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 22 06:10:35 crc kubenswrapper[4982]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 22 06:10:35 crc kubenswrapper[4982]: # support updates Jan 22 06:10:35 crc kubenswrapper[4982]: Jan 22 06:10:35 crc kubenswrapper[4982]: $MYSQL_CMD < logger="UnhandledError" Jan 22 06:10:35 crc kubenswrapper[4982]: E0122 06:10:35.395228 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-api-db-secret\\\" not found\"" pod="openstack/nova-api-a886-account-create-update-l7kpg" podUID="66a74d17-3793-4757-b423-503ee60fc4d5" Jan 22 06:10:35 crc kubenswrapper[4982]: E0122 06:10:35.408736 4982 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 22 06:10:35 crc kubenswrapper[4982]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 22 06:10:35 crc kubenswrapper[4982]: Jan 22 06:10:35 crc kubenswrapper[4982]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 22 06:10:35 crc kubenswrapper[4982]: Jan 22 06:10:35 crc kubenswrapper[4982]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 22 06:10:35 crc kubenswrapper[4982]: Jan 22 06:10:35 crc kubenswrapper[4982]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 22 06:10:35 crc kubenswrapper[4982]: Jan 22 06:10:35 crc kubenswrapper[4982]: if [ -n "nova_cell1" ]; then Jan 22 06:10:35 crc kubenswrapper[4982]: GRANT_DATABASE="nova_cell1" Jan 22 06:10:35 crc kubenswrapper[4982]: else Jan 22 06:10:35 crc kubenswrapper[4982]: GRANT_DATABASE="*" Jan 22 06:10:35 crc kubenswrapper[4982]: fi Jan 22 06:10:35 crc kubenswrapper[4982]: Jan 22 06:10:35 crc kubenswrapper[4982]: # going for maximum compatibility here: Jan 22 06:10:35 crc kubenswrapper[4982]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 22 06:10:35 crc kubenswrapper[4982]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 22 06:10:35 crc kubenswrapper[4982]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 22 06:10:35 crc kubenswrapper[4982]: # support updates Jan 22 06:10:35 crc kubenswrapper[4982]: Jan 22 06:10:35 crc kubenswrapper[4982]: $MYSQL_CMD < logger="UnhandledError" Jan 22 06:10:35 crc kubenswrapper[4982]: E0122 06:10:35.410197 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-cell1-db-secret\\\" not found\"" pod="openstack/nova-cell1-bda6-account-create-update-jwx8g" podUID="de93226c-14b2-42b2-a553-2535096d7b81" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.427222 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98d29241-70c5-488d-826a-c37971689e5c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "98d29241-70c5-488d-826a-c37971689e5c" (UID: "98d29241-70c5-488d-826a-c37971689e5c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.434682 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98d29241-70c5-488d-826a-c37971689e5c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.434709 4982 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b3aeab92-5988-4e7b-9fcf-62c13e364f98-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.434719 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b3aeab92-5988-4e7b-9fcf-62c13e364f98-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.434728 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46bc1b22-5f45-461f-b090-9c13dd126a6f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.435300 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-777f-account-create-update-ssrcg"] Jan 22 06:10:35 crc kubenswrapper[4982]: E0122 06:10:35.439573 4982 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 22 06:10:35 crc kubenswrapper[4982]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 22 06:10:35 crc kubenswrapper[4982]: Jan 22 06:10:35 crc kubenswrapper[4982]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 22 06:10:35 crc kubenswrapper[4982]: Jan 22 06:10:35 crc kubenswrapper[4982]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 22 06:10:35 crc kubenswrapper[4982]: Jan 22 06:10:35 crc kubenswrapper[4982]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 22 06:10:35 crc kubenswrapper[4982]: Jan 22 06:10:35 crc kubenswrapper[4982]: if [ -n "nova_cell0" ]; then Jan 22 06:10:35 crc kubenswrapper[4982]: GRANT_DATABASE="nova_cell0" Jan 22 06:10:35 crc kubenswrapper[4982]: else Jan 22 06:10:35 crc kubenswrapper[4982]: GRANT_DATABASE="*" Jan 22 06:10:35 crc kubenswrapper[4982]: fi Jan 22 06:10:35 crc kubenswrapper[4982]: Jan 22 06:10:35 crc kubenswrapper[4982]: # going for maximum compatibility here: Jan 22 06:10:35 crc kubenswrapper[4982]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 22 06:10:35 crc kubenswrapper[4982]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 22 06:10:35 crc kubenswrapper[4982]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 22 06:10:35 crc kubenswrapper[4982]: # support updates Jan 22 06:10:35 crc kubenswrapper[4982]: Jan 22 06:10:35 crc kubenswrapper[4982]: $MYSQL_CMD < logger="UnhandledError" Jan 22 06:10:35 crc kubenswrapper[4982]: E0122 06:10:35.441772 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-cell0-db-secret\\\" not found\"" pod="openstack/nova-cell0-777f-account-create-update-ssrcg" podUID="dffeeb11-fe93-4cb1-ad5c-e77fbe664a70" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.448333 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-a8f0-account-create-update-nclgk"] Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.448471 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3aeab92-5988-4e7b-9fcf-62c13e364f98-config" (OuterVolumeSpecName: "config") pod "b3aeab92-5988-4e7b-9fcf-62c13e364f98" (UID: "b3aeab92-5988-4e7b-9fcf-62c13e364f98"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.448535 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46bc1b22-5f45-461f-b090-9c13dd126a6f-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "46bc1b22-5f45-461f-b090-9c13dd126a6f" (UID: "46bc1b22-5f45-461f-b090-9c13dd126a6f"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:35 crc kubenswrapper[4982]: E0122 06:10:35.460601 4982 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 22 06:10:35 crc kubenswrapper[4982]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 22 06:10:35 crc kubenswrapper[4982]: Jan 22 06:10:35 crc kubenswrapper[4982]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 22 06:10:35 crc kubenswrapper[4982]: Jan 22 06:10:35 crc kubenswrapper[4982]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 22 06:10:35 crc kubenswrapper[4982]: Jan 22 06:10:35 crc kubenswrapper[4982]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 22 06:10:35 crc kubenswrapper[4982]: Jan 22 06:10:35 crc kubenswrapper[4982]: if [ -n "barbican" ]; then Jan 22 06:10:35 crc kubenswrapper[4982]: GRANT_DATABASE="barbican" Jan 22 06:10:35 crc kubenswrapper[4982]: else Jan 22 06:10:35 crc kubenswrapper[4982]: GRANT_DATABASE="*" Jan 22 06:10:35 crc kubenswrapper[4982]: fi Jan 22 06:10:35 crc kubenswrapper[4982]: Jan 22 06:10:35 crc kubenswrapper[4982]: # going for maximum compatibility here: Jan 22 06:10:35 crc kubenswrapper[4982]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 22 06:10:35 crc kubenswrapper[4982]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 22 06:10:35 crc kubenswrapper[4982]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 22 06:10:35 crc kubenswrapper[4982]: # support updates Jan 22 06:10:35 crc kubenswrapper[4982]: Jan 22 06:10:35 crc kubenswrapper[4982]: $MYSQL_CMD < logger="UnhandledError" Jan 22 06:10:35 crc kubenswrapper[4982]: E0122 06:10:35.461989 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"barbican-db-secret\\\" not found\"" pod="openstack/barbican-a8f0-account-create-update-nclgk" podUID="bff36695-f9e7-4506-95b2-a1c99296fe75" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.474517 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef18a198-b47b-4e19-a56b-7179e76a1e12-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "ef18a198-b47b-4e19-a56b-7179e76a1e12" (UID: "ef18a198-b47b-4e19-a56b-7179e76a1e12"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.480388 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3aeab92-5988-4e7b-9fcf-62c13e364f98-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b3aeab92-5988-4e7b-9fcf-62c13e364f98" (UID: "b3aeab92-5988-4e7b-9fcf-62c13e364f98"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.481376 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3aeab92-5988-4e7b-9fcf-62c13e364f98-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b3aeab92-5988-4e7b-9fcf-62c13e364f98" (UID: "b3aeab92-5988-4e7b-9fcf-62c13e364f98"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.538200 4982 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef18a198-b47b-4e19-a56b-7179e76a1e12-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.538243 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b3aeab92-5988-4e7b-9fcf-62c13e364f98-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.538259 4982 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/46bc1b22-5f45-461f-b090-9c13dd126a6f-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.538271 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b3aeab92-5988-4e7b-9fcf-62c13e364f98-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.538283 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3aeab92-5988-4e7b-9fcf-62c13e364f98-config\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.593075 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98d29241-70c5-488d-826a-c37971689e5c-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "98d29241-70c5-488d-826a-c37971689e5c" (UID: "98d29241-70c5-488d-826a-c37971689e5c"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.620222 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-6b4bc7b64f-sqvw9"] Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.620437 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" podUID="bd4f5182-1143-4df0-93c1-33e344a1a204" containerName="proxy-httpd" containerID="cri-o://0142fcabb077a8f09a4d3ee2f569822645109a54f78b12da578c3ec93916b8e4" gracePeriod=30 Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.620808 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" podUID="bd4f5182-1143-4df0-93c1-33e344a1a204" containerName="proxy-server" containerID="cri-o://9bb36d2958a4e7af85598bed16920c7bf47db7bcc5efe40a9732232c4386465d" gracePeriod=30 Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.639555 4982 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/98d29241-70c5-488d-826a-c37971689e5c-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.649813 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef18a198-b47b-4e19-a56b-7179e76a1e12-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "ef18a198-b47b-4e19-a56b-7179e76a1e12" (UID: "ef18a198-b47b-4e19-a56b-7179e76a1e12"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.731474 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="113886a4-e806-4fd8-b678-f8051fa8b3ba" path="/var/lib/kubelet/pods/113886a4-e806-4fd8-b678-f8051fa8b3ba/volumes" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.732375 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29a36a50-28da-44d3-934a-89bcf37ac576" path="/var/lib/kubelet/pods/29a36a50-28da-44d3-934a-89bcf37ac576/volumes" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.732894 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c" path="/var/lib/kubelet/pods/2d9ec2ea-0e8f-4972-aa94-fb5f3f9d199c/volumes" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.734294 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46bc1b22-5f45-461f-b090-9c13dd126a6f" path="/var/lib/kubelet/pods/46bc1b22-5f45-461f-b090-9c13dd126a6f/volumes" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.735090 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1" path="/var/lib/kubelet/pods/4e5951fa-0c5c-4a99-bfaf-a151d2cca3b1/volumes" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.735894 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56abf6c5-1b18-45ab-ab46-8359b05d5a19" path="/var/lib/kubelet/pods/56abf6c5-1b18-45ab-ab46-8359b05d5a19/volumes" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.736369 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60dd52f0-a379-40e6-b2e3-dcd5c296bc59" path="/var/lib/kubelet/pods/60dd52f0-a379-40e6-b2e3-dcd5c296bc59/volumes" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.737498 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63574be5-6a79-44af-b36a-2d32bd676edd" path="/var/lib/kubelet/pods/63574be5-6a79-44af-b36a-2d32bd676edd/volumes" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.738067 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c26adbc-0d63-423d-b805-bc5e866bf6f2" path="/var/lib/kubelet/pods/6c26adbc-0d63-423d-b805-bc5e866bf6f2/volumes" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.740755 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96ba8b13-a38d-49d1-9f3b-a24ec4dabbee" path="/var/lib/kubelet/pods/96ba8b13-a38d-49d1-9f3b-a24ec4dabbee/volumes" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.741372 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="abef47d7-89c0-4c16-8474-8db3d2d127e7" path="/var/lib/kubelet/pods/abef47d7-89c0-4c16-8474-8db3d2d127e7/volumes" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.742040 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aff6a5d2-6643-4aa8-8427-c01cfc232232" path="/var/lib/kubelet/pods/aff6a5d2-6643-4aa8-8427-c01cfc232232/volumes" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.743031 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1bbcbea-8862-4397-a672-5083ba34c23e" path="/var/lib/kubelet/pods/b1bbcbea-8862-4397-a672-5083ba34c23e/volumes" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.743532 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b217180b-2309-401f-a98c-96ecd04ce551" path="/var/lib/kubelet/pods/b217180b-2309-401f-a98c-96ecd04ce551/volumes" 
Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.744288 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9682832-1d53-4341-9dbc-10f057d1f6ad" path="/var/lib/kubelet/pods/b9682832-1d53-4341-9dbc-10f057d1f6ad/volumes" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.744405 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef18a198-b47b-4e19-a56b-7179e76a1e12-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.745350 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c42a1abb-5389-40ec-b9b3-c7e7c3ea6926" path="/var/lib/kubelet/pods/c42a1abb-5389-40ec-b9b3-c7e7c3ea6926/volumes" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.745820 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb36afed-98e0-460c-ac13-f09334f4334e" path="/var/lib/kubelet/pods/cb36afed-98e0-460c-ac13-f09334f4334e/volumes" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.746329 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb64b830-eb39-4a4c-8413-04876d86e890" path="/var/lib/kubelet/pods/eb64b830-eb39-4a4c-8413-04876d86e890/volumes" Jan 22 06:10:35 crc kubenswrapper[4982]: E0122 06:10:35.763965 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="752892d037f338ef999fab998236c266729937268421b318ed21e35e0b563dde" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 22 06:10:35 crc kubenswrapper[4982]: E0122 06:10:35.765898 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="752892d037f338ef999fab998236c266729937268421b318ed21e35e0b563dde" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 22 06:10:35 crc kubenswrapper[4982]: E0122 06:10:35.766816 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="752892d037f338ef999fab998236c266729937268421b318ed21e35e0b563dde" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 22 06:10:35 crc kubenswrapper[4982]: E0122 06:10:35.766945 4982 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="0ce93171-ff81-4a46-9813-2807930a945c" containerName="ovn-northd" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.906880 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"53013384-1492-4c2e-9c7a-cd81d4d07018","Type":"ContainerDied","Data":"acd9ebde3d0e0f8fe4696b287db22cd21891b45ba0969952821a0ef3dc3ecee6"} Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.906876 4982 generic.go:334] "Generic (PLEG): container finished" podID="53013384-1492-4c2e-9c7a-cd81d4d07018" containerID="acd9ebde3d0e0f8fe4696b287db22cd21891b45ba0969952821a0ef3dc3ecee6" exitCode=143 Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.909414 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell1-bda6-account-create-update-jwx8g" event={"ID":"de93226c-14b2-42b2-a553-2535096d7b81","Type":"ContainerStarted","Data":"17d28263df110c87238340d34cea08d6b255cbb04297cbd9a803f8f66ef076a7"} Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.920090 4982 generic.go:334] "Generic (PLEG): container finished" podID="8c1843cf-e6eb-400b-84ca-5e9d209a23ce" containerID="b0d30b764842fa8c34eb34bdaa21132a3490627329e40e68bdfc09e720158ae1" exitCode=143 Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.920177 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-796946d7c7-z4qnr" event={"ID":"8c1843cf-e6eb-400b-84ca-5e9d209a23ce","Type":"ContainerDied","Data":"b0d30b764842fa8c34eb34bdaa21132a3490627329e40e68bdfc09e720158ae1"} Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.973224 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_ef18a198-b47b-4e19-a56b-7179e76a1e12/ovsdbserver-nb/0.log" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.973348 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"ef18a198-b47b-4e19-a56b-7179e76a1e12","Type":"ContainerDied","Data":"bc060a54f8f557a40abbece0adb977ab04db056f47bdec562138802ab72ea2ae"} Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.973390 4982 scope.go:117] "RemoveContainer" containerID="2e5d780276258df492d4681170a8192ba89d1859402687fc4d4b1699560bb858" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.973394 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.986619 4982 generic.go:334] "Generic (PLEG): container finished" podID="eff35669-e559-41ee-9a8b-114907cd6ab0" containerID="5cdd5203835aa0d86d98aea0ae6ed7270810a51fcce1c980a5665cd105b86e96" exitCode=0 Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.986692 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"eff35669-e559-41ee-9a8b-114907cd6ab0","Type":"ContainerDied","Data":"5cdd5203835aa0d86d98aea0ae6ed7270810a51fcce1c980a5665cd105b86e96"} Jan 22 06:10:35 crc kubenswrapper[4982]: I0122 06:10:35.990844 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-a886-account-create-update-l7kpg" event={"ID":"66a74d17-3793-4757-b423-503ee60fc4d5","Type":"ContainerStarted","Data":"ae0547f33ed4b4d4d2103d05432f859ce01dff5562f415ed88ee2ea930a122f8"} Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.039123 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 22 06:10:36 crc kubenswrapper[4982]: E0122 06:10:36.070410 4982 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 22 06:10:36 crc kubenswrapper[4982]: E0122 06:10:36.070490 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7623a66b-412d-4202-bd05-58fba1c6a3d3-config-data podName:7623a66b-412d-4202-bd05-58fba1c6a3d3 nodeName:}" failed. No retries permitted until 2026-01-22 06:10:40.070458152 +0000 UTC m=+1500.909096145 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/7623a66b-412d-4202-bd05-58fba1c6a3d3-config-data") pod "rabbitmq-server-0" (UID: "7623a66b-412d-4202-bd05-58fba1c6a3d3") : configmap "rabbitmq-config-data" not found Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.081384 4982 generic.go:334] "Generic (PLEG): container finished" podID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerID="b878ca638cdba06e768d1e1407882909e1a9532997c7390f2d2cc87458ce86aa" exitCode=0 Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.081715 4982 generic.go:334] "Generic (PLEG): container finished" podID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerID="e4748ea7b210414126713179ef219e82a0abc3f56fde6ec484724799d53bea45" exitCode=0 Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.081724 4982 generic.go:334] "Generic (PLEG): container finished" podID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerID="f0e4c628f7ebdabca21ce9e2c5341af6fc155bfe4b2913c68c56bd267c6a4629" exitCode=0 Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.081730 4982 generic.go:334] "Generic (PLEG): container finished" podID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerID="78cb51cdcbb32c7c566dd440fe3eddf657c1d8fa7d725c7170c6f86381854c75" exitCode=0 Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.081775 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0e66de65-9639-4397-bc39-dfcf0c325dff","Type":"ContainerDied","Data":"b878ca638cdba06e768d1e1407882909e1a9532997c7390f2d2cc87458ce86aa"} Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.081799 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0e66de65-9639-4397-bc39-dfcf0c325dff","Type":"ContainerDied","Data":"e4748ea7b210414126713179ef219e82a0abc3f56fde6ec484724799d53bea45"} Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.081808 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0e66de65-9639-4397-bc39-dfcf0c325dff","Type":"ContainerDied","Data":"f0e4c628f7ebdabca21ce9e2c5341af6fc155bfe4b2913c68c56bd267c6a4629"} Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.081817 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0e66de65-9639-4397-bc39-dfcf0c325dff","Type":"ContainerDied","Data":"78cb51cdcbb32c7c566dd440fe3eddf657c1d8fa7d725c7170c6f86381854c75"} Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.086152 4982 generic.go:334] "Generic (PLEG): container finished" podID="ac0dc6e4-35d9-4d06-93af-a8758648aa13" containerID="c3a1f31f2494aee180667a0f03331fe892eadeaa196337253d864cc858d0d3f1" exitCode=143 Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.086186 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ac0dc6e4-35d9-4d06-93af-a8758648aa13","Type":"ContainerDied","Data":"c3a1f31f2494aee180667a0f03331fe892eadeaa196337253d864cc858d0d3f1"} Jan 22 06:10:36 crc kubenswrapper[4982]: E0122 06:10:36.090456 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b is running failed: container process not found" containerID="8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 06:10:36 crc 
kubenswrapper[4982]: I0122 06:10:36.091164 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-w7b5x_98d29241-70c5-488d-826a-c37971689e5c/openstack-network-exporter/0.log" Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.091273 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-w7b5x" Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.092001 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-w7b5x" event={"ID":"98d29241-70c5-488d-826a-c37971689e5c","Type":"ContainerDied","Data":"35ff9eb84925d839a1b52a49f3acab8b9287bedc5e9f612fb4170721d6167c29"} Jan 22 06:10:36 crc kubenswrapper[4982]: E0122 06:10:36.092095 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b is running failed: container process not found" containerID="8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 06:10:36 crc kubenswrapper[4982]: E0122 06:10:36.093287 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b is running failed: container process not found" containerID="8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 06:10:36 crc kubenswrapper[4982]: E0122 06:10:36.093318 4982 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-74zx8" podUID="e8478549-adf2-4e04-b404-f4882ed405d5" containerName="ovsdb-server" Jan 22 06:10:36 crc kubenswrapper[4982]: E0122 06:10:36.106706 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d07fbec73b82d46cf6bca3ce65be1c95f30a0ec4db97b397ade4d734c12b4c66" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.107799 4982 generic.go:334] "Generic (PLEG): container finished" podID="2b565c71-916f-40d6-aa91-de5fd3a323d6" containerID="6bdbcca209318e7de23c54a80c39908eea30839974039d5c924f9f3adf9c2b3d" exitCode=143 Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.107863 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-68c6f69c56-rbr89" event={"ID":"2b565c71-916f-40d6-aa91-de5fd3a323d6","Type":"ContainerDied","Data":"6bdbcca209318e7de23c54a80c39908eea30839974039d5c924f9f3adf9c2b3d"} Jan 22 06:10:36 crc kubenswrapper[4982]: E0122 06:10:36.108301 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d07fbec73b82d46cf6bca3ce65be1c95f30a0ec4db97b397ade4d734c12b4c66" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 06:10:36 crc kubenswrapper[4982]: E0122 
06:10:36.109243 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d07fbec73b82d46cf6bca3ce65be1c95f30a0ec4db97b397ade4d734c12b4c66" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 06:10:36 crc kubenswrapper[4982]: E0122 06:10:36.109273 4982 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-74zx8" podUID="e8478549-adf2-4e04-b404-f4882ed405d5" containerName="ovs-vswitchd" Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.110017 4982 generic.go:334] "Generic (PLEG): container finished" podID="b6593811-6583-4900-b402-5af9db3887b3" containerID="f1b86ad014feaae9b9be0659bf2c7dfe329f6b9ea2485bf696dc7ce0d69d2fd1" exitCode=0 Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.110054 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f9754c45-wzfk5" event={"ID":"b6593811-6583-4900-b402-5af9db3887b3","Type":"ContainerDied","Data":"f1b86ad014feaae9b9be0659bf2c7dfe329f6b9ea2485bf696dc7ce0d69d2fd1"} Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.115659 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-777f-account-create-update-ssrcg" event={"ID":"dffeeb11-fe93-4cb1-ad5c-e77fbe664a70","Type":"ContainerStarted","Data":"4c2961de1d7496427f2134ba4205ee5cac3ba70880b62ae71e1869169dc30eed"} Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.132789 4982 generic.go:334] "Generic (PLEG): container finished" podID="1669f2c1-5f4c-4a3e-ae64-5d754ec522bc" containerID="261f0791c2831229e062f5721d4579eac13d766cc88596aa36038f23c84e7149" exitCode=143 Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.132877 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc","Type":"ContainerDied","Data":"261f0791c2831229e062f5721d4579eac13d766cc88596aa36038f23c84e7149"} Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.134377 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-a8f0-account-create-update-nclgk" event={"ID":"bff36695-f9e7-4506-95b2-a1c99296fe75","Type":"ContainerStarted","Data":"897b30335a39e8fa77655e8853a035e43cac49eddc976b61970a856744fa796f"} Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.145060 4982 generic.go:334] "Generic (PLEG): container finished" podID="4f54d316-aa4d-4c56-8681-3fa9816a1b80" containerID="56701f933f64ebb2c234cb9d8a246c283c1bc8d3adb5dc9723fc632714881b98" exitCode=143 Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.145153 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-59458f7b58-qd4fn" event={"ID":"4f54d316-aa4d-4c56-8681-3fa9816a1b80","Type":"ContainerDied","Data":"56701f933f64ebb2c234cb9d8a246c283c1bc8d3adb5dc9723fc632714881b98"} Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.152914 4982 generic.go:334] "Generic (PLEG): container finished" podID="9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21" containerID="e22bfcf49126051d18c96c4b12bfda7fb7db2a8d1c02fa24e1431a150649a392" exitCode=1 Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.153553 4982 scope.go:117] "RemoveContainer" containerID="e22bfcf49126051d18c96c4b12bfda7fb7db2a8d1c02fa24e1431a150649a392" Jan 22 06:10:36 crc 
kubenswrapper[4982]: I0122 06:10:36.154043 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-b7g85" event={"ID":"9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21","Type":"ContainerDied","Data":"e22bfcf49126051d18c96c4b12bfda7fb7db2a8d1c02fa24e1431a150649a392"}
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.189011 4982 generic.go:334] "Generic (PLEG): container finished" podID="e8478549-adf2-4e04-b404-f4882ed405d5" containerID="8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b" exitCode=0
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.189176 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-74zx8" event={"ID":"e8478549-adf2-4e04-b404-f4882ed405d5","Type":"ContainerDied","Data":"8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b"}
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.193017 4982 generic.go:334] "Generic (PLEG): container finished" podID="fd1c632c-d2c0-4f55-9727-af2ffbe1feef" containerID="a568fc05e6d1bc9ef6cd2254326be3e07d0040651777d6881228c612c03c305d" exitCode=0
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.193136 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"fd1c632c-d2c0-4f55-9727-af2ffbe1feef","Type":"ContainerDied","Data":"a568fc05e6d1bc9ef6cd2254326be3e07d0040651777d6881228c612c03c305d"}
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.218645 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcd6f8f8f-4kgmx" event={"ID":"b3aeab92-5988-4e7b-9fcf-62c13e364f98","Type":"ContainerDied","Data":"d8c6537a32a128c90801c32a32ab42b41072520eeb92c3cd4b545b201e24f11a"}
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.218986 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fcd6f8f8f-4kgmx"
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.237190 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.248877 4982 scope.go:117] "RemoveContainer" containerID="44293f881aeb45c5c60b9c080fb0120c9f3a061e77ce519b40e24281772cc8b3"
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.254321 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-w7b5x"]
Jan 22 06:10:36 crc kubenswrapper[4982]: E0122 06:10:36.259219 4982 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod536304e2_bf6d_4c4a_a617_69be7bf2931e.slice/crio-3287373bce9781dc69b47f25c25358cb2a575449c8b3fc92c2478acccb6376eb.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbd4f5182_1143_4df0_93c1_33e344a1a204.slice/crio-0142fcabb077a8f09a4d3ee2f569822645109a54f78b12da578c3ec93916b8e4.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbd4f5182_1143_4df0_93c1_33e344a1a204.slice/crio-conmon-0142fcabb077a8f09a4d3ee2f569822645109a54f78b12da578c3ec93916b8e4.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbd4f5182_1143_4df0_93c1_33e344a1a204.slice/crio-conmon-9bb36d2958a4e7af85598bed16920c7bf47db7bcc5efe40a9732232c4386465d.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbd4f5182_1143_4df0_93c1_33e344a1a204.slice/crio-9bb36d2958a4e7af85598bed16920c7bf47db7bcc5efe40a9732232c4386465d.scope\": RecentStats: unable to find data in memory cache]"
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.270211 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-metrics-w7b5x"]
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.277024 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eff35669-e559-41ee-9a8b-114907cd6ab0-combined-ca-bundle\") pod \"eff35669-e559-41ee-9a8b-114907cd6ab0\" (UID: \"eff35669-e559-41ee-9a8b-114907cd6ab0\") "
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.277108 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l7gxj\" (UniqueName: \"kubernetes.io/projected/eff35669-e559-41ee-9a8b-114907cd6ab0-kube-api-access-l7gxj\") pod \"eff35669-e559-41ee-9a8b-114907cd6ab0\" (UID: \"eff35669-e559-41ee-9a8b-114907cd6ab0\") "
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.277216 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/eff35669-e559-41ee-9a8b-114907cd6ab0-vencrypt-tls-certs\") pod \"eff35669-e559-41ee-9a8b-114907cd6ab0\" (UID: \"eff35669-e559-41ee-9a8b-114907cd6ab0\") "
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.277256 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eff35669-e559-41ee-9a8b-114907cd6ab0-config-data\") pod \"eff35669-e559-41ee-9a8b-114907cd6ab0\" (UID: \"eff35669-e559-41ee-9a8b-114907cd6ab0\") "
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.277331 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/eff35669-e559-41ee-9a8b-114907cd6ab0-nova-novncproxy-tls-certs\") pod \"eff35669-e559-41ee-9a8b-114907cd6ab0\" (UID: \"eff35669-e559-41ee-9a8b-114907cd6ab0\") "
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.295148 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eff35669-e559-41ee-9a8b-114907cd6ab0-kube-api-access-l7gxj" (OuterVolumeSpecName: "kube-api-access-l7gxj") pod "eff35669-e559-41ee-9a8b-114907cd6ab0" (UID: "eff35669-e559-41ee-9a8b-114907cd6ab0"). InnerVolumeSpecName "kube-api-access-l7gxj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.298322 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.307073 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.310410 4982 scope.go:117] "RemoveContainer" containerID="c5436ddb9b05921d7b248ae7c45b009c65f9ad0e3d87b2a6e20de5649be61f53"
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.312955 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fcd6f8f8f-4kgmx"]
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.315934 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eff35669-e559-41ee-9a8b-114907cd6ab0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eff35669-e559-41ee-9a8b-114907cd6ab0" (UID: "eff35669-e559-41ee-9a8b-114907cd6ab0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.326154 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-fcd6f8f8f-4kgmx"]
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.347207 4982 scope.go:117] "RemoveContainer" containerID="62e9a0e84f780bc9d5109a47a1a3b4b18ffc53274df69cc65efda0d894c70ec0"
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.348291 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eff35669-e559-41ee-9a8b-114907cd6ab0-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "eff35669-e559-41ee-9a8b-114907cd6ab0" (UID: "eff35669-e559-41ee-9a8b-114907cd6ab0"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.373056 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eff35669-e559-41ee-9a8b-114907cd6ab0-config-data" (OuterVolumeSpecName: "config-data") pod "eff35669-e559-41ee-9a8b-114907cd6ab0" (UID: "eff35669-e559-41ee-9a8b-114907cd6ab0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.380540 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eff35669-e559-41ee-9a8b-114907cd6ab0-config-data\") on node \"crc\" DevicePath \"\""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.380569 4982 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/eff35669-e559-41ee-9a8b-114907cd6ab0-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.380580 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eff35669-e559-41ee-9a8b-114907cd6ab0-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.380590 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l7gxj\" (UniqueName: \"kubernetes.io/projected/eff35669-e559-41ee-9a8b-114907cd6ab0-kube-api-access-l7gxj\") on node \"crc\" DevicePath \"\""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.393294 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-bda6-account-create-update-jwx8g"
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.402493 4982 scope.go:117] "RemoveContainer" containerID="c57a66ba5563b4184480525b497fbf6412201b1822a40a8b542aa66de3ec6598"
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.429023 4982 scope.go:117] "RemoveContainer" containerID="e0a7f6b0cbf180a5138362a5544c8d671b128b24285c4947149a0f2feef86c33"
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.436118 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eff35669-e559-41ee-9a8b-114907cd6ab0-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "eff35669-e559-41ee-9a8b-114907cd6ab0" (UID: "eff35669-e559-41ee-9a8b-114907cd6ab0"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.481454 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de93226c-14b2-42b2-a553-2535096d7b81-operator-scripts\") pod \"de93226c-14b2-42b2-a553-2535096d7b81\" (UID: \"de93226c-14b2-42b2-a553-2535096d7b81\") "
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.481580 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n78hz\" (UniqueName: \"kubernetes.io/projected/de93226c-14b2-42b2-a553-2535096d7b81-kube-api-access-n78hz\") pod \"de93226c-14b2-42b2-a553-2535096d7b81\" (UID: \"de93226c-14b2-42b2-a553-2535096d7b81\") "
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.481959 4982 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/eff35669-e559-41ee-9a8b-114907cd6ab0-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.482051 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de93226c-14b2-42b2-a553-2535096d7b81-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "de93226c-14b2-42b2-a553-2535096d7b81" (UID: "de93226c-14b2-42b2-a553-2535096d7b81"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
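The reconciler_common.go / operation_generator.go pairs above trace the kubelet volume manager's teardown sequence for each pod volume: an "UnmountVolume started" announcement, a plugin TearDown, and a later "Volume detached" confirmation once the reconciler observes the unmount. A minimal Go sketch of that three-step pattern (the volume type and unmountAll helper are illustrative assumptions, not the kubelet's actual API):

    package main

    import "fmt"

    // volume models only what the sketch needs; the real kubelet tracks far
    // more per volume (outer/inner spec names, plugin, device path, ...).
    type volume struct {
        name    string
        mounted bool
    }

    // unmountAll mirrors the three-step pattern in the log: announce the
    // unmount, run the plugin's TearDown, then report the volume detached.
    func unmountAll(podUID string, vols []*volume) {
        for _, v := range vols {
            fmt.Printf("UnmountVolume started for volume %q pod %q\n", v.name, podUID)
            v.mounted = false // stands in for the volume plugin's TearDown call
            fmt.Printf("UnmountVolume.TearDown succeeded for volume %q\n", v.name)
        }
        for _, v := range vols {
            if !v.mounted {
                fmt.Printf("Volume detached for volume %q on node %q\n", v.name, "crc")
            }
        }
    }

    func main() {
        unmountAll("eff35669-e559-41ee-9a8b-114907cd6ab0", []*volume{
            {name: "combined-ca-bundle", mounted: true},
            {name: "config-data", mounted: true},
        })
    }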
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.487437 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de93226c-14b2-42b2-a553-2535096d7b81-kube-api-access-n78hz" (OuterVolumeSpecName: "kube-api-access-n78hz") pod "de93226c-14b2-42b2-a553-2535096d7b81" (UID: "de93226c-14b2-42b2-a553-2535096d7b81"). InnerVolumeSpecName "kube-api-access-n78hz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.584180 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de93226c-14b2-42b2-a553-2535096d7b81-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.584221 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n78hz\" (UniqueName: \"kubernetes.io/projected/de93226c-14b2-42b2-a553-2535096d7b81-kube-api-access-n78hz\") on node \"crc\" DevicePath \"\""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.635011 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.685775 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5zdb7\" (UniqueName: \"kubernetes.io/projected/536304e2-bf6d-4c4a-a617-69be7bf2931e-kube-api-access-5zdb7\") pod \"536304e2-bf6d-4c4a-a617-69be7bf2931e\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") "
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.685828 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/536304e2-bf6d-4c4a-a617-69be7bf2931e-galera-tls-certs\") pod \"536304e2-bf6d-4c4a-a617-69be7bf2931e\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") "
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.686317 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/536304e2-bf6d-4c4a-a617-69be7bf2931e-config-data-default\") pod \"536304e2-bf6d-4c4a-a617-69be7bf2931e\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") "
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.686431 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"536304e2-bf6d-4c4a-a617-69be7bf2931e\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") "
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.686466 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/536304e2-bf6d-4c4a-a617-69be7bf2931e-kolla-config\") pod \"536304e2-bf6d-4c4a-a617-69be7bf2931e\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") "
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.686514 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/536304e2-bf6d-4c4a-a617-69be7bf2931e-combined-ca-bundle\") pod \"536304e2-bf6d-4c4a-a617-69be7bf2931e\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") "
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.687093 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/536304e2-bf6d-4c4a-a617-69be7bf2931e-operator-scripts\") pod \"536304e2-bf6d-4c4a-a617-69be7bf2931e\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") "
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.687482 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/536304e2-bf6d-4c4a-a617-69be7bf2931e-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "536304e2-bf6d-4c4a-a617-69be7bf2931e" (UID: "536304e2-bf6d-4c4a-a617-69be7bf2931e"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.687493 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/536304e2-bf6d-4c4a-a617-69be7bf2931e-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "536304e2-bf6d-4c4a-a617-69be7bf2931e" (UID: "536304e2-bf6d-4c4a-a617-69be7bf2931e"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.687979 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/536304e2-bf6d-4c4a-a617-69be7bf2931e-config-data-generated\") pod \"536304e2-bf6d-4c4a-a617-69be7bf2931e\" (UID: \"536304e2-bf6d-4c4a-a617-69be7bf2931e\") "
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.688710 4982 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/536304e2-bf6d-4c4a-a617-69be7bf2931e-config-data-default\") on node \"crc\" DevicePath \"\""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.688752 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/536304e2-bf6d-4c4a-a617-69be7bf2931e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "536304e2-bf6d-4c4a-a617-69be7bf2931e" (UID: "536304e2-bf6d-4c4a-a617-69be7bf2931e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.688768 4982 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/536304e2-bf6d-4c4a-a617-69be7bf2931e-kolla-config\") on node \"crc\" DevicePath \"\""
Jan 22 06:10:36 crc kubenswrapper[4982]: E0122 06:10:36.688838 4982 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found
Jan 22 06:10:36 crc kubenswrapper[4982]: E0122 06:10:36.688964 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/bb198f24-94e6-4569-be12-9ee57000a3e3-config-data podName:bb198f24-94e6-4569-be12-9ee57000a3e3 nodeName:}" failed. No retries permitted until 2026-01-22 06:10:40.688949022 +0000 UTC m=+1501.527587025 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/bb198f24-94e6-4569-be12-9ee57000a3e3-config-data") pod "rabbitmq-cell1-server-0" (UID: "bb198f24-94e6-4569-be12-9ee57000a3e3") : configmap "rabbitmq-cell1-config-data" not found
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.689563 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/536304e2-bf6d-4c4a-a617-69be7bf2931e-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "536304e2-bf6d-4c4a-a617-69be7bf2931e" (UID: "536304e2-bf6d-4c4a-a617-69be7bf2931e"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.703960 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/536304e2-bf6d-4c4a-a617-69be7bf2931e-kube-api-access-5zdb7" (OuterVolumeSpecName: "kube-api-access-5zdb7") pod "536304e2-bf6d-4c4a-a617-69be7bf2931e" (UID: "536304e2-bf6d-4c4a-a617-69be7bf2931e"). InnerVolumeSpecName "kube-api-access-5zdb7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.724987 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/536304e2-bf6d-4c4a-a617-69be7bf2931e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "536304e2-bf6d-4c4a-a617-69be7bf2931e" (UID: "536304e2-bf6d-4c4a-a617-69be7bf2931e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.731146 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "mysql-db") pod "536304e2-bf6d-4c4a-a617-69be7bf2931e" (UID: "536304e2-bf6d-4c4a-a617-69be7bf2931e"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
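The nestedpendingoperations.go:348 entry above schedules the failed MountVolume.SetUp retry with an exponential delay ("No retries permitted until ... durationBeforeRetry 4s"; later entries in this log show the 500ms initial step). A sketch of that doubling-with-a-cap behaviour, assuming a 500ms initial delay and a 2m ceiling (illustrative constants, not copied from the kubelet):

    package main

    import (
        "fmt"
        "time"
    )

    // expBackoff doubles the wait after every failure, up to a cap.
    type expBackoff struct {
        initial, ceiling, current time.Duration
    }

    func (b *expBackoff) next() time.Duration {
        if b.current == 0 {
            b.current = b.initial
            return b.current
        }
        b.current *= 2
        if b.current > b.ceiling {
            b.current = b.ceiling
        }
        return b.current
    }

    func main() {
        b := &expBackoff{initial: 500 * time.Millisecond, ceiling: 2 * time.Minute}
        for attempt := 1; attempt <= 5; attempt++ {
            d := b.next()
            fmt.Printf("attempt %d failed; no retries permitted until %s (durationBeforeRetry %s)\n",
                attempt, time.Now().Add(d).Format(time.RFC3339), d)
        }
    }

Under these assumptions the delays run 500ms, 1s, 2s, 4s, 8s, which is consistent with the 500ms and 4s values visible in the surrounding entries.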
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.790839 4982 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/536304e2-bf6d-4c4a-a617-69be7bf2931e-config-data-generated\") on node \"crc\" DevicePath \"\""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.791081 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5zdb7\" (UniqueName: \"kubernetes.io/projected/536304e2-bf6d-4c4a-a617-69be7bf2931e-kube-api-access-5zdb7\") on node \"crc\" DevicePath \"\""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.791102 4982 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" "
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.791113 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/536304e2-bf6d-4c4a-a617-69be7bf2931e-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.791124 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/536304e2-bf6d-4c4a-a617-69be7bf2931e-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.804061 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/536304e2-bf6d-4c4a-a617-69be7bf2931e-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "536304e2-bf6d-4c4a-a617-69be7bf2931e" (UID: "536304e2-bf6d-4c4a-a617-69be7bf2931e"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.809765 4982 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc"
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.830519 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-777f-account-create-update-ssrcg"
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.852968 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-a886-account-create-update-l7kpg"
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.871512 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-6b4bc7b64f-sqvw9"
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.872963 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-a8f0-account-create-update-nclgk"
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.896601 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bd4f5182-1143-4df0-93c1-33e344a1a204-log-httpd\") pod \"bd4f5182-1143-4df0-93c1-33e344a1a204\" (UID: \"bd4f5182-1143-4df0-93c1-33e344a1a204\") "
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.896670 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nx9xf\" (UniqueName: \"kubernetes.io/projected/bd4f5182-1143-4df0-93c1-33e344a1a204-kube-api-access-nx9xf\") pod \"bd4f5182-1143-4df0-93c1-33e344a1a204\" (UID: \"bd4f5182-1143-4df0-93c1-33e344a1a204\") "
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.896690 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bff36695-f9e7-4506-95b2-a1c99296fe75-operator-scripts\") pod \"bff36695-f9e7-4506-95b2-a1c99296fe75\" (UID: \"bff36695-f9e7-4506-95b2-a1c99296fe75\") "
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.896708 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4wbmf\" (UniqueName: \"kubernetes.io/projected/bff36695-f9e7-4506-95b2-a1c99296fe75-kube-api-access-4wbmf\") pod \"bff36695-f9e7-4506-95b2-a1c99296fe75\" (UID: \"bff36695-f9e7-4506-95b2-a1c99296fe75\") "
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.897622 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bd4f5182-1143-4df0-93c1-33e344a1a204-run-httpd\") pod \"bd4f5182-1143-4df0-93c1-33e344a1a204\" (UID: \"bd4f5182-1143-4df0-93c1-33e344a1a204\") "
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.897661 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dffeeb11-fe93-4cb1-ad5c-e77fbe664a70-operator-scripts\") pod \"dffeeb11-fe93-4cb1-ad5c-e77fbe664a70\" (UID: \"dffeeb11-fe93-4cb1-ad5c-e77fbe664a70\") "
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.897693 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-885d6\" (UniqueName: \"kubernetes.io/projected/66a74d17-3793-4757-b423-503ee60fc4d5-kube-api-access-885d6\") pod \"66a74d17-3793-4757-b423-503ee60fc4d5\" (UID: \"66a74d17-3793-4757-b423-503ee60fc4d5\") "
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.897745 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd4f5182-1143-4df0-93c1-33e344a1a204-config-data\") pod \"bd4f5182-1143-4df0-93c1-33e344a1a204\" (UID: \"bd4f5182-1143-4df0-93c1-33e344a1a204\") "
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.897771 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd4f5182-1143-4df0-93c1-33e344a1a204-internal-tls-certs\") pod \"bd4f5182-1143-4df0-93c1-33e344a1a204\" (UID: \"bd4f5182-1143-4df0-93c1-33e344a1a204\") "
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.897803 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd4f5182-1143-4df0-93c1-33e344a1a204-public-tls-certs\") pod \"bd4f5182-1143-4df0-93c1-33e344a1a204\" (UID: \"bd4f5182-1143-4df0-93c1-33e344a1a204\") "
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.897842 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bd4f5182-1143-4df0-93c1-33e344a1a204-etc-swift\") pod \"bd4f5182-1143-4df0-93c1-33e344a1a204\" (UID: \"bd4f5182-1143-4df0-93c1-33e344a1a204\") "
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.897899 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66a74d17-3793-4757-b423-503ee60fc4d5-operator-scripts\") pod \"66a74d17-3793-4757-b423-503ee60fc4d5\" (UID: \"66a74d17-3793-4757-b423-503ee60fc4d5\") "
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.897947 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fmbp6\" (UniqueName: \"kubernetes.io/projected/dffeeb11-fe93-4cb1-ad5c-e77fbe664a70-kube-api-access-fmbp6\") pod \"dffeeb11-fe93-4cb1-ad5c-e77fbe664a70\" (UID: \"dffeeb11-fe93-4cb1-ad5c-e77fbe664a70\") "
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.898014 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd4f5182-1143-4df0-93c1-33e344a1a204-combined-ca-bundle\") pod \"bd4f5182-1143-4df0-93c1-33e344a1a204\" (UID: \"bd4f5182-1143-4df0-93c1-33e344a1a204\") "
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.898396 4982 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/536304e2-bf6d-4c4a-a617-69be7bf2931e-galera-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.898417 4982 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.896925 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd4f5182-1143-4df0-93c1-33e344a1a204-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "bd4f5182-1143-4df0-93c1-33e344a1a204" (UID: "bd4f5182-1143-4df0-93c1-33e344a1a204"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.897245 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bff36695-f9e7-4506-95b2-a1c99296fe75-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bff36695-f9e7-4506-95b2-a1c99296fe75" (UID: "bff36695-f9e7-4506-95b2-a1c99296fe75"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.900025 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd4f5182-1143-4df0-93c1-33e344a1a204-kube-api-access-nx9xf" (OuterVolumeSpecName: "kube-api-access-nx9xf") pod "bd4f5182-1143-4df0-93c1-33e344a1a204" (UID: "bd4f5182-1143-4df0-93c1-33e344a1a204"). InnerVolumeSpecName "kube-api-access-nx9xf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.900541 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bff36695-f9e7-4506-95b2-a1c99296fe75-kube-api-access-4wbmf" (OuterVolumeSpecName: "kube-api-access-4wbmf") pod "bff36695-f9e7-4506-95b2-a1c99296fe75" (UID: "bff36695-f9e7-4506-95b2-a1c99296fe75"). InnerVolumeSpecName "kube-api-access-4wbmf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.901269 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dffeeb11-fe93-4cb1-ad5c-e77fbe664a70-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "dffeeb11-fe93-4cb1-ad5c-e77fbe664a70" (UID: "dffeeb11-fe93-4cb1-ad5c-e77fbe664a70"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.901304 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd4f5182-1143-4df0-93c1-33e344a1a204-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "bd4f5182-1143-4df0-93c1-33e344a1a204" (UID: "bd4f5182-1143-4df0-93c1-33e344a1a204"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.902166 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66a74d17-3793-4757-b423-503ee60fc4d5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "66a74d17-3793-4757-b423-503ee60fc4d5" (UID: "66a74d17-3793-4757-b423-503ee60fc4d5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.906045 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dffeeb11-fe93-4cb1-ad5c-e77fbe664a70-kube-api-access-fmbp6" (OuterVolumeSpecName: "kube-api-access-fmbp6") pod "dffeeb11-fe93-4cb1-ad5c-e77fbe664a70" (UID: "dffeeb11-fe93-4cb1-ad5c-e77fbe664a70"). InnerVolumeSpecName "kube-api-access-fmbp6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.919649 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd4f5182-1143-4df0-93c1-33e344a1a204-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "bd4f5182-1143-4df0-93c1-33e344a1a204" (UID: "bd4f5182-1143-4df0-93c1-33e344a1a204"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.925572 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66a74d17-3793-4757-b423-503ee60fc4d5-kube-api-access-885d6" (OuterVolumeSpecName: "kube-api-access-885d6") pod "66a74d17-3793-4757-b423-503ee60fc4d5" (UID: "66a74d17-3793-4757-b423-503ee60fc4d5"). InnerVolumeSpecName "kube-api-access-885d6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.948165 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd4f5182-1143-4df0-93c1-33e344a1a204-config-data" (OuterVolumeSpecName: "config-data") pod "bd4f5182-1143-4df0-93c1-33e344a1a204" (UID: "bd4f5182-1143-4df0-93c1-33e344a1a204"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.950032 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd4f5182-1143-4df0-93c1-33e344a1a204-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "bd4f5182-1143-4df0-93c1-33e344a1a204" (UID: "bd4f5182-1143-4df0-93c1-33e344a1a204"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.967718 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd4f5182-1143-4df0-93c1-33e344a1a204-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bd4f5182-1143-4df0-93c1-33e344a1a204" (UID: "bd4f5182-1143-4df0-93c1-33e344a1a204"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 06:10:36 crc kubenswrapper[4982]: I0122 06:10:36.969952 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd4f5182-1143-4df0-93c1-33e344a1a204-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "bd4f5182-1143-4df0-93c1-33e344a1a204" (UID: "bd4f5182-1143-4df0-93c1-33e344a1a204"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.001104 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd4f5182-1143-4df0-93c1-33e344a1a204-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.001134 4982 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bd4f5182-1143-4df0-93c1-33e344a1a204-log-httpd\") on node \"crc\" DevicePath \"\""
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.001143 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nx9xf\" (UniqueName: \"kubernetes.io/projected/bd4f5182-1143-4df0-93c1-33e344a1a204-kube-api-access-nx9xf\") on node \"crc\" DevicePath \"\""
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.001153 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bff36695-f9e7-4506-95b2-a1c99296fe75-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.001163 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4wbmf\" (UniqueName: \"kubernetes.io/projected/bff36695-f9e7-4506-95b2-a1c99296fe75-kube-api-access-4wbmf\") on node \"crc\" DevicePath \"\""
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.001171 4982 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bd4f5182-1143-4df0-93c1-33e344a1a204-run-httpd\") on node \"crc\" DevicePath \"\""
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.001180 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dffeeb11-fe93-4cb1-ad5c-e77fbe664a70-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.001188 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-885d6\" (UniqueName: \"kubernetes.io/projected/66a74d17-3793-4757-b423-503ee60fc4d5-kube-api-access-885d6\") on node \"crc\" DevicePath \"\""
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.001196 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd4f5182-1143-4df0-93c1-33e344a1a204-config-data\") on node \"crc\" DevicePath \"\""
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.001204 4982 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd4f5182-1143-4df0-93c1-33e344a1a204-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.001212 4982 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd4f5182-1143-4df0-93c1-33e344a1a204-public-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.001219 4982 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bd4f5182-1143-4df0-93c1-33e344a1a204-etc-swift\") on node \"crc\" DevicePath \"\""
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.001250 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66a74d17-3793-4757-b423-503ee60fc4d5-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.001259 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fmbp6\" (UniqueName: \"kubernetes.io/projected/dffeeb11-fe93-4cb1-ad5c-e77fbe664a70-kube-api-access-fmbp6\") on node \"crc\" DevicePath \"\""
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.080459 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.080757 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="681a16ff-1468-4a9a-a692-5461230072bd" containerName="ceilometer-central-agent" containerID="cri-o://7f2cbf5fe89ef897372c71697b853e4151f7bdaf7d9e22d86ada81c4c27538fd" gracePeriod=30
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.081011 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="681a16ff-1468-4a9a-a692-5461230072bd" containerName="proxy-httpd" containerID="cri-o://fc3d99dc9caa4c46c23d4707378cbddbf32648bc1c82450d35d566fe03f67374" gracePeriod=30
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.081138 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="681a16ff-1468-4a9a-a692-5461230072bd" containerName="sg-core" containerID="cri-o://140d8add2f50c3a875e31d6ba85286481b8d656646014149c174dd7fcd703594" gracePeriod=30
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.081186 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="681a16ff-1468-4a9a-a692-5461230072bd" containerName="ceilometer-notification-agent" containerID="cri-o://18ecffa2d4497e293839524f6ed5f32c178f24e5d755feed0ffbc3b6d03460b6" gracePeriod=30
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.095963 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.096173 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="07b70872-6840-498b-be43-290f43590bb9" containerName="kube-state-metrics" containerID="cri-o://addbc96e6718c32e48886fcf4a4c3eca72d548b434d5453710a2fcc4daabe338" gracePeriod=30
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.282380 4982 generic.go:334] "Generic (PLEG): container finished" podID="681a16ff-1468-4a9a-a692-5461230072bd" containerID="140d8add2f50c3a875e31d6ba85286481b8d656646014149c174dd7fcd703594" exitCode=2
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.282750 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"681a16ff-1468-4a9a-a692-5461230072bd","Type":"ContainerDied","Data":"140d8add2f50c3a875e31d6ba85286481b8d656646014149c174dd7fcd703594"}
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.302055 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-777f-account-create-update-ssrcg" event={"ID":"dffeeb11-fe93-4cb1-ad5c-e77fbe664a70","Type":"ContainerDied","Data":"4c2961de1d7496427f2134ba4205ee5cac3ba70880b62ae71e1869169dc30eed"}
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.302143 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-777f-account-create-update-ssrcg"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.312091 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.312364 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"eff35669-e559-41ee-9a8b-114907cd6ab0","Type":"ContainerDied","Data":"ba8484cd48d8bef98f75cf838831f3280fdbf6efb0419f484b66489e24684ac5"}
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.312457 4982 scope.go:117] "RemoveContainer" containerID="5cdd5203835aa0d86d98aea0ae6ed7270810a51fcce1c980a5665cd105b86e96"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.332093 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-a886-account-create-update-l7kpg" event={"ID":"66a74d17-3793-4757-b423-503ee60fc4d5","Type":"ContainerDied","Data":"ae0547f33ed4b4d4d2103d05432f859ce01dff5562f415ed88ee2ea930a122f8"}
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.332157 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-a886-account-create-update-l7kpg"
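The kuberuntime_container.go:808 "Killing container with a grace period" entries above, each with gracePeriod=30, correspond to the usual stop sequence: ask the runtime to stop the container (SIGTERM), wait up to the grace period for it to exit, then force-kill. A sketch of that flow, where stop, kill and exited are hypothetical callbacks standing in for the CRI calls the kubelet actually makes:

    package main

    import (
        "fmt"
        "time"
    )

    // stopContainer signals the container, polls until it exits or the
    // grace period lapses, then force-kills it.
    func stopContainer(id string, grace time.Duration,
        stop, kill func(string), exited func(string) bool) {

        fmt.Printf("Killing container with a grace period containerID=%q gracePeriod=%s\n", id, grace)
        stop(id) // delivers SIGTERM inside the container
        deadline := time.Now().Add(grace)
        for time.Now().Before(deadline) {
            if exited(id) {
                return // clean shutdown within the grace period
            }
            time.Sleep(100 * time.Millisecond)
        }
        kill(id) // grace period exhausted: force-kill
    }

    func main() {
        polls := 0
        stopContainer("example-container", 30*time.Second,
            func(id string) { fmt.Println("StopContainer ->", id) },
            func(id string) { fmt.Println("ForceKill ->", id) },
            func(id string) bool { polls++; return polls > 1 }, // exits on 2nd poll
        )
    }

The non-zero exit codes reported just below (exitCode=2 for sg-core and kube-state-metrics) are typical of processes terminated by signal rather than exiting cleanly.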
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.344781 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-61bb-account-create-update-5ntzd"]
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.385913 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"]
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.386136 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/memcached-0" podUID="0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e" containerName="memcached" containerID="cri-o://f2b7f771d32993fbe91c45e65c5bc7565b9b9c6611050ef239f416bde7c88a3d" gracePeriod=30
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.395113 4982 generic.go:334] "Generic (PLEG): container finished" podID="9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21" containerID="a6ac05cf7240c43f23092e380fa1df8582d0653aa59e96d2961d318758638b64" exitCode=1
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.395201 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-b7g85" event={"ID":"9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21","Type":"ContainerDied","Data":"a6ac05cf7240c43f23092e380fa1df8582d0653aa59e96d2961d318758638b64"}
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.412377 4982 scope.go:117] "RemoveContainer" containerID="e22bfcf49126051d18c96c4b12bfda7fb7db2a8d1c02fa24e1431a150649a392"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.426705 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-61bb-account-create-update-5ntzd"]
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.443113 4982 generic.go:334] "Generic (PLEG): container finished" podID="07b70872-6840-498b-be43-290f43590bb9" containerID="addbc96e6718c32e48886fcf4a4c3eca72d548b434d5453710a2fcc4daabe338" exitCode=2
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.443203 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"07b70872-6840-498b-be43-290f43590bb9","Type":"ContainerDied","Data":"addbc96e6718c32e48886fcf4a4c3eca72d548b434d5453710a2fcc4daabe338"}
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.443876 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-61bb-account-create-update-56s5n"]
Jan 22 06:10:37 crc kubenswrapper[4982]: E0122 06:10:37.444246 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef18a198-b47b-4e19-a56b-7179e76a1e12" containerName="ovsdbserver-nb"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.444302 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef18a198-b47b-4e19-a56b-7179e76a1e12" containerName="ovsdbserver-nb"
Jan 22 06:10:37 crc kubenswrapper[4982]: E0122 06:10:37.444353 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd4f5182-1143-4df0-93c1-33e344a1a204" containerName="proxy-httpd"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.444397 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd4f5182-1143-4df0-93c1-33e344a1a204" containerName="proxy-httpd"
Jan 22 06:10:37 crc kubenswrapper[4982]: E0122 06:10:37.444459 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eff35669-e559-41ee-9a8b-114907cd6ab0" containerName="nova-cell1-novncproxy-novncproxy"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.444515 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="eff35669-e559-41ee-9a8b-114907cd6ab0" containerName="nova-cell1-novncproxy-novncproxy"
Jan 22 06:10:37 crc kubenswrapper[4982]: E0122 06:10:37.444568 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd4f5182-1143-4df0-93c1-33e344a1a204" containerName="proxy-server"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.444614 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd4f5182-1143-4df0-93c1-33e344a1a204" containerName="proxy-server"
Jan 22 06:10:37 crc kubenswrapper[4982]: E0122 06:10:37.444662 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3aeab92-5988-4e7b-9fcf-62c13e364f98" containerName="dnsmasq-dns"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.444708 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3aeab92-5988-4e7b-9fcf-62c13e364f98" containerName="dnsmasq-dns"
Jan 22 06:10:37 crc kubenswrapper[4982]: E0122 06:10:37.444767 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abef47d7-89c0-4c16-8474-8db3d2d127e7" containerName="ovsdbserver-sb"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.444818 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="abef47d7-89c0-4c16-8474-8db3d2d127e7" containerName="ovsdbserver-sb"
Jan 22 06:10:37 crc kubenswrapper[4982]: E0122 06:10:37.444885 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef18a198-b47b-4e19-a56b-7179e76a1e12" containerName="openstack-network-exporter"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.444969 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef18a198-b47b-4e19-a56b-7179e76a1e12" containerName="openstack-network-exporter"
Jan 22 06:10:37 crc kubenswrapper[4982]: E0122 06:10:37.445030 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="536304e2-bf6d-4c4a-a617-69be7bf2931e" containerName="galera"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.445077 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="536304e2-bf6d-4c4a-a617-69be7bf2931e" containerName="galera"
Jan 22 06:10:37 crc kubenswrapper[4982]: E0122 06:10:37.445126 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="536304e2-bf6d-4c4a-a617-69be7bf2931e" containerName="mysql-bootstrap"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.445180 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="536304e2-bf6d-4c4a-a617-69be7bf2931e" containerName="mysql-bootstrap"
Jan 22 06:10:37 crc kubenswrapper[4982]: E0122 06:10:37.445356 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98d29241-70c5-488d-826a-c37971689e5c" containerName="openstack-network-exporter"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.445409 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="98d29241-70c5-488d-826a-c37971689e5c" containerName="openstack-network-exporter"
Jan 22 06:10:37 crc kubenswrapper[4982]: E0122 06:10:37.445458 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3aeab92-5988-4e7b-9fcf-62c13e364f98" containerName="init"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.445502 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3aeab92-5988-4e7b-9fcf-62c13e364f98" containerName="init"
Jan 22 06:10:37 crc kubenswrapper[4982]: E0122 06:10:37.445548 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abef47d7-89c0-4c16-8474-8db3d2d127e7" containerName="openstack-network-exporter"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.445597 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="abef47d7-89c0-4c16-8474-8db3d2d127e7" containerName="openstack-network-exporter"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.445801 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="98d29241-70c5-488d-826a-c37971689e5c" containerName="openstack-network-exporter"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.457995 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="eff35669-e559-41ee-9a8b-114907cd6ab0" containerName="nova-cell1-novncproxy-novncproxy"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.458157 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd4f5182-1143-4df0-93c1-33e344a1a204" containerName="proxy-httpd"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.458221 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef18a198-b47b-4e19-a56b-7179e76a1e12" containerName="openstack-network-exporter"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.458314 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="536304e2-bf6d-4c4a-a617-69be7bf2931e" containerName="galera"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.458374 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="abef47d7-89c0-4c16-8474-8db3d2d127e7" containerName="ovsdbserver-sb"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.458423 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="abef47d7-89c0-4c16-8474-8db3d2d127e7" containerName="openstack-network-exporter"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.458478 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3aeab92-5988-4e7b-9fcf-62c13e364f98" containerName="dnsmasq-dns"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.458532 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef18a198-b47b-4e19-a56b-7179e76a1e12" containerName="ovsdbserver-nb"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.458579 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd4f5182-1143-4df0-93c1-33e344a1a204" containerName="proxy-server"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.459236 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-61bb-account-create-update-56s5n"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.492114 4982 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/root-account-create-update-b7g85" secret="" err="secret \"galera-openstack-dockercfg-m2mgh\" not found"
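The cpu_manager.go:410 / state_mem.go:107 / memory_manager.go:354 runs above drop resource-manager assignments for containers whose pods no longer exist, before the newly added pod is admitted. A sketch of that cleanup, under the assumption that state is a map of pod UID to per-container assignments (the data layout is an illustration, not the kubelet's actual state format):

    package main

    import "fmt"

    // removeStaleState drops assignments for pods that are no longer active.
    func removeStaleState(assignments map[string]map[string]string, active map[string]bool) {
        for podUID, containers := range assignments {
            if active[podUID] {
                continue
            }
            for name := range containers {
                fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n", podUID, name)
            }
            delete(assignments, podUID) // deleting during range is safe in Go
        }
    }

    func main() {
        state := map[string]map[string]string{
            "536304e2-bf6d-4c4a-a617-69be7bf2931e": {"galera": "cpus 0-3"},
            "live-pod":                             {"app": "cpus 4-7"},
        }
        removeStaleState(state, map[string]bool{"live-pod": true})
        fmt.Println("assignments left:", len(state))
    }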
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.492176 4982 scope.go:117] "RemoveContainer" containerID="a6ac05cf7240c43f23092e380fa1df8582d0653aa59e96d2961d318758638b64"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.496759 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-bda6-account-create-update-jwx8g" event={"ID":"de93226c-14b2-42b2-a553-2535096d7b81","Type":"ContainerDied","Data":"17d28263df110c87238340d34cea08d6b255cbb04297cbd9a803f8f66ef076a7"}
Jan 22 06:10:37 crc kubenswrapper[4982]: E0122 06:10:37.501258 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CrashLoopBackOff: \"back-off 10s restarting failed container=mariadb-account-create-update pod=root-account-create-update-b7g85_openstack(9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21)\"" pod="openstack/root-account-create-update-b7g85" podUID="9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.502473 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/placement-664cd4b9c4-ccktp" podUID="7ba39683-f142-405a-a1c8-83841c5b2cd0" containerName="placement-log" probeResult="failure" output="Get \"https://10.217.0.150:8778/\": read tcp 10.217.0.2:49794->10.217.0.150:8778: read: connection reset by peer"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.502640 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/placement-664cd4b9c4-ccktp" podUID="7ba39683-f142-405a-a1c8-83841c5b2cd0" containerName="placement-api" probeResult="failure" output="Get \"https://10.217.0.150:8778/\": read tcp 10.217.0.2:49782->10.217.0.150:8778: read: connection reset by peer"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.502879 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-bda6-account-create-update-jwx8g"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.510124 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.512453 4982 generic.go:334] "Generic (PLEG): container finished" podID="bd4f5182-1143-4df0-93c1-33e344a1a204" containerID="9bb36d2958a4e7af85598bed16920c7bf47db7bcc5efe40a9732232c4386465d" exitCode=0
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.512473 4982 generic.go:334] "Generic (PLEG): container finished" podID="bd4f5182-1143-4df0-93c1-33e344a1a204" containerID="0142fcabb077a8f09a4d3ee2f569822645109a54f78b12da578c3ec93916b8e4" exitCode=0
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.512518 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" event={"ID":"bd4f5182-1143-4df0-93c1-33e344a1a204","Type":"ContainerDied","Data":"9bb36d2958a4e7af85598bed16920c7bf47db7bcc5efe40a9732232c4386465d"}
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.512601 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-6b4bc7b64f-sqvw9"
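The pod_workers.go:1301 entry above refuses a restart with "back-off 10s restarting failed container": each consecutive crash doubles the restart delay up to a ceiling. A sketch with a 10s initial delay and a 5m cap (commonly cited kubelet defaults, but treat the exact constants as assumptions here):

    package main

    import (
        "fmt"
        "time"
    )

    // crashLoopDelay doubles the restart back-off per consecutive crash.
    func crashLoopDelay(consecutiveCrashes int) time.Duration {
        delay := 10 * time.Second
        for i := 1; i < consecutiveCrashes; i++ {
            delay *= 2
            if delay >= 5*time.Minute {
                return 5 * time.Minute
            }
        }
        return delay
    }

    func main() {
        for n := 1; n <= 6; n++ {
            fmt.Printf("crash #%d -> back-off %s\n", n, crashLoopDelay(n))
        }
    }

Under these assumptions the delays run 10s, 20s, 40s, 1m20s, 2m40s, then cap at 5m; the log above shows the first step of that ladder.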
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.512765 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" event={"ID":"bd4f5182-1143-4df0-93c1-33e344a1a204","Type":"ContainerDied","Data":"0142fcabb077a8f09a4d3ee2f569822645109a54f78b12da578c3ec93916b8e4"}
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.512807 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6b4bc7b64f-sqvw9" event={"ID":"bd4f5182-1143-4df0-93c1-33e344a1a204","Type":"ContainerDied","Data":"2c8e942d6ed813c8223d98ac6012f17f7a1ceea3c31bdaf488c2ad3651dbc7ad"}
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.520319 4982 scope.go:117] "RemoveContainer" containerID="9bb36d2958a4e7af85598bed16920c7bf47db7bcc5efe40a9732232c4386465d"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.520665 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f394216d-b819-4d0d-b8f8-4195c9562621-operator-scripts\") pod \"keystone-61bb-account-create-update-56s5n\" (UID: \"f394216d-b819-4d0d-b8f8-4195c9562621\") " pod="openstack/keystone-61bb-account-create-update-56s5n"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.520730 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvbg6\" (UniqueName: \"kubernetes.io/projected/f394216d-b819-4d0d-b8f8-4195c9562621-kube-api-access-gvbg6\") pod \"keystone-61bb-account-create-update-56s5n\" (UID: \"f394216d-b819-4d0d-b8f8-4195c9562621\") " pod="openstack/keystone-61bb-account-create-update-56s5n"
Jan 22 06:10:37 crc kubenswrapper[4982]: E0122 06:10:37.520937 4982 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found
Jan 22 06:10:37 crc kubenswrapper[4982]: E0122 06:10:37.520981 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21-operator-scripts podName:9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21 nodeName:}" failed. No retries permitted until 2026-01-22 06:10:38.020966805 +0000 UTC m=+1498.859604808 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21-operator-scripts") pod "root-account-create-update-b7g85" (UID: "9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21") : configmap "openstack-scripts" not found
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.544957 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-61bb-account-create-update-56s5n"]
Jan 22 06:10:37 crc kubenswrapper[4982]: E0122 06:10:37.585980 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cdcccc69427ee2d1728f8e7d6f59420bebf9be65e0a9061fee307038b83d9cb6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.601950 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-vdfzh"]
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.602534 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-a8f0-account-create-update-nclgk" event={"ID":"bff36695-f9e7-4506-95b2-a1c99296fe75","Type":"ContainerDied","Data":"897b30335a39e8fa77655e8853a035e43cac49eddc976b61970a856744fa796f"}
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.602641 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-a8f0-account-create-update-nclgk"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.614057 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-vdfzh"]
Jan 22 06:10:37 crc kubenswrapper[4982]: E0122 06:10:37.615167 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cdcccc69427ee2d1728f8e7d6f59420bebf9be65e0a9061fee307038b83d9cb6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.621228 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-z27bt"]
Jan 22 06:10:37 crc kubenswrapper[4982]: E0122 06:10:37.622797 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cdcccc69427ee2d1728f8e7d6f59420bebf9be65e0a9061fee307038b83d9cb6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Jan 22 06:10:37 crc kubenswrapper[4982]: E0122 06:10:37.622864 4982 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="bcc9f070-463f-4fef-8eb0-fd0cbe567f70" containerName="nova-cell1-conductor-conductor"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.623044 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvbg6\" (UniqueName: \"kubernetes.io/projected/f394216d-b819-4d0d-b8f8-4195c9562621-kube-api-access-gvbg6\") pod \"keystone-61bb-account-create-update-56s5n\" (UID: \"f394216d-b819-4d0d-b8f8-4195c9562621\") " pod="openstack/keystone-61bb-account-create-update-56s5n"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.623319 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f394216d-b819-4d0d-b8f8-4195c9562621-operator-scripts\") pod \"keystone-61bb-account-create-update-56s5n\" (UID: \"f394216d-b819-4d0d-b8f8-4195c9562621\") " pod="openstack/keystone-61bb-account-create-update-56s5n"
Jan 22 06:10:37 crc kubenswrapper[4982]: E0122 06:10:37.623452 4982 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found
Jan 22 06:10:37 crc kubenswrapper[4982]: E0122 06:10:37.623505 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/f394216d-b819-4d0d-b8f8-4195c9562621-operator-scripts podName:f394216d-b819-4d0d-b8f8-4195c9562621 nodeName:}" failed. No retries permitted until 2026-01-22 06:10:38.123490146 +0000 UTC m=+1498.962128149 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/f394216d-b819-4d0d-b8f8-4195c9562621-operator-scripts") pod "keystone-61bb-account-create-update-56s5n" (UID: "f394216d-b819-4d0d-b8f8-4195c9562621") : configmap "openstack-scripts" not found
Jan 22 06:10:37 crc kubenswrapper[4982]: E0122 06:10:37.626622 4982 projected.go:194] Error preparing data for projected volume kube-api-access-gvbg6 for pod openstack/keystone-61bb-account-create-update-56s5n: failed to fetch token: serviceaccounts "galera-openstack" not found
Jan 22 06:10:37 crc kubenswrapper[4982]: E0122 06:10:37.626671 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f394216d-b819-4d0d-b8f8-4195c9562621-kube-api-access-gvbg6 podName:f394216d-b819-4d0d-b8f8-4195c9562621 nodeName:}" failed. No retries permitted until 2026-01-22 06:10:38.126656289 +0000 UTC m=+1498.965294292 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-gvbg6" (UniqueName: "kubernetes.io/projected/f394216d-b819-4d0d-b8f8-4195c9562621-kube-api-access-gvbg6") pod "keystone-61bb-account-create-update-56s5n" (UID: "f394216d-b819-4d0d-b8f8-4195c9562621") : failed to fetch token: serviceaccounts "galera-openstack" not found
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.628834 4982 generic.go:334] "Generic (PLEG): container finished" podID="536304e2-bf6d-4c4a-a617-69be7bf2931e" containerID="3287373bce9781dc69b47f25c25358cb2a575449c8b3fc92c2478acccb6376eb" exitCode=0
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.628885 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"536304e2-bf6d-4c4a-a617-69be7bf2931e","Type":"ContainerDied","Data":"3287373bce9781dc69b47f25c25358cb2a575449c8b3fc92c2478acccb6376eb"}
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.628906 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"536304e2-bf6d-4c4a-a617-69be7bf2931e","Type":"ContainerDied","Data":"6c7dbd60d0a9b0b4fe53b885330ca39643b037af623fb5a8496214d5dd718e6a"}
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.628983 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.639538 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-z27bt"]
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.684712 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-7c5dd486cd-r6bbs"]
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.685349 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/keystone-7c5dd486cd-r6bbs" podUID="e9f36aaa-d627-4dfe-ab21-b5bedee0a25c" containerName="keystone-api" containerID="cri-o://d9527b25256bc946222c27f68c3f6bbab1d33657220aeaed03ede4fe9c3b35bf" gracePeriod=30
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.711242 4982 scope.go:117] "RemoveContainer" containerID="0142fcabb077a8f09a4d3ee2f569822645109a54f78b12da578c3ec93916b8e4"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.718874 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"]
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.736972 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26e1b065-9ba1-4c27-90a3-11b78de079b7" path="/var/lib/kubelet/pods/26e1b065-9ba1-4c27-90a3-11b78de079b7/volumes"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.737532 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e75f34c-23aa-46c5-a382-7ff49efa327e" path="/var/lib/kubelet/pods/6e75f34c-23aa-46c5-a382-7ff49efa327e/volumes"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.757503 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1" path="/var/lib/kubelet/pods/96f4b9a1-0dfb-4b21-bfe2-77dc43dd70b1/volumes"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.758197 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="98d29241-70c5-488d-826a-c37971689e5c" path="/var/lib/kubelet/pods/98d29241-70c5-488d-826a-c37971689e5c/volumes"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.759457 4982 scope.go:117] "RemoveContainer" containerID="9bb36d2958a4e7af85598bed16920c7bf47db7bcc5efe40a9732232c4386465d"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.759946 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3aeab92-5988-4e7b-9fcf-62c13e364f98" path="/var/lib/kubelet/pods/b3aeab92-5988-4e7b-9fcf-62c13e364f98/volumes"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.760816 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef18a198-b47b-4e19-a56b-7179e76a1e12" path="/var/lib/kubelet/pods/ef18a198-b47b-4e19-a56b-7179e76a1e12/volumes"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.772110 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-b7g85"]
Jan 22 06:10:37 crc kubenswrapper[4982]: E0122 06:10:37.772138 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9bb36d2958a4e7af85598bed16920c7bf47db7bcc5efe40a9732232c4386465d\": container with ID starting with 9bb36d2958a4e7af85598bed16920c7bf47db7bcc5efe40a9732232c4386465d not found: ID does not exist" containerID="9bb36d2958a4e7af85598bed16920c7bf47db7bcc5efe40a9732232c4386465d"
Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.772198 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9bb36d2958a4e7af85598bed16920c7bf47db7bcc5efe40a9732232c4386465d"} err="failed to get container status \"9bb36d2958a4e7af85598bed16920c7bf47db7bcc5efe40a9732232c4386465d\": rpc error: code = NotFound desc = could not find container \"9bb36d2958a4e7af85598bed16920c7bf47db7bcc5efe40a9732232c4386465d\": container with ID starting with 9bb36d2958a4e7af85598bed16920c7bf47db7bcc5efe40a9732232c4386465d not found: ID does not exist"
containerID={"Type":"cri-o","ID":"9bb36d2958a4e7af85598bed16920c7bf47db7bcc5efe40a9732232c4386465d"} err="failed to get container status \"9bb36d2958a4e7af85598bed16920c7bf47db7bcc5efe40a9732232c4386465d\": rpc error: code = NotFound desc = could not find container \"9bb36d2958a4e7af85598bed16920c7bf47db7bcc5efe40a9732232c4386465d\": container with ID starting with 9bb36d2958a4e7af85598bed16920c7bf47db7bcc5efe40a9732232c4386465d not found: ID does not exist" Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.772245 4982 scope.go:117] "RemoveContainer" containerID="0142fcabb077a8f09a4d3ee2f569822645109a54f78b12da578c3ec93916b8e4" Jan 22 06:10:37 crc kubenswrapper[4982]: E0122 06:10:37.774151 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0142fcabb077a8f09a4d3ee2f569822645109a54f78b12da578c3ec93916b8e4\": container with ID starting with 0142fcabb077a8f09a4d3ee2f569822645109a54f78b12da578c3ec93916b8e4 not found: ID does not exist" containerID="0142fcabb077a8f09a4d3ee2f569822645109a54f78b12da578c3ec93916b8e4" Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.774201 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0142fcabb077a8f09a4d3ee2f569822645109a54f78b12da578c3ec93916b8e4"} err="failed to get container status \"0142fcabb077a8f09a4d3ee2f569822645109a54f78b12da578c3ec93916b8e4\": rpc error: code = NotFound desc = could not find container \"0142fcabb077a8f09a4d3ee2f569822645109a54f78b12da578c3ec93916b8e4\": container with ID starting with 0142fcabb077a8f09a4d3ee2f569822645109a54f78b12da578c3ec93916b8e4 not found: ID does not exist" Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.774217 4982 scope.go:117] "RemoveContainer" containerID="9bb36d2958a4e7af85598bed16920c7bf47db7bcc5efe40a9732232c4386465d" Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.774625 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9bb36d2958a4e7af85598bed16920c7bf47db7bcc5efe40a9732232c4386465d"} err="failed to get container status \"9bb36d2958a4e7af85598bed16920c7bf47db7bcc5efe40a9732232c4386465d\": rpc error: code = NotFound desc = could not find container \"9bb36d2958a4e7af85598bed16920c7bf47db7bcc5efe40a9732232c4386465d\": container with ID starting with 9bb36d2958a4e7af85598bed16920c7bf47db7bcc5efe40a9732232c4386465d not found: ID does not exist" Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.774643 4982 scope.go:117] "RemoveContainer" containerID="0142fcabb077a8f09a4d3ee2f569822645109a54f78b12da578c3ec93916b8e4" Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.777952 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0142fcabb077a8f09a4d3ee2f569822645109a54f78b12da578c3ec93916b8e4"} err="failed to get container status \"0142fcabb077a8f09a4d3ee2f569822645109a54f78b12da578c3ec93916b8e4\": rpc error: code = NotFound desc = could not find container \"0142fcabb077a8f09a4d3ee2f569822645109a54f78b12da578c3ec93916b8e4\": container with ID starting with 0142fcabb077a8f09a4d3ee2f569822645109a54f78b12da578c3ec93916b8e4 not found: ID does not exist" Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.777977 4982 scope.go:117] "RemoveContainer" containerID="3287373bce9781dc69b47f25c25358cb2a575449c8b3fc92c2478acccb6376eb" Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.778056 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/keystone-db-create-b8kgd"] Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.787692 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-b8kgd"] Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.814826 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-61bb-account-create-update-56s5n"] Jan 22 06:10:37 crc kubenswrapper[4982]: E0122 06:10:37.815461 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-gvbg6 operator-scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/keystone-61bb-account-create-update-56s5n" podUID="f394216d-b819-4d0d-b8f8-4195c9562621" Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.885894 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-777f-account-create-update-ssrcg"] Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.912933 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-777f-account-create-update-ssrcg"] Jan 22 06:10:37 crc kubenswrapper[4982]: I0122 06:10:37.925895 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.026200 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 22 06:10:38 crc kubenswrapper[4982]: E0122 06:10:38.039933 4982 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Jan 22 06:10:38 crc kubenswrapper[4982]: E0122 06:10:38.039995 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21-operator-scripts podName:9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21 nodeName:}" failed. No retries permitted until 2026-01-22 06:10:39.039980835 +0000 UTC m=+1499.878618838 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21-operator-scripts") pod "root-account-create-update-b7g85" (UID: "9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21") : configmap "openstack-scripts" not found Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.044102 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.047437 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-6b4bc7b64f-sqvw9"] Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.064542 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-proxy-6b4bc7b64f-sqvw9"] Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.073969 4982 scope.go:117] "RemoveContainer" containerID="b0a15b57306450b60cb280e0a0a3329be60599b1cd20993a2da1387e2ef7a182" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.094640 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-a8f0-account-create-update-nclgk"] Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.110631 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-a8f0-account-create-update-nclgk"] Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.131114 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-a886-account-create-update-l7kpg"] Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.146824 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pgfm4\" (UniqueName: \"kubernetes.io/projected/07b70872-6840-498b-be43-290f43590bb9-kube-api-access-pgfm4\") pod \"07b70872-6840-498b-be43-290f43590bb9\" (UID: \"07b70872-6840-498b-be43-290f43590bb9\") " Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.146964 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/07b70872-6840-498b-be43-290f43590bb9-kube-state-metrics-tls-certs\") pod \"07b70872-6840-498b-be43-290f43590bb9\" (UID: \"07b70872-6840-498b-be43-290f43590bb9\") " Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.147020 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07b70872-6840-498b-be43-290f43590bb9-combined-ca-bundle\") pod \"07b70872-6840-498b-be43-290f43590bb9\" (UID: \"07b70872-6840-498b-be43-290f43590bb9\") " Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.147088 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/07b70872-6840-498b-be43-290f43590bb9-kube-state-metrics-tls-config\") pod \"07b70872-6840-498b-be43-290f43590bb9\" (UID: \"07b70872-6840-498b-be43-290f43590bb9\") " Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.147343 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f394216d-b819-4d0d-b8f8-4195c9562621-operator-scripts\") pod \"keystone-61bb-account-create-update-56s5n\" (UID: \"f394216d-b819-4d0d-b8f8-4195c9562621\") " pod="openstack/keystone-61bb-account-create-update-56s5n" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.147390 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvbg6\" (UniqueName: \"kubernetes.io/projected/f394216d-b819-4d0d-b8f8-4195c9562621-kube-api-access-gvbg6\") pod \"keystone-61bb-account-create-update-56s5n\" (UID: \"f394216d-b819-4d0d-b8f8-4195c9562621\") " pod="openstack/keystone-61bb-account-create-update-56s5n" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.149754 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/nova-api-a886-account-create-update-l7kpg"] Jan 22 06:10:38 crc kubenswrapper[4982]: E0122 06:10:38.151987 4982 projected.go:194] Error preparing data for projected volume kube-api-access-gvbg6 for pod openstack/keystone-61bb-account-create-update-56s5n: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 22 06:10:38 crc kubenswrapper[4982]: E0122 06:10:38.152059 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f394216d-b819-4d0d-b8f8-4195c9562621-kube-api-access-gvbg6 podName:f394216d-b819-4d0d-b8f8-4195c9562621 nodeName:}" failed. No retries permitted until 2026-01-22 06:10:39.152039116 +0000 UTC m=+1499.990677119 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-gvbg6" (UniqueName: "kubernetes.io/projected/f394216d-b819-4d0d-b8f8-4195c9562621-kube-api-access-gvbg6") pod "keystone-61bb-account-create-update-56s5n" (UID: "f394216d-b819-4d0d-b8f8-4195c9562621") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 22 06:10:38 crc kubenswrapper[4982]: E0122 06:10:38.152136 4982 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Jan 22 06:10:38 crc kubenswrapper[4982]: E0122 06:10:38.152209 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/f394216d-b819-4d0d-b8f8-4195c9562621-operator-scripts podName:f394216d-b819-4d0d-b8f8-4195c9562621 nodeName:}" failed. No retries permitted until 2026-01-22 06:10:39.15219197 +0000 UTC m=+1499.990829973 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/f394216d-b819-4d0d-b8f8-4195c9562621-operator-scripts") pod "keystone-61bb-account-create-update-56s5n" (UID: "f394216d-b819-4d0d-b8f8-4195c9562621") : configmap "openstack-scripts" not found Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.156270 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-664cd4b9c4-ccktp" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.157767 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.158457 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07b70872-6840-498b-be43-290f43590bb9-kube-api-access-pgfm4" (OuterVolumeSpecName: "kube-api-access-pgfm4") pod "07b70872-6840-498b-be43-290f43590bb9" (UID: "07b70872-6840-498b-be43-290f43590bb9"). InnerVolumeSpecName "kube-api-access-pgfm4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.178925 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.189437 4982 scope.go:117] "RemoveContainer" containerID="3287373bce9781dc69b47f25c25358cb2a575449c8b3fc92c2478acccb6376eb" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.189550 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-bda6-account-create-update-jwx8g"] Jan 22 06:10:38 crc kubenswrapper[4982]: E0122 06:10:38.190420 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3287373bce9781dc69b47f25c25358cb2a575449c8b3fc92c2478acccb6376eb\": container with ID starting with 3287373bce9781dc69b47f25c25358cb2a575449c8b3fc92c2478acccb6376eb not found: ID does not exist" containerID="3287373bce9781dc69b47f25c25358cb2a575449c8b3fc92c2478acccb6376eb" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.190481 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3287373bce9781dc69b47f25c25358cb2a575449c8b3fc92c2478acccb6376eb"} err="failed to get container status \"3287373bce9781dc69b47f25c25358cb2a575449c8b3fc92c2478acccb6376eb\": rpc error: code = NotFound desc = could not find container \"3287373bce9781dc69b47f25c25358cb2a575449c8b3fc92c2478acccb6376eb\": container with ID starting with 3287373bce9781dc69b47f25c25358cb2a575449c8b3fc92c2478acccb6376eb not found: ID does not exist" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.190533 4982 scope.go:117] "RemoveContainer" containerID="b0a15b57306450b60cb280e0a0a3329be60599b1cd20993a2da1387e2ef7a182" Jan 22 06:10:38 crc kubenswrapper[4982]: E0122 06:10:38.190787 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0a15b57306450b60cb280e0a0a3329be60599b1cd20993a2da1387e2ef7a182\": container with ID starting with b0a15b57306450b60cb280e0a0a3329be60599b1cd20993a2da1387e2ef7a182 not found: ID does not exist" containerID="b0a15b57306450b60cb280e0a0a3329be60599b1cd20993a2da1387e2ef7a182" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.190810 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0a15b57306450b60cb280e0a0a3329be60599b1cd20993a2da1387e2ef7a182"} err="failed to get container status \"b0a15b57306450b60cb280e0a0a3329be60599b1cd20993a2da1387e2ef7a182\": rpc error: code = NotFound desc = could not find container \"b0a15b57306450b60cb280e0a0a3329be60599b1cd20993a2da1387e2ef7a182\": container with ID starting with b0a15b57306450b60cb280e0a0a3329be60599b1cd20993a2da1387e2ef7a182 not found: ID does not exist" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.193672 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-bda6-account-create-update-jwx8g"] Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.205093 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="53013384-1492-4c2e-9c7a-cd81d4d07018" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": read tcp 10.217.0.2:56996->10.217.0.205:8775: read: connection reset by peer" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.205396 4982 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openstack/nova-metadata-0" podUID="53013384-1492-4c2e-9c7a-cd81d4d07018" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": read tcp 10.217.0.2:57012->10.217.0.205:8775: read: connection reset by peer" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.212970 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07b70872-6840-498b-be43-290f43590bb9-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "07b70872-6840-498b-be43-290f43590bb9" (UID: "07b70872-6840-498b-be43-290f43590bb9"). InnerVolumeSpecName "kube-state-metrics-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.248385 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ba39683-f142-405a-a1c8-83841c5b2cd0-public-tls-certs\") pod \"7ba39683-f142-405a-a1c8-83841c5b2cd0\" (UID: \"7ba39683-f142-405a-a1c8-83841c5b2cd0\") " Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.248992 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ba39683-f142-405a-a1c8-83841c5b2cd0-config-data\") pod \"7ba39683-f142-405a-a1c8-83841c5b2cd0\" (UID: \"7ba39683-f142-405a-a1c8-83841c5b2cd0\") " Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.249136 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ba39683-f142-405a-a1c8-83841c5b2cd0-scripts\") pod \"7ba39683-f142-405a-a1c8-83841c5b2cd0\" (UID: \"7ba39683-f142-405a-a1c8-83841c5b2cd0\") " Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.249639 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pgfm4\" (UniqueName: \"kubernetes.io/projected/07b70872-6840-498b-be43-290f43590bb9-kube-api-access-pgfm4\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.249702 4982 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/07b70872-6840-498b-be43-290f43590bb9-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.255201 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ba39683-f142-405a-a1c8-83841c5b2cd0-scripts" (OuterVolumeSpecName: "scripts") pod "7ba39683-f142-405a-a1c8-83841c5b2cd0" (UID: "7ba39683-f142-405a-a1c8-83841c5b2cd0"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.321990 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-68c6f69c56-rbr89" podUID="2b565c71-916f-40d6-aa91-de5fd3a323d6" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.160:9311/healthcheck\": read tcp 10.217.0.2:52984->10.217.0.160:9311: read: connection reset by peer" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.322948 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-68c6f69c56-rbr89" podUID="2b565c71-916f-40d6-aa91-de5fd3a323d6" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.160:9311/healthcheck\": read tcp 10.217.0.2:52970->10.217.0.160:9311: read: connection reset by peer" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.327727 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07b70872-6840-498b-be43-290f43590bb9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "07b70872-6840-498b-be43-290f43590bb9" (UID: "07b70872-6840-498b-be43-290f43590bb9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.327734 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07b70872-6840-498b-be43-290f43590bb9-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "07b70872-6840-498b-be43-290f43590bb9" (UID: "07b70872-6840-498b-be43-290f43590bb9"). InnerVolumeSpecName "kube-state-metrics-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.350839 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brr4z\" (UniqueName: \"kubernetes.io/projected/7ba39683-f142-405a-a1c8-83841c5b2cd0-kube-api-access-brr4z\") pod \"7ba39683-f142-405a-a1c8-83841c5b2cd0\" (UID: \"7ba39683-f142-405a-a1c8-83841c5b2cd0\") " Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.353045 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ba39683-f142-405a-a1c8-83841c5b2cd0-internal-tls-certs\") pod \"7ba39683-f142-405a-a1c8-83841c5b2cd0\" (UID: \"7ba39683-f142-405a-a1c8-83841c5b2cd0\") " Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.353622 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ba39683-f142-405a-a1c8-83841c5b2cd0-combined-ca-bundle\") pod \"7ba39683-f142-405a-a1c8-83841c5b2cd0\" (UID: \"7ba39683-f142-405a-a1c8-83841c5b2cd0\") " Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.353716 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ba39683-f142-405a-a1c8-83841c5b2cd0-logs\") pod \"7ba39683-f142-405a-a1c8-83841c5b2cd0\" (UID: \"7ba39683-f142-405a-a1c8-83841c5b2cd0\") " Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.354370 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7ba39683-f142-405a-a1c8-83841c5b2cd0-logs" (OuterVolumeSpecName: "logs") pod "7ba39683-f142-405a-a1c8-83841c5b2cd0" (UID: "7ba39683-f142-405a-a1c8-83841c5b2cd0"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.354529 4982 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/07b70872-6840-498b-be43-290f43590bb9-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.354597 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ba39683-f142-405a-a1c8-83841c5b2cd0-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.354652 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07b70872-6840-498b-be43-290f43590bb9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.354902 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ba39683-f142-405a-a1c8-83841c5b2cd0-kube-api-access-brr4z" (OuterVolumeSpecName: "kube-api-access-brr4z") pod "7ba39683-f142-405a-a1c8-83841c5b2cd0" (UID: "7ba39683-f142-405a-a1c8-83841c5b2cd0"). InnerVolumeSpecName "kube-api-access-brr4z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.356612 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-galera-0" podUID="8c227aae-c987-448b-9545-febc060f0929" containerName="galera" containerID="cri-o://8ad613a941dc0aff2c00457b54e707c691926782ca16bc1ae2e5970c5dbdaac0" gracePeriod=30 Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.426467 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ba39683-f142-405a-a1c8-83841c5b2cd0-config-data" (OuterVolumeSpecName: "config-data") pod "7ba39683-f142-405a-a1c8-83841c5b2cd0" (UID: "7ba39683-f142-405a-a1c8-83841c5b2cd0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.433357 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ba39683-f142-405a-a1c8-83841c5b2cd0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7ba39683-f142-405a-a1c8-83841c5b2cd0" (UID: "7ba39683-f142-405a-a1c8-83841c5b2cd0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.457065 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brr4z\" (UniqueName: \"kubernetes.io/projected/7ba39683-f142-405a-a1c8-83841c5b2cd0-kube-api-access-brr4z\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.457092 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ba39683-f142-405a-a1c8-83841c5b2cd0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.457103 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ba39683-f142-405a-a1c8-83841c5b2cd0-logs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.457112 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ba39683-f142-405a-a1c8-83841c5b2cd0-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.502095 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ba39683-f142-405a-a1c8-83841c5b2cd0-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "7ba39683-f142-405a-a1c8-83841c5b2cd0" (UID: "7ba39683-f142-405a-a1c8-83841c5b2cd0"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.508942 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ba39683-f142-405a-a1c8-83841c5b2cd0-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "7ba39683-f142-405a-a1c8-83841c5b2cd0" (UID: "7ba39683-f142-405a-a1c8-83841c5b2cd0"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.513026 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.560386 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tmt2g\" (UniqueName: \"kubernetes.io/projected/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-kube-api-access-tmt2g\") pod \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.560425 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-config-data\") pod \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.560453 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-config-data-custom\") pod \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.560499 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-public-tls-certs\") pod \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.560558 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-combined-ca-bundle\") pod \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.560621 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-logs\") pod \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.560665 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-scripts\") pod \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.560732 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-internal-tls-certs\") pod \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.560761 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-etc-machine-id\") pod \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\" (UID: \"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e\") " Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.561188 4982 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ba39683-f142-405a-a1c8-83841c5b2cd0-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 
06:10:38.561202 4982 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ba39683-f142-405a-a1c8-83841c5b2cd0-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.561658 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-logs" (OuterVolumeSpecName: "logs") pod "0be45e9e-2ce7-4c9b-abee-2e0249e6e53e" (UID: "0be45e9e-2ce7-4c9b-abee-2e0249e6e53e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.561981 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "0be45e9e-2ce7-4c9b-abee-2e0249e6e53e" (UID: "0be45e9e-2ce7-4c9b-abee-2e0249e6e53e"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.566937 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-kube-api-access-tmt2g" (OuterVolumeSpecName: "kube-api-access-tmt2g") pod "0be45e9e-2ce7-4c9b-abee-2e0249e6e53e" (UID: "0be45e9e-2ce7-4c9b-abee-2e0249e6e53e"). InnerVolumeSpecName "kube-api-access-tmt2g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.569925 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "0be45e9e-2ce7-4c9b-abee-2e0249e6e53e" (UID: "0be45e9e-2ce7-4c9b-abee-2e0249e6e53e"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.572556 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-scripts" (OuterVolumeSpecName: "scripts") pod "0be45e9e-2ce7-4c9b-abee-2e0249e6e53e" (UID: "0be45e9e-2ce7-4c9b-abee-2e0249e6e53e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.588599 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0be45e9e-2ce7-4c9b-abee-2e0249e6e53e" (UID: "0be45e9e-2ce7-4c9b-abee-2e0249e6e53e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.662928 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-config-data" (OuterVolumeSpecName: "config-data") pod "0be45e9e-2ce7-4c9b-abee-2e0249e6e53e" (UID: "0be45e9e-2ce7-4c9b-abee-2e0249e6e53e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.663910 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tmt2g\" (UniqueName: \"kubernetes.io/projected/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-kube-api-access-tmt2g\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.663946 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.663957 4982 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.663968 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.663978 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-logs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.664007 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.664017 4982 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.674065 4982 generic.go:334] "Generic (PLEG): container finished" podID="2b565c71-916f-40d6-aa91-de5fd3a323d6" containerID="3ebd5e2b4a01d8d16123bc5c692c42b9c4f85f654043c5e5f979425f4bc1aaba" exitCode=0 Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.674120 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-68c6f69c56-rbr89" event={"ID":"2b565c71-916f-40d6-aa91-de5fd3a323d6","Type":"ContainerDied","Data":"3ebd5e2b4a01d8d16123bc5c692c42b9c4f85f654043c5e5f979425f4bc1aaba"} Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.712686 4982 generic.go:334] "Generic (PLEG): container finished" podID="7ba39683-f142-405a-a1c8-83841c5b2cd0" containerID="eef7e59c675432bec99f0f98a0e39c55d8cace43d4af135404063d40d0eaba03" exitCode=0 Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.712948 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-664cd4b9c4-ccktp" event={"ID":"7ba39683-f142-405a-a1c8-83841c5b2cd0","Type":"ContainerDied","Data":"eef7e59c675432bec99f0f98a0e39c55d8cace43d4af135404063d40d0eaba03"} Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.712974 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-664cd4b9c4-ccktp" event={"ID":"7ba39683-f142-405a-a1c8-83841c5b2cd0","Type":"ContainerDied","Data":"e5bef35f7cbb99b1c6eafa9f5060bc281d1c241d43dbf0cc23ef2d3fb06cb9f3"} Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.712993 4982 scope.go:117] "RemoveContainer" containerID="eef7e59c675432bec99f0f98a0e39c55d8cace43d4af135404063d40d0eaba03" Jan 22 06:10:38 crc 
kubenswrapper[4982]: I0122 06:10:38.713093 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-664cd4b9c4-ccktp" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.731229 4982 generic.go:334] "Generic (PLEG): container finished" podID="53013384-1492-4c2e-9c7a-cd81d4d07018" containerID="a49fa6161cdc0ad0eb335a0a25470144a700d11c752945b8fa21ab70997554c3" exitCode=0 Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.731283 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"53013384-1492-4c2e-9c7a-cd81d4d07018","Type":"ContainerDied","Data":"a49fa6161cdc0ad0eb335a0a25470144a700d11c752945b8fa21ab70997554c3"} Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.739515 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "0be45e9e-2ce7-4c9b-abee-2e0249e6e53e" (UID: "0be45e9e-2ce7-4c9b-abee-2e0249e6e53e"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.740766 4982 generic.go:334] "Generic (PLEG): container finished" podID="0be45e9e-2ce7-4c9b-abee-2e0249e6e53e" containerID="9e43b1e70c6df504b6660b1a17c284b8c0388a6042530649a62f0a956e4e4d73" exitCode=0 Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.740908 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.741361 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e","Type":"ContainerDied","Data":"9e43b1e70c6df504b6660b1a17c284b8c0388a6042530649a62f0a956e4e4d73"} Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.741398 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0be45e9e-2ce7-4c9b-abee-2e0249e6e53e","Type":"ContainerDied","Data":"e27d0ca0c3c80eae3ef5046e6841df64657571a68ff0e08f536c7206324926f0"} Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.743731 4982 generic.go:334] "Generic (PLEG): container finished" podID="681a16ff-1468-4a9a-a692-5461230072bd" containerID="fc3d99dc9caa4c46c23d4707378cbddbf32648bc1c82450d35d566fe03f67374" exitCode=0 Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.743744 4982 generic.go:334] "Generic (PLEG): container finished" podID="681a16ff-1468-4a9a-a692-5461230072bd" containerID="7f2cbf5fe89ef897372c71697b853e4151f7bdaf7d9e22d86ada81c4c27538fd" exitCode=0 Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.743776 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"681a16ff-1468-4a9a-a692-5461230072bd","Type":"ContainerDied","Data":"fc3d99dc9caa4c46c23d4707378cbddbf32648bc1c82450d35d566fe03f67374"} Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.743794 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"681a16ff-1468-4a9a-a692-5461230072bd","Type":"ContainerDied","Data":"7f2cbf5fe89ef897372c71697b853e4151f7bdaf7d9e22d86ada81c4c27538fd"} Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.747364 4982 generic.go:334] "Generic (PLEG): container finished" podID="1669f2c1-5f4c-4a3e-ae64-5d754ec522bc" containerID="cefbf932aefddcd806c99682cdfc21a0e80875bc3e02c87761c6e4e7b2e2288e" exitCode=0 Jan 22 
06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.747416 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc","Type":"ContainerDied","Data":"cefbf932aefddcd806c99682cdfc21a0e80875bc3e02c87761c6e4e7b2e2288e"} Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.750028 4982 generic.go:334] "Generic (PLEG): container finished" podID="8d77f6f1-5c7d-45e5-92e5-8e333d91c020" containerID="92aa96879b03afd4dc777ec25243c37e4ffc13dea924e62623d2262a8332c39f" exitCode=0 Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.750076 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8d77f6f1-5c7d-45e5-92e5-8e333d91c020","Type":"ContainerDied","Data":"92aa96879b03afd4dc777ec25243c37e4ffc13dea924e62623d2262a8332c39f"} Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.756076 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "0be45e9e-2ce7-4c9b-abee-2e0249e6e53e" (UID: "0be45e9e-2ce7-4c9b-abee-2e0249e6e53e"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.758698 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"07b70872-6840-498b-be43-290f43590bb9","Type":"ContainerDied","Data":"5278f8f984a36118f89c457097510209ba759e5381ece112d058a2b71f01f4d6"} Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.758741 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.765166 4982 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.765187 4982 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.777807 4982 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack/root-account-create-update-b7g85" secret="" err="secret \"galera-openstack-dockercfg-m2mgh\" not found" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.777866 4982 scope.go:117] "RemoveContainer" containerID="a6ac05cf7240c43f23092e380fa1df8582d0653aa59e96d2961d318758638b64" Jan 22 06:10:38 crc kubenswrapper[4982]: E0122 06:10:38.778046 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CrashLoopBackOff: \"back-off 10s restarting failed container=mariadb-account-create-update pod=root-account-create-update-b7g85_openstack(9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21)\"" pod="openstack/root-account-create-update-b7g85" podUID="9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.788161 4982 generic.go:334] "Generic (PLEG): container finished" podID="0e2e1a17-8278-4fe9-a83e-aeb0a61c69df" containerID="98cf6e03c88eac06c166fdc5c9a1486e6fc1f8e13aa0eff354dcd9303c5e399f" exitCode=0 Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.788255 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0e2e1a17-8278-4fe9-a83e-aeb0a61c69df","Type":"ContainerDied","Data":"98cf6e03c88eac06c166fdc5c9a1486e6fc1f8e13aa0eff354dcd9303c5e399f"} Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.798971 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-664cd4b9c4-ccktp"] Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.806862 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-664cd4b9c4-ccktp"] Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.821456 4982 generic.go:334] "Generic (PLEG): container finished" podID="ac0dc6e4-35d9-4d06-93af-a8758648aa13" containerID="33fb07736ff02fa3a7d27e62002c81a800f65f43ed61da9bdbe44cbe612b4ebe" exitCode=0 Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.821533 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-61bb-account-create-update-56s5n" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.822037 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ac0dc6e4-35d9-4d06-93af-a8758648aa13","Type":"ContainerDied","Data":"33fb07736ff02fa3a7d27e62002c81a800f65f43ed61da9bdbe44cbe612b4ebe"} Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.824472 4982 scope.go:117] "RemoveContainer" containerID="c7f03d3fdd742f290077c8c859e03cbb33c8046802e0420e28b5720902b21897" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.908714 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-61bb-account-create-update-56s5n" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.913823 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.924509 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/memcached-0" podUID="0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e" containerName="memcached" probeResult="failure" output="dial tcp 10.217.0.106:11211: connect: connection refused" Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.934101 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 22 06:10:38 crc kubenswrapper[4982]: I0122 06:10:38.937424 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.007635 4982 scope.go:117] "RemoveContainer" containerID="eef7e59c675432bec99f0f98a0e39c55d8cace43d4af135404063d40d0eaba03" Jan 22 06:10:39 crc kubenswrapper[4982]: E0122 06:10:39.011094 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eef7e59c675432bec99f0f98a0e39c55d8cace43d4af135404063d40d0eaba03\": container with ID starting with eef7e59c675432bec99f0f98a0e39c55d8cace43d4af135404063d40d0eaba03 not found: ID does not exist" containerID="eef7e59c675432bec99f0f98a0e39c55d8cace43d4af135404063d40d0eaba03" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.011141 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eef7e59c675432bec99f0f98a0e39c55d8cace43d4af135404063d40d0eaba03"} err="failed to get container status \"eef7e59c675432bec99f0f98a0e39c55d8cace43d4af135404063d40d0eaba03\": rpc error: code = NotFound desc = could not find container \"eef7e59c675432bec99f0f98a0e39c55d8cace43d4af135404063d40d0eaba03\": container with ID starting with eef7e59c675432bec99f0f98a0e39c55d8cace43d4af135404063d40d0eaba03 not found: ID does not exist" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.011167 4982 scope.go:117] "RemoveContainer" containerID="c7f03d3fdd742f290077c8c859e03cbb33c8046802e0420e28b5720902b21897" Jan 22 06:10:39 crc kubenswrapper[4982]: E0122 06:10:39.011626 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7f03d3fdd742f290077c8c859e03cbb33c8046802e0420e28b5720902b21897\": container with ID starting with c7f03d3fdd742f290077c8c859e03cbb33c8046802e0420e28b5720902b21897 not found: ID does not exist" containerID="c7f03d3fdd742f290077c8c859e03cbb33c8046802e0420e28b5720902b21897" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.011675 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7f03d3fdd742f290077c8c859e03cbb33c8046802e0420e28b5720902b21897"} err="failed to get container status \"c7f03d3fdd742f290077c8c859e03cbb33c8046802e0420e28b5720902b21897\": rpc error: code = NotFound desc = could not find container \"c7f03d3fdd742f290077c8c859e03cbb33c8046802e0420e28b5720902b21897\": container with ID starting with c7f03d3fdd742f290077c8c859e03cbb33c8046802e0420e28b5720902b21897 not found: ID does not exist" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.011703 4982 scope.go:117] "RemoveContainer" containerID="9e43b1e70c6df504b6660b1a17c284b8c0388a6042530649a62f0a956e4e4d73" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.028502 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.036080 4982 scope.go:117] "RemoveContainer" containerID="08893a5df354c2f32c7b9ca210a4a1f261a5ebf3545f01c8f59952a61d346ecb" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.041650 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-68c6f69c56-rbr89" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.051163 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.061960 4982 scope.go:117] "RemoveContainer" containerID="9e43b1e70c6df504b6660b1a17c284b8c0388a6042530649a62f0a956e4e4d73" Jan 22 06:10:39 crc kubenswrapper[4982]: E0122 06:10:39.062367 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e43b1e70c6df504b6660b1a17c284b8c0388a6042530649a62f0a956e4e4d73\": container with ID starting with 9e43b1e70c6df504b6660b1a17c284b8c0388a6042530649a62f0a956e4e4d73 not found: ID does not exist" containerID="9e43b1e70c6df504b6660b1a17c284b8c0388a6042530649a62f0a956e4e4d73" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.062396 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e43b1e70c6df504b6660b1a17c284b8c0388a6042530649a62f0a956e4e4d73"} err="failed to get container status \"9e43b1e70c6df504b6660b1a17c284b8c0388a6042530649a62f0a956e4e4d73\": rpc error: code = NotFound desc = could not find container \"9e43b1e70c6df504b6660b1a17c284b8c0388a6042530649a62f0a956e4e4d73\": container with ID starting with 9e43b1e70c6df504b6660b1a17c284b8c0388a6042530649a62f0a956e4e4d73 not found: ID does not exist" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.062415 4982 scope.go:117] "RemoveContainer" containerID="08893a5df354c2f32c7b9ca210a4a1f261a5ebf3545f01c8f59952a61d346ecb" Jan 22 06:10:39 crc kubenswrapper[4982]: E0122 06:10:39.062831 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"08893a5df354c2f32c7b9ca210a4a1f261a5ebf3545f01c8f59952a61d346ecb\": container with ID starting with 08893a5df354c2f32c7b9ca210a4a1f261a5ebf3545f01c8f59952a61d346ecb not found: ID does not exist" containerID="08893a5df354c2f32c7b9ca210a4a1f261a5ebf3545f01c8f59952a61d346ecb" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.062865 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08893a5df354c2f32c7b9ca210a4a1f261a5ebf3545f01c8f59952a61d346ecb"} err="failed to get container status \"08893a5df354c2f32c7b9ca210a4a1f261a5ebf3545f01c8f59952a61d346ecb\": rpc error: code = NotFound desc = could not find container \"08893a5df354c2f32c7b9ca210a4a1f261a5ebf3545f01c8f59952a61d346ecb\": container with ID starting with 08893a5df354c2f32c7b9ca210a4a1f261a5ebf3545f01c8f59952a61d346ecb not found: ID does not exist" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.062881 4982 scope.go:117] "RemoveContainer" containerID="addbc96e6718c32e48886fcf4a4c3eca72d548b434d5453710a2fcc4daabe338" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.072046 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/53013384-1492-4c2e-9c7a-cd81d4d07018-nova-metadata-tls-certs\") pod \"53013384-1492-4c2e-9c7a-cd81d4d07018\" (UID: \"53013384-1492-4c2e-9c7a-cd81d4d07018\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.072141 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53013384-1492-4c2e-9c7a-cd81d4d07018-config-data\") pod \"53013384-1492-4c2e-9c7a-cd81d4d07018\" (UID: \"53013384-1492-4c2e-9c7a-cd81d4d07018\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.072192 4982 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53013384-1492-4c2e-9c7a-cd81d4d07018-logs\") pod \"53013384-1492-4c2e-9c7a-cd81d4d07018\" (UID: \"53013384-1492-4c2e-9c7a-cd81d4d07018\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.072290 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z2g6c\" (UniqueName: \"kubernetes.io/projected/53013384-1492-4c2e-9c7a-cd81d4d07018-kube-api-access-z2g6c\") pod \"53013384-1492-4c2e-9c7a-cd81d4d07018\" (UID: \"53013384-1492-4c2e-9c7a-cd81d4d07018\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.072340 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53013384-1492-4c2e-9c7a-cd81d4d07018-combined-ca-bundle\") pod \"53013384-1492-4c2e-9c7a-cd81d4d07018\" (UID: \"53013384-1492-4c2e-9c7a-cd81d4d07018\") " Jan 22 06:10:39 crc kubenswrapper[4982]: E0122 06:10:39.072707 4982 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Jan 22 06:10:39 crc kubenswrapper[4982]: E0122 06:10:39.072747 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21-operator-scripts podName:9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21 nodeName:}" failed. No retries permitted until 2026-01-22 06:10:41.072734535 +0000 UTC m=+1501.911372538 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21-operator-scripts") pod "root-account-create-update-b7g85" (UID: "9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21") : configmap "openstack-scripts" not found Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.073351 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53013384-1492-4c2e-9c7a-cd81d4d07018-logs" (OuterVolumeSpecName: "logs") pod "53013384-1492-4c2e-9c7a-cd81d4d07018" (UID: "53013384-1492-4c2e-9c7a-cd81d4d07018"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.079709 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53013384-1492-4c2e-9c7a-cd81d4d07018-kube-api-access-z2g6c" (OuterVolumeSpecName: "kube-api-access-z2g6c") pod "53013384-1492-4c2e-9c7a-cd81d4d07018" (UID: "53013384-1492-4c2e-9c7a-cd81d4d07018"). InnerVolumeSpecName "kube-api-access-z2g6c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.099649 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53013384-1492-4c2e-9c7a-cd81d4d07018-config-data" (OuterVolumeSpecName: "config-data") pod "53013384-1492-4c2e-9c7a-cd81d4d07018" (UID: "53013384-1492-4c2e-9c7a-cd81d4d07018"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.111390 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53013384-1492-4c2e-9c7a-cd81d4d07018-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "53013384-1492-4c2e-9c7a-cd81d4d07018" (UID: "53013384-1492-4c2e-9c7a-cd81d4d07018"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.168227 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.173706 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-config-data\") pod \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\" (UID: \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.173767 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-combined-ca-bundle\") pod \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\" (UID: \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.173891 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-public-tls-certs\") pod \"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc\" (UID: \"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.173946 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b565c71-916f-40d6-aa91-de5fd3a323d6-public-tls-certs\") pod \"2b565c71-916f-40d6-aa91-de5fd3a323d6\" (UID: \"2b565c71-916f-40d6-aa91-de5fd3a323d6\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.173982 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b565c71-916f-40d6-aa91-de5fd3a323d6-combined-ca-bundle\") pod \"2b565c71-916f-40d6-aa91-de5fd3a323d6\" (UID: \"2b565c71-916f-40d6-aa91-de5fd3a323d6\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.174016 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2b565c71-916f-40d6-aa91-de5fd3a323d6-config-data-custom\") pod \"2b565c71-916f-40d6-aa91-de5fd3a323d6\" (UID: \"2b565c71-916f-40d6-aa91-de5fd3a323d6\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.174041 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-internal-tls-certs\") pod \"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc\" (UID: \"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.174072 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-logs\") pod \"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc\" (UID: \"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.174091 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-config-data\") pod \"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc\" (UID: \"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.174123 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4tsxg\" 
(UniqueName: \"kubernetes.io/projected/2b565c71-916f-40d6-aa91-de5fd3a323d6-kube-api-access-4tsxg\") pod \"2b565c71-916f-40d6-aa91-de5fd3a323d6\" (UID: \"2b565c71-916f-40d6-aa91-de5fd3a323d6\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.174147 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-httpd-run\") pod \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\" (UID: \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.174162 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\" (UID: \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.174180 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w2r6w\" (UniqueName: \"kubernetes.io/projected/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-kube-api-access-w2r6w\") pod \"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc\" (UID: \"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.174217 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-logs\") pod \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\" (UID: \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.174233 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dz95l\" (UniqueName: \"kubernetes.io/projected/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-kube-api-access-dz95l\") pod \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\" (UID: \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.174248 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-internal-tls-certs\") pod \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\" (UID: \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.174278 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b565c71-916f-40d6-aa91-de5fd3a323d6-internal-tls-certs\") pod \"2b565c71-916f-40d6-aa91-de5fd3a323d6\" (UID: \"2b565c71-916f-40d6-aa91-de5fd3a323d6\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.174312 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-scripts\") pod \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\" (UID: \"8d77f6f1-5c7d-45e5-92e5-8e333d91c020\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.174340 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-combined-ca-bundle\") pod \"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc\" (UID: \"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.174357 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/2b565c71-916f-40d6-aa91-de5fd3a323d6-logs\") pod \"2b565c71-916f-40d6-aa91-de5fd3a323d6\" (UID: \"2b565c71-916f-40d6-aa91-de5fd3a323d6\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.174374 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b565c71-916f-40d6-aa91-de5fd3a323d6-config-data\") pod \"2b565c71-916f-40d6-aa91-de5fd3a323d6\" (UID: \"2b565c71-916f-40d6-aa91-de5fd3a323d6\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.174548 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvbg6\" (UniqueName: \"kubernetes.io/projected/f394216d-b819-4d0d-b8f8-4195c9562621-kube-api-access-gvbg6\") pod \"keystone-61bb-account-create-update-56s5n\" (UID: \"f394216d-b819-4d0d-b8f8-4195c9562621\") " pod="openstack/keystone-61bb-account-create-update-56s5n" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.174688 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f394216d-b819-4d0d-b8f8-4195c9562621-operator-scripts\") pod \"keystone-61bb-account-create-update-56s5n\" (UID: \"f394216d-b819-4d0d-b8f8-4195c9562621\") " pod="openstack/keystone-61bb-account-create-update-56s5n" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.174949 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z2g6c\" (UniqueName: \"kubernetes.io/projected/53013384-1492-4c2e-9c7a-cd81d4d07018-kube-api-access-z2g6c\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.174962 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53013384-1492-4c2e-9c7a-cd81d4d07018-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.174972 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53013384-1492-4c2e-9c7a-cd81d4d07018-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.174979 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53013384-1492-4c2e-9c7a-cd81d4d07018-logs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: E0122 06:10:39.175027 4982 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Jan 22 06:10:39 crc kubenswrapper[4982]: E0122 06:10:39.175098 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/f394216d-b819-4d0d-b8f8-4195c9562621-operator-scripts podName:f394216d-b819-4d0d-b8f8-4195c9562621 nodeName:}" failed. No retries permitted until 2026-01-22 06:10:41.1750851 +0000 UTC m=+1502.013723103 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/f394216d-b819-4d0d-b8f8-4195c9562621-operator-scripts") pod "keystone-61bb-account-create-update-56s5n" (UID: "f394216d-b819-4d0d-b8f8-4195c9562621") : configmap "openstack-scripts" not found Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.177258 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53013384-1492-4c2e-9c7a-cd81d4d07018-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "53013384-1492-4c2e-9c7a-cd81d4d07018" (UID: "53013384-1492-4c2e-9c7a-cd81d4d07018"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.178520 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "8d77f6f1-5c7d-45e5-92e5-8e333d91c020" (UID: "8d77f6f1-5c7d-45e5-92e5-8e333d91c020"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.180426 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-scripts" (OuterVolumeSpecName: "scripts") pod "8d77f6f1-5c7d-45e5-92e5-8e333d91c020" (UID: "8d77f6f1-5c7d-45e5-92e5-8e333d91c020"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.180912 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="681a16ff-1468-4a9a-a692-5461230072bd" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.204:3000/\": dial tcp 10.217.0.204:3000: connect: connection refused" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.181176 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-logs" (OuterVolumeSpecName: "logs") pod "8d77f6f1-5c7d-45e5-92e5-8e333d91c020" (UID: "8d77f6f1-5c7d-45e5-92e5-8e333d91c020"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.181357 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2b565c71-916f-40d6-aa91-de5fd3a323d6-logs" (OuterVolumeSpecName: "logs") pod "2b565c71-916f-40d6-aa91-de5fd3a323d6" (UID: "2b565c71-916f-40d6-aa91-de5fd3a323d6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.197137 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-logs" (OuterVolumeSpecName: "logs") pod "1669f2c1-5f4c-4a3e-ae64-5d754ec522bc" (UID: "1669f2c1-5f4c-4a3e-ae64-5d754ec522bc"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: E0122 06:10:39.197176 4982 projected.go:194] Error preparing data for projected volume kube-api-access-gvbg6 for pod openstack/keystone-61bb-account-create-update-56s5n: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 22 06:10:39 crc kubenswrapper[4982]: E0122 06:10:39.197984 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f394216d-b819-4d0d-b8f8-4195c9562621-kube-api-access-gvbg6 podName:f394216d-b819-4d0d-b8f8-4195c9562621 nodeName:}" failed. No retries permitted until 2026-01-22 06:10:41.197960614 +0000 UTC m=+1502.036598617 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-gvbg6" (UniqueName: "kubernetes.io/projected/f394216d-b819-4d0d-b8f8-4195c9562621-kube-api-access-gvbg6") pod "keystone-61bb-account-create-update-56s5n" (UID: "f394216d-b819-4d0d-b8f8-4195c9562621") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.200340 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.202150 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "8d77f6f1-5c7d-45e5-92e5-8e333d91c020" (UID: "8d77f6f1-5c7d-45e5-92e5-8e333d91c020"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.202871 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-kube-api-access-dz95l" (OuterVolumeSpecName: "kube-api-access-dz95l") pod "8d77f6f1-5c7d-45e5-92e5-8e333d91c020" (UID: "8d77f6f1-5c7d-45e5-92e5-8e333d91c020"). InnerVolumeSpecName "kube-api-access-dz95l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.205981 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-kube-api-access-w2r6w" (OuterVolumeSpecName: "kube-api-access-w2r6w") pod "1669f2c1-5f4c-4a3e-ae64-5d754ec522bc" (UID: "1669f2c1-5f4c-4a3e-ae64-5d754ec522bc"). InnerVolumeSpecName "kube-api-access-w2r6w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.209954 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b565c71-916f-40d6-aa91-de5fd3a323d6-kube-api-access-4tsxg" (OuterVolumeSpecName: "kube-api-access-4tsxg") pod "2b565c71-916f-40d6-aa91-de5fd3a323d6" (UID: "2b565c71-916f-40d6-aa91-de5fd3a323d6"). InnerVolumeSpecName "kube-api-access-4tsxg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.215753 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b565c71-916f-40d6-aa91-de5fd3a323d6-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "2b565c71-916f-40d6-aa91-de5fd3a323d6" (UID: "2b565c71-916f-40d6-aa91-de5fd3a323d6"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.227477 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8d77f6f1-5c7d-45e5-92e5-8e333d91c020" (UID: "8d77f6f1-5c7d-45e5-92e5-8e333d91c020"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.248596 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.277103 4982 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2b565c71-916f-40d6-aa91-de5fd3a323d6-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.277133 4982 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/53013384-1492-4c2e-9c7a-cd81d4d07018-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.277142 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-logs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.277150 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4tsxg\" (UniqueName: \"kubernetes.io/projected/2b565c71-916f-40d6-aa91-de5fd3a323d6-kube-api-access-4tsxg\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.277159 4982 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.277182 4982 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.277193 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w2r6w\" (UniqueName: \"kubernetes.io/projected/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-kube-api-access-w2r6w\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.277202 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-logs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.277211 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dz95l\" (UniqueName: \"kubernetes.io/projected/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-kube-api-access-dz95l\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.277220 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.277227 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/2b565c71-916f-40d6-aa91-de5fd3a323d6-logs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.277235 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.296002 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1669f2c1-5f4c-4a3e-ae64-5d754ec522bc" (UID: "1669f2c1-5f4c-4a3e-ae64-5d754ec522bc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.327823 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.350795 4982 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.363975 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-config-data" (OuterVolumeSpecName: "config-data") pod "8d77f6f1-5c7d-45e5-92e5-8e333d91c020" (UID: "8d77f6f1-5c7d-45e5-92e5-8e333d91c020"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.377771 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac0dc6e4-35d9-4d06-93af-a8758648aa13-public-tls-certs\") pod \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.378077 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ac0dc6e4-35d9-4d06-93af-a8758648aa13-httpd-run\") pod \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.378280 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.378839 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ac0dc6e4-35d9-4d06-93af-a8758648aa13-scripts\") pod \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.382479 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sj245\" (UniqueName: \"kubernetes.io/projected/ac0dc6e4-35d9-4d06-93af-a8758648aa13-kube-api-access-sj245\") pod \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.382777 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/ac0dc6e4-35d9-4d06-93af-a8758648aa13-combined-ca-bundle\") pod \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.382997 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac0dc6e4-35d9-4d06-93af-a8758648aa13-logs\") pod \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.383113 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac0dc6e4-35d9-4d06-93af-a8758648aa13-config-data\") pod \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\" (UID: \"ac0dc6e4-35d9-4d06-93af-a8758648aa13\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.382926 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac0dc6e4-35d9-4d06-93af-a8758648aa13-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "ac0dc6e4-35d9-4d06-93af-a8758648aa13" (UID: "ac0dc6e4-35d9-4d06-93af-a8758648aa13"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.383326 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac0dc6e4-35d9-4d06-93af-a8758648aa13-logs" (OuterVolumeSpecName: "logs") pod "ac0dc6e4-35d9-4d06-93af-a8758648aa13" (UID: "ac0dc6e4-35d9-4d06-93af-a8758648aa13"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.383806 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.383957 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.384015 4982 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ac0dc6e4-35d9-4d06-93af-a8758648aa13-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.384066 4982 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.384125 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac0dc6e4-35d9-4d06-93af-a8758648aa13-logs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.383907 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "8d77f6f1-5c7d-45e5-92e5-8e333d91c020" (UID: "8d77f6f1-5c7d-45e5-92e5-8e333d91c020"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.386061 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac0dc6e4-35d9-4d06-93af-a8758648aa13-kube-api-access-sj245" (OuterVolumeSpecName: "kube-api-access-sj245") pod "ac0dc6e4-35d9-4d06-93af-a8758648aa13" (UID: "ac0dc6e4-35d9-4d06-93af-a8758648aa13"). InnerVolumeSpecName "kube-api-access-sj245". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.387060 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "glance") pod "ac0dc6e4-35d9-4d06-93af-a8758648aa13" (UID: "ac0dc6e4-35d9-4d06-93af-a8758648aa13"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.388005 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b565c71-916f-40d6-aa91-de5fd3a323d6-config-data" (OuterVolumeSpecName: "config-data") pod "2b565c71-916f-40d6-aa91-de5fd3a323d6" (UID: "2b565c71-916f-40d6-aa91-de5fd3a323d6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.391707 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac0dc6e4-35d9-4d06-93af-a8758648aa13-scripts" (OuterVolumeSpecName: "scripts") pod "ac0dc6e4-35d9-4d06-93af-a8758648aa13" (UID: "ac0dc6e4-35d9-4d06-93af-a8758648aa13"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.397407 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_0ce93171-ff81-4a46-9813-2807930a945c/ovn-northd/0.log" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.397480 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.401883 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "1669f2c1-5f4c-4a3e-ae64-5d754ec522bc" (UID: "1669f2c1-5f4c-4a3e-ae64-5d754ec522bc"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.402466 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.404550 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b565c71-916f-40d6-aa91-de5fd3a323d6-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "2b565c71-916f-40d6-aa91-de5fd3a323d6" (UID: "2b565c71-916f-40d6-aa91-de5fd3a323d6"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.409913 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-config-data" (OuterVolumeSpecName: "config-data") pod "1669f2c1-5f4c-4a3e-ae64-5d754ec522bc" (UID: "1669f2c1-5f4c-4a3e-ae64-5d754ec522bc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.410049 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b565c71-916f-40d6-aa91-de5fd3a323d6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2b565c71-916f-40d6-aa91-de5fd3a323d6" (UID: "2b565c71-916f-40d6-aa91-de5fd3a323d6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.410809 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b565c71-916f-40d6-aa91-de5fd3a323d6-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "2b565c71-916f-40d6-aa91-de5fd3a323d6" (UID: "2b565c71-916f-40d6-aa91-de5fd3a323d6"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.411041 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "1669f2c1-5f4c-4a3e-ae64-5d754ec522bc" (UID: "1669f2c1-5f4c-4a3e-ae64-5d754ec522bc"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.423121 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac0dc6e4-35d9-4d06-93af-a8758648aa13-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ac0dc6e4-35d9-4d06-93af-a8758648aa13" (UID: "ac0dc6e4-35d9-4d06-93af-a8758648aa13"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.453090 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac0dc6e4-35d9-4d06-93af-a8758648aa13-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "ac0dc6e4-35d9-4d06-93af-a8758648aa13" (UID: "ac0dc6e4-35d9-4d06-93af-a8758648aa13"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.466000 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac0dc6e4-35d9-4d06-93af-a8758648aa13-config-data" (OuterVolumeSpecName: "config-data") pod "ac0dc6e4-35d9-4d06-93af-a8758648aa13" (UID: "ac0dc6e4-35d9-4d06-93af-a8758648aa13"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.494301 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e-kolla-config\") pod \"0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e\" (UID: \"0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.494352 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ce93171-ff81-4a46-9813-2807930a945c-ovn-northd-tls-certs\") pod \"0ce93171-ff81-4a46-9813-2807930a945c\" (UID: \"0ce93171-ff81-4a46-9813-2807930a945c\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.494372 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e-combined-ca-bundle\") pod \"0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e\" (UID: \"0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.494403 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ce93171-ff81-4a46-9813-2807930a945c-config\") pod \"0ce93171-ff81-4a46-9813-2807930a945c\" (UID: \"0ce93171-ff81-4a46-9813-2807930a945c\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.494444 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e-config-data\") pod \"0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e\" (UID: \"0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.494476 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0ce93171-ff81-4a46-9813-2807930a945c-scripts\") pod \"0ce93171-ff81-4a46-9813-2807930a945c\" (UID: \"0ce93171-ff81-4a46-9813-2807930a945c\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.494497 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ce93171-ff81-4a46-9813-2807930a945c-combined-ca-bundle\") pod \"0ce93171-ff81-4a46-9813-2807930a945c\" (UID: \"0ce93171-ff81-4a46-9813-2807930a945c\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.494526 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qsnfh\" (UniqueName: \"kubernetes.io/projected/0e2e1a17-8278-4fe9-a83e-aeb0a61c69df-kube-api-access-qsnfh\") pod \"0e2e1a17-8278-4fe9-a83e-aeb0a61c69df\" (UID: \"0e2e1a17-8278-4fe9-a83e-aeb0a61c69df\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.494548 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e2e1a17-8278-4fe9-a83e-aeb0a61c69df-combined-ca-bundle\") pod \"0e2e1a17-8278-4fe9-a83e-aeb0a61c69df\" (UID: \"0e2e1a17-8278-4fe9-a83e-aeb0a61c69df\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.494573 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ce93171-ff81-4a46-9813-2807930a945c-metrics-certs-tls-certs\") pod 
\"0ce93171-ff81-4a46-9813-2807930a945c\" (UID: \"0ce93171-ff81-4a46-9813-2807930a945c\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.494592 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0ce93171-ff81-4a46-9813-2807930a945c-ovn-rundir\") pod \"0ce93171-ff81-4a46-9813-2807930a945c\" (UID: \"0ce93171-ff81-4a46-9813-2807930a945c\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.494632 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8ncwj\" (UniqueName: \"kubernetes.io/projected/0ce93171-ff81-4a46-9813-2807930a945c-kube-api-access-8ncwj\") pod \"0ce93171-ff81-4a46-9813-2807930a945c\" (UID: \"0ce93171-ff81-4a46-9813-2807930a945c\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.494666 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e-memcached-tls-certs\") pod \"0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e\" (UID: \"0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.494685 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e2e1a17-8278-4fe9-a83e-aeb0a61c69df-config-data\") pod \"0e2e1a17-8278-4fe9-a83e-aeb0a61c69df\" (UID: \"0e2e1a17-8278-4fe9-a83e-aeb0a61c69df\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.494703 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-69dwn\" (UniqueName: \"kubernetes.io/projected/0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e-kube-api-access-69dwn\") pod \"0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e\" (UID: \"0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e\") " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.495044 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ac0dc6e4-35d9-4d06-93af-a8758648aa13-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.495063 4982 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.495077 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sj245\" (UniqueName: \"kubernetes.io/projected/ac0dc6e4-35d9-4d06-93af-a8758648aa13-kube-api-access-sj245\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.495086 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.495095 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac0dc6e4-35d9-4d06-93af-a8758648aa13-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.495104 4982 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d77f6f1-5c7d-45e5-92e5-8e333d91c020-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.495113 
4982 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b565c71-916f-40d6-aa91-de5fd3a323d6-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.495123 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac0dc6e4-35d9-4d06-93af-a8758648aa13-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.495130 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b565c71-916f-40d6-aa91-de5fd3a323d6-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.495139 4982 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac0dc6e4-35d9-4d06-93af-a8758648aa13-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.495148 4982 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.495156 4982 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b565c71-916f-40d6-aa91-de5fd3a323d6-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.495174 4982 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.495183 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b565c71-916f-40d6-aa91-de5fd3a323d6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.497783 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e" (UID: "0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.498607 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ce93171-ff81-4a46-9813-2807930a945c-scripts" (OuterVolumeSpecName: "scripts") pod "0ce93171-ff81-4a46-9813-2807930a945c" (UID: "0ce93171-ff81-4a46-9813-2807930a945c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.498792 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e-config-data" (OuterVolumeSpecName: "config-data") pod "0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e" (UID: "0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.499169 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ce93171-ff81-4a46-9813-2807930a945c-config" (OuterVolumeSpecName: "config") pod "0ce93171-ff81-4a46-9813-2807930a945c" (UID: "0ce93171-ff81-4a46-9813-2807930a945c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.501801 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ce93171-ff81-4a46-9813-2807930a945c-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "0ce93171-ff81-4a46-9813-2807930a945c" (UID: "0ce93171-ff81-4a46-9813-2807930a945c"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.502256 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e2e1a17-8278-4fe9-a83e-aeb0a61c69df-kube-api-access-qsnfh" (OuterVolumeSpecName: "kube-api-access-qsnfh") pod "0e2e1a17-8278-4fe9-a83e-aeb0a61c69df" (UID: "0e2e1a17-8278-4fe9-a83e-aeb0a61c69df"). InnerVolumeSpecName "kube-api-access-qsnfh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.507564 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e-kube-api-access-69dwn" (OuterVolumeSpecName: "kube-api-access-69dwn") pod "0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e" (UID: "0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e"). InnerVolumeSpecName "kube-api-access-69dwn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.512206 4982 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.513156 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ce93171-ff81-4a46-9813-2807930a945c-kube-api-access-8ncwj" (OuterVolumeSpecName: "kube-api-access-8ncwj") pod "0ce93171-ff81-4a46-9813-2807930a945c" (UID: "0ce93171-ff81-4a46-9813-2807930a945c"). InnerVolumeSpecName "kube-api-access-8ncwj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.532939 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e2e1a17-8278-4fe9-a83e-aeb0a61c69df-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0e2e1a17-8278-4fe9-a83e-aeb0a61c69df" (UID: "0e2e1a17-8278-4fe9-a83e-aeb0a61c69df"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.533936 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e2e1a17-8278-4fe9-a83e-aeb0a61c69df-config-data" (OuterVolumeSpecName: "config-data") pod "0e2e1a17-8278-4fe9-a83e-aeb0a61c69df" (UID: "0e2e1a17-8278-4fe9-a83e-aeb0a61c69df"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.558391 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e" (UID: "0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.580210 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ce93171-ff81-4a46-9813-2807930a945c-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "0ce93171-ff81-4a46-9813-2807930a945c" (UID: "0ce93171-ff81-4a46-9813-2807930a945c"). InnerVolumeSpecName "ovn-northd-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.582963 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ce93171-ff81-4a46-9813-2807930a945c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0ce93171-ff81-4a46-9813-2807930a945c" (UID: "0ce93171-ff81-4a46-9813-2807930a945c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.587392 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e" (UID: "0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e"). InnerVolumeSpecName "memcached-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.597518 4982 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.597552 4982 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.597563 4982 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ce93171-ff81-4a46-9813-2807930a945c-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.597574 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.597586 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ce93171-ff81-4a46-9813-2807930a945c-config\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.597595 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.597605 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0ce93171-ff81-4a46-9813-2807930a945c-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.597615 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ce93171-ff81-4a46-9813-2807930a945c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.597626 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qsnfh\" (UniqueName: \"kubernetes.io/projected/0e2e1a17-8278-4fe9-a83e-aeb0a61c69df-kube-api-access-qsnfh\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.597637 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e2e1a17-8278-4fe9-a83e-aeb0a61c69df-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.597648 4982 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0ce93171-ff81-4a46-9813-2807930a945c-ovn-rundir\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.597657 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8ncwj\" (UniqueName: \"kubernetes.io/projected/0ce93171-ff81-4a46-9813-2807930a945c-kube-api-access-8ncwj\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.597668 4982 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc 
kubenswrapper[4982]: I0122 06:10:39.597678 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e2e1a17-8278-4fe9-a83e-aeb0a61c69df-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.597689 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-69dwn\" (UniqueName: \"kubernetes.io/projected/0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e-kube-api-access-69dwn\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.609279 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ce93171-ff81-4a46-9813-2807930a945c-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "0ce93171-ff81-4a46-9813-2807930a945c" (UID: "0ce93171-ff81-4a46-9813-2807930a945c"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.698746 4982 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ce93171-ff81-4a46-9813-2807930a945c-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.735776 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07b70872-6840-498b-be43-290f43590bb9" path="/var/lib/kubelet/pods/07b70872-6840-498b-be43-290f43590bb9/volumes" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.736483 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0be45e9e-2ce7-4c9b-abee-2e0249e6e53e" path="/var/lib/kubelet/pods/0be45e9e-2ce7-4c9b-abee-2e0249e6e53e/volumes" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.738044 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="536304e2-bf6d-4c4a-a617-69be7bf2931e" path="/var/lib/kubelet/pods/536304e2-bf6d-4c4a-a617-69be7bf2931e/volumes" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.740513 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66a74d17-3793-4757-b423-503ee60fc4d5" path="/var/lib/kubelet/pods/66a74d17-3793-4757-b423-503ee60fc4d5/volumes" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.741450 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ba39683-f142-405a-a1c8-83841c5b2cd0" path="/var/lib/kubelet/pods/7ba39683-f142-405a-a1c8-83841c5b2cd0/volumes" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.742226 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd4f5182-1143-4df0-93c1-33e344a1a204" path="/var/lib/kubelet/pods/bd4f5182-1143-4df0-93c1-33e344a1a204/volumes" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.743387 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bff36695-f9e7-4506-95b2-a1c99296fe75" path="/var/lib/kubelet/pods/bff36695-f9e7-4506-95b2-a1c99296fe75/volumes" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.743826 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d24e3505-d3d7-49fa-80fd-081138916d20" path="/var/lib/kubelet/pods/d24e3505-d3d7-49fa-80fd-081138916d20/volumes" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.744439 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de93226c-14b2-42b2-a553-2535096d7b81" path="/var/lib/kubelet/pods/de93226c-14b2-42b2-a553-2535096d7b81/volumes" Jan 22 06:10:39 crc 
kubenswrapper[4982]: I0122 06:10:39.744932 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dffeeb11-fe93-4cb1-ad5c-e77fbe664a70" path="/var/lib/kubelet/pods/dffeeb11-fe93-4cb1-ad5c-e77fbe664a70/volumes" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.745359 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eff35669-e559-41ee-9a8b-114907cd6ab0" path="/var/lib/kubelet/pods/eff35669-e559-41ee-9a8b-114907cd6ab0/volumes" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.841005 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.841030 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"53013384-1492-4c2e-9c7a-cd81d4d07018","Type":"ContainerDied","Data":"999b2f976f3b75a88d1743b252bb3a98cab63017dcb130885c70d48f61d72de6"} Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.841101 4982 scope.go:117] "RemoveContainer" containerID="a49fa6161cdc0ad0eb335a0a25470144a700d11c752945b8fa21ab70997554c3" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.846281 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ac0dc6e4-35d9-4d06-93af-a8758648aa13","Type":"ContainerDied","Data":"1f9576f987deb791cb03e0124573345022309bef7341248f8305569cc09baf0b"} Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.846465 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.869888 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1669f2c1-5f4c-4a3e-ae64-5d754ec522bc","Type":"ContainerDied","Data":"c98db23ad978aec75df3796c99eca0103d2811f3f463ff7edded7cbaca2205c9"} Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.869972 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.887118 4982 generic.go:334] "Generic (PLEG): container finished" podID="0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e" containerID="f2b7f771d32993fbe91c45e65c5bc7565b9b9c6611050ef239f416bde7c88a3d" exitCode=0 Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.887582 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e","Type":"ContainerDied","Data":"f2b7f771d32993fbe91c45e65c5bc7565b9b9c6611050ef239f416bde7c88a3d"} Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.887675 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e","Type":"ContainerDied","Data":"1124733371d0c1be6191c0211899f63d41958d4127cb4c8023b3908a10873171"} Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.887679 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.894489 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8d77f6f1-5c7d-45e5-92e5-8e333d91c020","Type":"ContainerDied","Data":"4d79dd43059edfedd4ad44a90b967317104e74eee098c9202db37016f5f2ecb1"} Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.901277 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.917035 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-68c6f69c56-rbr89" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.917133 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-68c6f69c56-rbr89" event={"ID":"2b565c71-916f-40d6-aa91-de5fd3a323d6","Type":"ContainerDied","Data":"b426b351c96540a971c129061668b3e6c61efe6a37d38c17deb6a92d4871eaf1"} Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.934725 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0e2e1a17-8278-4fe9-a83e-aeb0a61c69df","Type":"ContainerDied","Data":"12a2afa7d6de99f0270cfee379971747ac5d3b5cf3711176012880b96a95c85d"} Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.934988 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.938562 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_0ce93171-ff81-4a46-9813-2807930a945c/ovn-northd/0.log" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.938636 4982 generic.go:334] "Generic (PLEG): container finished" podID="0ce93171-ff81-4a46-9813-2807930a945c" containerID="752892d037f338ef999fab998236c266729937268421b318ed21e35e0b563dde" exitCode=139 Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.938719 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.938740 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-61bb-account-create-update-56s5n" Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.938789 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"0ce93171-ff81-4a46-9813-2807930a945c","Type":"ContainerDied","Data":"752892d037f338ef999fab998236c266729937268421b318ed21e35e0b563dde"} Jan 22 06:10:39 crc kubenswrapper[4982]: I0122 06:10:39.938824 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"0ce93171-ff81-4a46-9813-2807930a945c","Type":"ContainerDied","Data":"8bc005981d51764ef9c8824a7b1f3535840fc641063c2d7fb9f5188b0f3357a3"} Jan 22 06:10:40 crc kubenswrapper[4982]: E0122 06:10:40.030334 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d529c70b3892ffd04ffa9e4ea7c7bd75cded5f79ec42a50c31cd18dce68669fd" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.038888 4982 scope.go:117] "RemoveContainer" containerID="acd9ebde3d0e0f8fe4696b287db22cd21891b45ba0969952821a0ef3dc3ecee6" Jan 22 06:10:40 crc kubenswrapper[4982]: E0122 06:10:40.044506 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d529c70b3892ffd04ffa9e4ea7c7bd75cded5f79ec42a50c31cd18dce68669fd" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 22 06:10:40 crc kubenswrapper[4982]: E0122 06:10:40.046182 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d529c70b3892ffd04ffa9e4ea7c7bd75cded5f79ec42a50c31cd18dce68669fd" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 22 06:10:40 crc kubenswrapper[4982]: E0122 06:10:40.046215 4982 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="3d1d97fa-17f8-45ed-9881-5d3896c48708" containerName="nova-cell0-conductor-conductor" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.095027 4982 scope.go:117] "RemoveContainer" containerID="33fb07736ff02fa3a7d27e62002c81a800f65f43ed61da9bdbe44cbe612b4ebe" Jan 22 06:10:40 crc kubenswrapper[4982]: E0122 06:10:40.108990 4982 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 22 06:10:40 crc kubenswrapper[4982]: E0122 06:10:40.109060 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7623a66b-412d-4202-bd05-58fba1c6a3d3-config-data podName:7623a66b-412d-4202-bd05-58fba1c6a3d3 nodeName:}" failed. No retries permitted until 2026-01-22 06:10:48.109045619 +0000 UTC m=+1508.947683612 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/7623a66b-412d-4202-bd05-58fba1c6a3d3-config-data") pod "rabbitmq-server-0" (UID: "7623a66b-412d-4202-bd05-58fba1c6a3d3") : configmap "rabbitmq-config-data" not found Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.183374 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-61bb-account-create-update-56s5n"] Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.187210 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-61bb-account-create-update-56s5n"] Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.191559 4982 scope.go:117] "RemoveContainer" containerID="c3a1f31f2494aee180667a0f03331fe892eadeaa196337253d864cc858d0d3f1" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.197725 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.204826 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/memcached-0"] Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.215093 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.225636 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.230726 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-68c6f69c56-rbr89"] Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.244234 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-68c6f69c56-rbr89"] Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.262918 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.264205 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.269446 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.271666 4982 scope.go:117] "RemoveContainer" containerID="cefbf932aefddcd806c99682cdfc21a0e80875bc3e02c87761c6e4e7b2e2288e" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.275355 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.288127 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.298785 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-northd-0"] Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.300063 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.305656 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.311839 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.312320 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f394216d-b819-4d0d-b8f8-4195c9562621-operator-scripts\") on node \"crc\" 
DevicePath \"\"" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.312337 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gvbg6\" (UniqueName: \"kubernetes.io/projected/f394216d-b819-4d0d-b8f8-4195c9562621-kube-api-access-gvbg6\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.313005 4982 scope.go:117] "RemoveContainer" containerID="261f0791c2831229e062f5721d4579eac13d766cc88596aa36038f23c84e7149" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.317566 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.352213 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-b7g85" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.365609 4982 scope.go:117] "RemoveContainer" containerID="f2b7f771d32993fbe91c45e65c5bc7565b9b9c6611050ef239f416bde7c88a3d" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.399073 4982 scope.go:117] "RemoveContainer" containerID="f2b7f771d32993fbe91c45e65c5bc7565b9b9c6611050ef239f416bde7c88a3d" Jan 22 06:10:40 crc kubenswrapper[4982]: E0122 06:10:40.399463 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2b7f771d32993fbe91c45e65c5bc7565b9b9c6611050ef239f416bde7c88a3d\": container with ID starting with f2b7f771d32993fbe91c45e65c5bc7565b9b9c6611050ef239f416bde7c88a3d not found: ID does not exist" containerID="f2b7f771d32993fbe91c45e65c5bc7565b9b9c6611050ef239f416bde7c88a3d" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.399513 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2b7f771d32993fbe91c45e65c5bc7565b9b9c6611050ef239f416bde7c88a3d"} err="failed to get container status \"f2b7f771d32993fbe91c45e65c5bc7565b9b9c6611050ef239f416bde7c88a3d\": rpc error: code = NotFound desc = could not find container \"f2b7f771d32993fbe91c45e65c5bc7565b9b9c6611050ef239f416bde7c88a3d\": container with ID starting with f2b7f771d32993fbe91c45e65c5bc7565b9b9c6611050ef239f416bde7c88a3d not found: ID does not exist" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.399541 4982 scope.go:117] "RemoveContainer" containerID="92aa96879b03afd4dc777ec25243c37e4ffc13dea924e62623d2262a8332c39f" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.428693 4982 scope.go:117] "RemoveContainer" containerID="2402516dd72b79b7045a0fea1c29ce7e8868f7e5288ff8c3baf2331b568a9fc7" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.461670 4982 scope.go:117] "RemoveContainer" containerID="3ebd5e2b4a01d8d16123bc5c692c42b9c4f85f654043c5e5f979425f4bc1aaba" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.485694 4982 scope.go:117] "RemoveContainer" containerID="6bdbcca209318e7de23c54a80c39908eea30839974039d5c924f9f3adf9c2b3d" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.502952 4982 scope.go:117] "RemoveContainer" containerID="98cf6e03c88eac06c166fdc5c9a1486e6fc1f8e13aa0eff354dcd9303c5e399f" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.529396 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21-operator-scripts\") pod \"9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21\" (UID: \"9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21\") " Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.529502 
4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-962fk\" (UniqueName: \"kubernetes.io/projected/9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21-kube-api-access-962fk\") pod \"9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21\" (UID: \"9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21\") " Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.532995 4982 scope.go:117] "RemoveContainer" containerID="a14282bce1f209b3f1b5d12076473a67dc4c3e837318dad27ea3002e390683ec" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.533490 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21" (UID: "9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.537082 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21-kube-api-access-962fk" (OuterVolumeSpecName: "kube-api-access-962fk") pod "9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21" (UID: "9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21"). InnerVolumeSpecName "kube-api-access-962fk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.574295 4982 scope.go:117] "RemoveContainer" containerID="752892d037f338ef999fab998236c266729937268421b318ed21e35e0b563dde" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.631007 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.631033 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-962fk\" (UniqueName: \"kubernetes.io/projected/9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21-kube-api-access-962fk\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.637070 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.644550 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="7623a66b-412d-4202-bd05-58fba1c6a3d3" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.102:5671: connect: connection refused" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.671378 4982 scope.go:117] "RemoveContainer" containerID="a14282bce1f209b3f1b5d12076473a67dc4c3e837318dad27ea3002e390683ec" Jan 22 06:10:40 crc kubenswrapper[4982]: E0122 06:10:40.677215 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a14282bce1f209b3f1b5d12076473a67dc4c3e837318dad27ea3002e390683ec\": container with ID starting with a14282bce1f209b3f1b5d12076473a67dc4c3e837318dad27ea3002e390683ec not found: ID does not exist" containerID="a14282bce1f209b3f1b5d12076473a67dc4c3e837318dad27ea3002e390683ec" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.677257 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a14282bce1f209b3f1b5d12076473a67dc4c3e837318dad27ea3002e390683ec"} err="failed to get container status \"a14282bce1f209b3f1b5d12076473a67dc4c3e837318dad27ea3002e390683ec\": rpc error: code = NotFound desc = could not find container \"a14282bce1f209b3f1b5d12076473a67dc4c3e837318dad27ea3002e390683ec\": container with ID starting with a14282bce1f209b3f1b5d12076473a67dc4c3e837318dad27ea3002e390683ec not found: ID does not exist" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.677283 4982 scope.go:117] "RemoveContainer" containerID="752892d037f338ef999fab998236c266729937268421b318ed21e35e0b563dde" Jan 22 06:10:40 crc kubenswrapper[4982]: E0122 06:10:40.680304 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"752892d037f338ef999fab998236c266729937268421b318ed21e35e0b563dde\": container with ID starting with 752892d037f338ef999fab998236c266729937268421b318ed21e35e0b563dde not found: ID does not exist" containerID="752892d037f338ef999fab998236c266729937268421b318ed21e35e0b563dde" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.680327 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"752892d037f338ef999fab998236c266729937268421b318ed21e35e0b563dde"} err="failed to get container status \"752892d037f338ef999fab998236c266729937268421b318ed21e35e0b563dde\": rpc error: code = NotFound desc = could not find container \"752892d037f338ef999fab998236c266729937268421b318ed21e35e0b563dde\": container with ID starting with 752892d037f338ef999fab998236c266729937268421b318ed21e35e0b563dde not found: ID does not exist" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.731482 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w6c99\" (UniqueName: \"kubernetes.io/projected/8c227aae-c987-448b-9545-febc060f0929-kube-api-access-w6c99\") pod \"8c227aae-c987-448b-9545-febc060f0929\" (UID: \"8c227aae-c987-448b-9545-febc060f0929\") " Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.731521 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8c227aae-c987-448b-9545-febc060f0929-config-data-default\") pod \"8c227aae-c987-448b-9545-febc060f0929\" (UID: 
\"8c227aae-c987-448b-9545-febc060f0929\") " Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.731549 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8c227aae-c987-448b-9545-febc060f0929-kolla-config\") pod \"8c227aae-c987-448b-9545-febc060f0929\" (UID: \"8c227aae-c987-448b-9545-febc060f0929\") " Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.731571 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c227aae-c987-448b-9545-febc060f0929-galera-tls-certs\") pod \"8c227aae-c987-448b-9545-febc060f0929\" (UID: \"8c227aae-c987-448b-9545-febc060f0929\") " Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.731621 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"8c227aae-c987-448b-9545-febc060f0929\" (UID: \"8c227aae-c987-448b-9545-febc060f0929\") " Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.731670 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c227aae-c987-448b-9545-febc060f0929-combined-ca-bundle\") pod \"8c227aae-c987-448b-9545-febc060f0929\" (UID: \"8c227aae-c987-448b-9545-febc060f0929\") " Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.731785 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8c227aae-c987-448b-9545-febc060f0929-operator-scripts\") pod \"8c227aae-c987-448b-9545-febc060f0929\" (UID: \"8c227aae-c987-448b-9545-febc060f0929\") " Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.731802 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8c227aae-c987-448b-9545-febc060f0929-config-data-generated\") pod \"8c227aae-c987-448b-9545-febc060f0929\" (UID: \"8c227aae-c987-448b-9545-febc060f0929\") " Jan 22 06:10:40 crc kubenswrapper[4982]: E0122 06:10:40.732150 4982 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 22 06:10:40 crc kubenswrapper[4982]: E0122 06:10:40.732201 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/bb198f24-94e6-4569-be12-9ee57000a3e3-config-data podName:bb198f24-94e6-4569-be12-9ee57000a3e3 nodeName:}" failed. No retries permitted until 2026-01-22 06:10:48.732188141 +0000 UTC m=+1509.570826144 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/bb198f24-94e6-4569-be12-9ee57000a3e3-config-data") pod "rabbitmq-cell1-server-0" (UID: "bb198f24-94e6-4569-be12-9ee57000a3e3") : configmap "rabbitmq-cell1-config-data" not found Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.732377 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c227aae-c987-448b-9545-febc060f0929-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "8c227aae-c987-448b-9545-febc060f0929" (UID: "8c227aae-c987-448b-9545-febc060f0929"). InnerVolumeSpecName "config-data-default". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.732647 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c227aae-c987-448b-9545-febc060f0929-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "8c227aae-c987-448b-9545-febc060f0929" (UID: "8c227aae-c987-448b-9545-febc060f0929"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.733086 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c227aae-c987-448b-9545-febc060f0929-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8c227aae-c987-448b-9545-febc060f0929" (UID: "8c227aae-c987-448b-9545-febc060f0929"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.733806 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c227aae-c987-448b-9545-febc060f0929-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "8c227aae-c987-448b-9545-febc060f0929" (UID: "8c227aae-c987-448b-9545-febc060f0929"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.740193 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c227aae-c987-448b-9545-febc060f0929-kube-api-access-w6c99" (OuterVolumeSpecName: "kube-api-access-w6c99") pod "8c227aae-c987-448b-9545-febc060f0929" (UID: "8c227aae-c987-448b-9545-febc060f0929"). InnerVolumeSpecName "kube-api-access-w6c99". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.745252 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "mysql-db") pod "8c227aae-c987-448b-9545-febc060f0929" (UID: "8c227aae-c987-448b-9545-febc060f0929"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.775040 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c227aae-c987-448b-9545-febc060f0929-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "8c227aae-c987-448b-9545-febc060f0929" (UID: "8c227aae-c987-448b-9545-febc060f0929"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.777573 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c227aae-c987-448b-9545-febc060f0929-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8c227aae-c987-448b-9545-febc060f0929" (UID: "8c227aae-c987-448b-9545-febc060f0929"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.833954 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8c227aae-c987-448b-9545-febc060f0929-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.833994 4982 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8c227aae-c987-448b-9545-febc060f0929-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.834010 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w6c99\" (UniqueName: \"kubernetes.io/projected/8c227aae-c987-448b-9545-febc060f0929-kube-api-access-w6c99\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.834022 4982 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8c227aae-c987-448b-9545-febc060f0929-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.834034 4982 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8c227aae-c987-448b-9545-febc060f0929-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.834047 4982 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c227aae-c987-448b-9545-febc060f0929-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.834076 4982 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.834093 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c227aae-c987-448b-9545-febc060f0929-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.904938 4982 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.935919 4982 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.959007 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-4z48g" podUID="29f503d7-a98b-4227-b9d9-865db16a2552" containerName="ovn-controller" probeResult="failure" output="command timed out" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.993487 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-f9754c45-wzfk5" podUID="b6593811-6583-4900-b402-5af9db3887b3" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.159:9696/\": dial tcp 10.217.0.159:9696: connect: connection refused" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.997676 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-b7g85" 
event={"ID":"9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21","Type":"ContainerDied","Data":"0a3d0fd591175de11974115910232896e7d5b8a77f3abb830b042319dde5c9bf"} Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.997728 4982 scope.go:117] "RemoveContainer" containerID="a6ac05cf7240c43f23092e380fa1df8582d0653aa59e96d2961d318758638b64" Jan 22 06:10:40 crc kubenswrapper[4982]: I0122 06:10:40.997785 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-b7g85" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.029404 4982 generic.go:334] "Generic (PLEG): container finished" podID="8c227aae-c987-448b-9545-febc060f0929" containerID="8ad613a941dc0aff2c00457b54e707c691926782ca16bc1ae2e5970c5dbdaac0" exitCode=0 Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.029471 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"8c227aae-c987-448b-9545-febc060f0929","Type":"ContainerDied","Data":"8ad613a941dc0aff2c00457b54e707c691926782ca16bc1ae2e5970c5dbdaac0"} Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.029497 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"8c227aae-c987-448b-9545-febc060f0929","Type":"ContainerDied","Data":"5e9bada00b9fa32a56bda981a2d0e0381b1485890760f4c4185530e9aadbb490"} Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.029553 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.062119 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-4z48g" podUID="29f503d7-a98b-4227-b9d9-865db16a2552" containerName="ovn-controller" probeResult="failure" output=< Jan 22 06:10:41 crc kubenswrapper[4982]: ERROR - Failed to get connection status from ovn-controller, ovn-appctl exit status: 0 Jan 22 06:10:41 crc kubenswrapper[4982]: > Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.064844 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="bb198f24-94e6-4569-be12-9ee57000a3e3" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.103:5671: connect: connection refused" Jan 22 06:10:41 crc kubenswrapper[4982]: E0122 06:10:41.089475 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d07fbec73b82d46cf6bca3ce65be1c95f30a0ec4db97b397ade4d734c12b4c66" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 06:10:41 crc kubenswrapper[4982]: E0122 06:10:41.089568 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b is running failed: container process not found" containerID="8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 06:10:41 crc kubenswrapper[4982]: E0122 06:10:41.104023 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d07fbec73b82d46cf6bca3ce65be1c95f30a0ec4db97b397ade4d734c12b4c66" 
cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 06:10:41 crc kubenswrapper[4982]: E0122 06:10:41.104206 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b is running failed: container process not found" containerID="8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 06:10:41 crc kubenswrapper[4982]: E0122 06:10:41.105053 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b is running failed: container process not found" containerID="8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 06:10:41 crc kubenswrapper[4982]: E0122 06:10:41.105146 4982 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-74zx8" podUID="e8478549-adf2-4e04-b404-f4882ed405d5" containerName="ovsdb-server" Jan 22 06:10:41 crc kubenswrapper[4982]: E0122 06:10:41.105676 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d07fbec73b82d46cf6bca3ce65be1c95f30a0ec4db97b397ade4d734c12b4c66" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 06:10:41 crc kubenswrapper[4982]: E0122 06:10:41.106461 4982 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-74zx8" podUID="e8478549-adf2-4e04-b404-f4882ed405d5" containerName="ovs-vswitchd" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.123450 4982 scope.go:117] "RemoveContainer" containerID="8ad613a941dc0aff2c00457b54e707c691926782ca16bc1ae2e5970c5dbdaac0" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.131653 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-b7g85"] Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.137287 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-b7g85"] Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.143358 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.148384 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-galera-0"] Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.318774 4982 scope.go:117] "RemoveContainer" containerID="6f0770f82963bafc337371320c87e348518a8543676d5a4adc6809130236c9d3" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.368650 4982 scope.go:117] "RemoveContainer" containerID="8ad613a941dc0aff2c00457b54e707c691926782ca16bc1ae2e5970c5dbdaac0" Jan 22 06:10:41 crc kubenswrapper[4982]: E0122 06:10:41.371118 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"8ad613a941dc0aff2c00457b54e707c691926782ca16bc1ae2e5970c5dbdaac0\": container with ID starting with 8ad613a941dc0aff2c00457b54e707c691926782ca16bc1ae2e5970c5dbdaac0 not found: ID does not exist" containerID="8ad613a941dc0aff2c00457b54e707c691926782ca16bc1ae2e5970c5dbdaac0" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.371166 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ad613a941dc0aff2c00457b54e707c691926782ca16bc1ae2e5970c5dbdaac0"} err="failed to get container status \"8ad613a941dc0aff2c00457b54e707c691926782ca16bc1ae2e5970c5dbdaac0\": rpc error: code = NotFound desc = could not find container \"8ad613a941dc0aff2c00457b54e707c691926782ca16bc1ae2e5970c5dbdaac0\": container with ID starting with 8ad613a941dc0aff2c00457b54e707c691926782ca16bc1ae2e5970c5dbdaac0 not found: ID does not exist" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.371192 4982 scope.go:117] "RemoveContainer" containerID="6f0770f82963bafc337371320c87e348518a8543676d5a4adc6809130236c9d3" Jan 22 06:10:41 crc kubenswrapper[4982]: E0122 06:10:41.371689 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f0770f82963bafc337371320c87e348518a8543676d5a4adc6809130236c9d3\": container with ID starting with 6f0770f82963bafc337371320c87e348518a8543676d5a4adc6809130236c9d3 not found: ID does not exist" containerID="6f0770f82963bafc337371320c87e348518a8543676d5a4adc6809130236c9d3" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.371728 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f0770f82963bafc337371320c87e348518a8543676d5a4adc6809130236c9d3"} err="failed to get container status \"6f0770f82963bafc337371320c87e348518a8543676d5a4adc6809130236c9d3\": rpc error: code = NotFound desc = could not find container \"6f0770f82963bafc337371320c87e348518a8543676d5a4adc6809130236c9d3\": container with ID starting with 6f0770f82963bafc337371320c87e348518a8543676d5a4adc6809130236c9d3 not found: ID does not exist" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.398768 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.488697 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7623a66b-412d-4202-bd05-58fba1c6a3d3-rabbitmq-erlang-cookie\") pod \"7623a66b-412d-4202-bd05-58fba1c6a3d3\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.489038 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7623a66b-412d-4202-bd05-58fba1c6a3d3-erlang-cookie-secret\") pod \"7623a66b-412d-4202-bd05-58fba1c6a3d3\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.489062 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7623a66b-412d-4202-bd05-58fba1c6a3d3-rabbitmq-confd\") pod \"7623a66b-412d-4202-bd05-58fba1c6a3d3\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.489124 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7623a66b-412d-4202-bd05-58fba1c6a3d3-pod-info\") pod \"7623a66b-412d-4202-bd05-58fba1c6a3d3\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.489156 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7623a66b-412d-4202-bd05-58fba1c6a3d3-server-conf\") pod \"7623a66b-412d-4202-bd05-58fba1c6a3d3\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.489195 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7623a66b-412d-4202-bd05-58fba1c6a3d3-config-data\") pod \"7623a66b-412d-4202-bd05-58fba1c6a3d3\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.489211 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7623a66b-412d-4202-bd05-58fba1c6a3d3-plugins-conf\") pod \"7623a66b-412d-4202-bd05-58fba1c6a3d3\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.489315 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5lb4s\" (UniqueName: \"kubernetes.io/projected/7623a66b-412d-4202-bd05-58fba1c6a3d3-kube-api-access-5lb4s\") pod \"7623a66b-412d-4202-bd05-58fba1c6a3d3\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.489338 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"7623a66b-412d-4202-bd05-58fba1c6a3d3\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.489367 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7623a66b-412d-4202-bd05-58fba1c6a3d3-rabbitmq-tls\") pod \"7623a66b-412d-4202-bd05-58fba1c6a3d3\" (UID: 
\"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.489391 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7623a66b-412d-4202-bd05-58fba1c6a3d3-rabbitmq-plugins\") pod \"7623a66b-412d-4202-bd05-58fba1c6a3d3\" (UID: \"7623a66b-412d-4202-bd05-58fba1c6a3d3\") " Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.489588 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7623a66b-412d-4202-bd05-58fba1c6a3d3-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "7623a66b-412d-4202-bd05-58fba1c6a3d3" (UID: "7623a66b-412d-4202-bd05-58fba1c6a3d3"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.490233 4982 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7623a66b-412d-4202-bd05-58fba1c6a3d3-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.490659 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7623a66b-412d-4202-bd05-58fba1c6a3d3-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "7623a66b-412d-4202-bd05-58fba1c6a3d3" (UID: "7623a66b-412d-4202-bd05-58fba1c6a3d3"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.492122 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7623a66b-412d-4202-bd05-58fba1c6a3d3-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "7623a66b-412d-4202-bd05-58fba1c6a3d3" (UID: "7623a66b-412d-4202-bd05-58fba1c6a3d3"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.498822 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7623a66b-412d-4202-bd05-58fba1c6a3d3-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "7623a66b-412d-4202-bd05-58fba1c6a3d3" (UID: "7623a66b-412d-4202-bd05-58fba1c6a3d3"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.498877 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/7623a66b-412d-4202-bd05-58fba1c6a3d3-pod-info" (OuterVolumeSpecName: "pod-info") pod "7623a66b-412d-4202-bd05-58fba1c6a3d3" (UID: "7623a66b-412d-4202-bd05-58fba1c6a3d3"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.499277 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7623a66b-412d-4202-bd05-58fba1c6a3d3-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "7623a66b-412d-4202-bd05-58fba1c6a3d3" (UID: "7623a66b-412d-4202-bd05-58fba1c6a3d3"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.501374 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "persistence") pod "7623a66b-412d-4202-bd05-58fba1c6a3d3" (UID: "7623a66b-412d-4202-bd05-58fba1c6a3d3"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.502108 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7623a66b-412d-4202-bd05-58fba1c6a3d3-kube-api-access-5lb4s" (OuterVolumeSpecName: "kube-api-access-5lb4s") pod "7623a66b-412d-4202-bd05-58fba1c6a3d3" (UID: "7623a66b-412d-4202-bd05-58fba1c6a3d3"). InnerVolumeSpecName "kube-api-access-5lb4s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.512173 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7623a66b-412d-4202-bd05-58fba1c6a3d3-config-data" (OuterVolumeSpecName: "config-data") pod "7623a66b-412d-4202-bd05-58fba1c6a3d3" (UID: "7623a66b-412d-4202-bd05-58fba1c6a3d3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.554787 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7623a66b-412d-4202-bd05-58fba1c6a3d3-server-conf" (OuterVolumeSpecName: "server-conf") pod "7623a66b-412d-4202-bd05-58fba1c6a3d3" (UID: "7623a66b-412d-4202-bd05-58fba1c6a3d3"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.591387 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5lb4s\" (UniqueName: \"kubernetes.io/projected/7623a66b-412d-4202-bd05-58fba1c6a3d3-kube-api-access-5lb4s\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.591432 4982 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.591443 4982 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7623a66b-412d-4202-bd05-58fba1c6a3d3-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.591451 4982 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7623a66b-412d-4202-bd05-58fba1c6a3d3-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.591460 4982 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7623a66b-412d-4202-bd05-58fba1c6a3d3-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.591468 4982 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7623a66b-412d-4202-bd05-58fba1c6a3d3-pod-info\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.591477 4982 reconciler_common.go:293] "Volume detached for volume \"server-conf\" 
(UniqueName: \"kubernetes.io/configmap/7623a66b-412d-4202-bd05-58fba1c6a3d3-server-conf\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.591486 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7623a66b-412d-4202-bd05-58fba1c6a3d3-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.591493 4982 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7623a66b-412d-4202-bd05-58fba1c6a3d3-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.605979 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7623a66b-412d-4202-bd05-58fba1c6a3d3-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "7623a66b-412d-4202-bd05-58fba1c6a3d3" (UID: "7623a66b-412d-4202-bd05-58fba1c6a3d3"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.607595 4982 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.675020 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.697281 4982 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.697311 4982 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7623a66b-412d-4202-bd05-58fba1c6a3d3-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:41 crc kubenswrapper[4982]: E0122 06:10:41.707871 4982 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Jan 22 06:10:41 crc kubenswrapper[4982]: command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: 2026-01-22T06:10:34Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Jan 22 06:10:41 crc kubenswrapper[4982]: /etc/init.d/functions: line 589: 397 Alarm clock "$@" Jan 22 06:10:41 crc kubenswrapper[4982]: > execCommand=["/usr/share/ovn/scripts/ovn-ctl","stop_controller"] containerName="ovn-controller" pod="openstack/ovn-controller-4z48g" message=< Jan 22 06:10:41 crc kubenswrapper[4982]: Exiting ovn-controller (1) [FAILED] Jan 22 06:10:41 crc kubenswrapper[4982]: Killing ovn-controller (1) [ OK ] Jan 22 06:10:41 crc kubenswrapper[4982]: 2026-01-22T06:10:34Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Jan 22 06:10:41 crc kubenswrapper[4982]: /etc/init.d/functions: line 589: 397 Alarm clock "$@" Jan 22 06:10:41 crc kubenswrapper[4982]: > Jan 22 06:10:41 crc kubenswrapper[4982]: E0122 06:10:41.707936 4982 kuberuntime_container.go:691] "PreStop hook failed" err=< Jan 22 06:10:41 crc kubenswrapper[4982]: command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: 2026-01-22T06:10:34Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Jan 22 06:10:41 crc kubenswrapper[4982]: /etc/init.d/functions: line 589: 397 Alarm clock "$@" Jan 22 06:10:41 crc kubenswrapper[4982]: > 
pod="openstack/ovn-controller-4z48g" podUID="29f503d7-a98b-4227-b9d9-865db16a2552" containerName="ovn-controller" containerID="cri-o://e5c83d320db754803d3204ed17858058cd2a48d175560794c946abe904478eb5" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.707980 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-4z48g" podUID="29f503d7-a98b-4227-b9d9-865db16a2552" containerName="ovn-controller" containerID="cri-o://e5c83d320db754803d3204ed17858058cd2a48d175560794c946abe904478eb5" gracePeriod=22 Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.731947 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ce93171-ff81-4a46-9813-2807930a945c" path="/var/lib/kubelet/pods/0ce93171-ff81-4a46-9813-2807930a945c/volumes" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.732704 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e2e1a17-8278-4fe9-a83e-aeb0a61c69df" path="/var/lib/kubelet/pods/0e2e1a17-8278-4fe9-a83e-aeb0a61c69df/volumes" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.733426 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e" path="/var/lib/kubelet/pods/0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e/volumes" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.734725 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1669f2c1-5f4c-4a3e-ae64-5d754ec522bc" path="/var/lib/kubelet/pods/1669f2c1-5f4c-4a3e-ae64-5d754ec522bc/volumes" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.735437 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b565c71-916f-40d6-aa91-de5fd3a323d6" path="/var/lib/kubelet/pods/2b565c71-916f-40d6-aa91-de5fd3a323d6/volumes" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.736237 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53013384-1492-4c2e-9c7a-cd81d4d07018" path="/var/lib/kubelet/pods/53013384-1492-4c2e-9c7a-cd81d4d07018/volumes" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.737603 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c227aae-c987-448b-9545-febc060f0929" path="/var/lib/kubelet/pods/8c227aae-c987-448b-9545-febc060f0929/volumes" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.738425 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d77f6f1-5c7d-45e5-92e5-8e333d91c020" path="/var/lib/kubelet/pods/8d77f6f1-5c7d-45e5-92e5-8e333d91c020/volumes" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.740244 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21" path="/var/lib/kubelet/pods/9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21/volumes" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.741330 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac0dc6e4-35d9-4d06-93af-a8758648aa13" path="/var/lib/kubelet/pods/ac0dc6e4-35d9-4d06-93af-a8758648aa13/volumes" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.745274 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f394216d-b819-4d0d-b8f8-4195c9562621" path="/var/lib/kubelet/pods/f394216d-b819-4d0d-b8f8-4195c9562621/volumes" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.798628 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wqpw8\" (UniqueName: 
\"kubernetes.io/projected/bb198f24-94e6-4569-be12-9ee57000a3e3-kube-api-access-wqpw8\") pod \"bb198f24-94e6-4569-be12-9ee57000a3e3\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.798711 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/bb198f24-94e6-4569-be12-9ee57000a3e3-rabbitmq-confd\") pod \"bb198f24-94e6-4569-be12-9ee57000a3e3\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.798761 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/bb198f24-94e6-4569-be12-9ee57000a3e3-server-conf\") pod \"bb198f24-94e6-4569-be12-9ee57000a3e3\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.798786 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/bb198f24-94e6-4569-be12-9ee57000a3e3-erlang-cookie-secret\") pod \"bb198f24-94e6-4569-be12-9ee57000a3e3\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.798803 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/bb198f24-94e6-4569-be12-9ee57000a3e3-rabbitmq-tls\") pod \"bb198f24-94e6-4569-be12-9ee57000a3e3\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.798827 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bb198f24-94e6-4569-be12-9ee57000a3e3-config-data\") pod \"bb198f24-94e6-4569-be12-9ee57000a3e3\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.798878 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/bb198f24-94e6-4569-be12-9ee57000a3e3-pod-info\") pod \"bb198f24-94e6-4569-be12-9ee57000a3e3\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.798901 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/bb198f24-94e6-4569-be12-9ee57000a3e3-rabbitmq-erlang-cookie\") pod \"bb198f24-94e6-4569-be12-9ee57000a3e3\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.798926 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/bb198f24-94e6-4569-be12-9ee57000a3e3-rabbitmq-plugins\") pod \"bb198f24-94e6-4569-be12-9ee57000a3e3\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.798970 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/bb198f24-94e6-4569-be12-9ee57000a3e3-plugins-conf\") pod \"bb198f24-94e6-4569-be12-9ee57000a3e3\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.798991 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage08-crc\") pod \"bb198f24-94e6-4569-be12-9ee57000a3e3\" (UID: \"bb198f24-94e6-4569-be12-9ee57000a3e3\") " Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.800367 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bb198f24-94e6-4569-be12-9ee57000a3e3-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "bb198f24-94e6-4569-be12-9ee57000a3e3" (UID: "bb198f24-94e6-4569-be12-9ee57000a3e3"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.800768 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb198f24-94e6-4569-be12-9ee57000a3e3-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "bb198f24-94e6-4569-be12-9ee57000a3e3" (UID: "bb198f24-94e6-4569-be12-9ee57000a3e3"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.802185 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb198f24-94e6-4569-be12-9ee57000a3e3-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "bb198f24-94e6-4569-be12-9ee57000a3e3" (UID: "bb198f24-94e6-4569-be12-9ee57000a3e3"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.804472 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb198f24-94e6-4569-be12-9ee57000a3e3-kube-api-access-wqpw8" (OuterVolumeSpecName: "kube-api-access-wqpw8") pod "bb198f24-94e6-4569-be12-9ee57000a3e3" (UID: "bb198f24-94e6-4569-be12-9ee57000a3e3"). InnerVolumeSpecName "kube-api-access-wqpw8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.805467 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb198f24-94e6-4569-be12-9ee57000a3e3-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "bb198f24-94e6-4569-be12-9ee57000a3e3" (UID: "bb198f24-94e6-4569-be12-9ee57000a3e3"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.810325 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/bb198f24-94e6-4569-be12-9ee57000a3e3-pod-info" (OuterVolumeSpecName: "pod-info") pod "bb198f24-94e6-4569-be12-9ee57000a3e3" (UID: "bb198f24-94e6-4569-be12-9ee57000a3e3"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.810555 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "persistence") pod "bb198f24-94e6-4569-be12-9ee57000a3e3" (UID: "bb198f24-94e6-4569-be12-9ee57000a3e3"). InnerVolumeSpecName "local-storage08-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.810663 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb198f24-94e6-4569-be12-9ee57000a3e3-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "bb198f24-94e6-4569-be12-9ee57000a3e3" (UID: "bb198f24-94e6-4569-be12-9ee57000a3e3"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.814689 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7c5dd486cd-r6bbs" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.818735 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bb198f24-94e6-4569-be12-9ee57000a3e3-config-data" (OuterVolumeSpecName: "config-data") pod "bb198f24-94e6-4569-be12-9ee57000a3e3" (UID: "bb198f24-94e6-4569-be12-9ee57000a3e3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.839109 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bb198f24-94e6-4569-be12-9ee57000a3e3-server-conf" (OuterVolumeSpecName: "server-conf") pod "bb198f24-94e6-4569-be12-9ee57000a3e3" (UID: "bb198f24-94e6-4569-be12-9ee57000a3e3"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.905499 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-credential-keys\") pod \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.905578 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s68vt\" (UniqueName: \"kubernetes.io/projected/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-kube-api-access-s68vt\") pod \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.905622 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-combined-ca-bundle\") pod \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.905657 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-fernet-keys\") pod \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.905679 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-public-tls-certs\") pod \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.905727 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-scripts\") pod \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.905782 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-internal-tls-certs\") pod \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.905801 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-config-data\") pod \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\" (UID: \"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c\") " Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.906116 4982 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/bb198f24-94e6-4569-be12-9ee57000a3e3-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.906136 4982 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/bb198f24-94e6-4569-be12-9ee57000a3e3-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.906156 4982 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.906166 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wqpw8\" (UniqueName: \"kubernetes.io/projected/bb198f24-94e6-4569-be12-9ee57000a3e3-kube-api-access-wqpw8\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.906177 4982 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/bb198f24-94e6-4569-be12-9ee57000a3e3-server-conf\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.906185 4982 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/bb198f24-94e6-4569-be12-9ee57000a3e3-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.906193 4982 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/bb198f24-94e6-4569-be12-9ee57000a3e3-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.906201 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bb198f24-94e6-4569-be12-9ee57000a3e3-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.906210 4982 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/bb198f24-94e6-4569-be12-9ee57000a3e3-pod-info\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.906225 4982 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/bb198f24-94e6-4569-be12-9ee57000a3e3-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 
22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.907884 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb198f24-94e6-4569-be12-9ee57000a3e3-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "bb198f24-94e6-4569-be12-9ee57000a3e3" (UID: "bb198f24-94e6-4569-be12-9ee57000a3e3"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.910527 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "e9f36aaa-d627-4dfe-ab21-b5bedee0a25c" (UID: "e9f36aaa-d627-4dfe-ab21-b5bedee0a25c"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.913927 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "e9f36aaa-d627-4dfe-ab21-b5bedee0a25c" (UID: "e9f36aaa-d627-4dfe-ab21-b5bedee0a25c"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.917079 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-scripts" (OuterVolumeSpecName: "scripts") pod "e9f36aaa-d627-4dfe-ab21-b5bedee0a25c" (UID: "e9f36aaa-d627-4dfe-ab21-b5bedee0a25c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.920419 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-kube-api-access-s68vt" (OuterVolumeSpecName: "kube-api-access-s68vt") pod "e9f36aaa-d627-4dfe-ab21-b5bedee0a25c" (UID: "e9f36aaa-d627-4dfe-ab21-b5bedee0a25c"). InnerVolumeSpecName "kube-api-access-s68vt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.922298 4982 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.940611 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-config-data" (OuterVolumeSpecName: "config-data") pod "e9f36aaa-d627-4dfe-ab21-b5bedee0a25c" (UID: "e9f36aaa-d627-4dfe-ab21-b5bedee0a25c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.940693 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e9f36aaa-d627-4dfe-ab21-b5bedee0a25c" (UID: "e9f36aaa-d627-4dfe-ab21-b5bedee0a25c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.959982 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "e9f36aaa-d627-4dfe-ab21-b5bedee0a25c" (UID: "e9f36aaa-d627-4dfe-ab21-b5bedee0a25c"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:41 crc kubenswrapper[4982]: I0122 06:10:41.960675 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "e9f36aaa-d627-4dfe-ab21-b5bedee0a25c" (UID: "e9f36aaa-d627-4dfe-ab21-b5bedee0a25c"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.007589 4982 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.007865 4982 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.007881 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.007890 4982 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.007898 4982 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/bb198f24-94e6-4569-be12-9ee57000a3e3-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.007906 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s68vt\" (UniqueName: \"kubernetes.io/projected/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-kube-api-access-s68vt\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.007915 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.007923 4982 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.007930 4982 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.007938 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c-scripts\") 
on node \"crc\" DevicePath \"\"" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.097489 4982 generic.go:334] "Generic (PLEG): container finished" podID="7623a66b-412d-4202-bd05-58fba1c6a3d3" containerID="a183af96beefaed029587f49bdd19bf06abd4b4c476f917c768fe13137f46727" exitCode=0 Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.097599 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7623a66b-412d-4202-bd05-58fba1c6a3d3","Type":"ContainerDied","Data":"a183af96beefaed029587f49bdd19bf06abd4b4c476f917c768fe13137f46727"} Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.097684 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7623a66b-412d-4202-bd05-58fba1c6a3d3","Type":"ContainerDied","Data":"7df6b506e283f74ec13809ebb9b8530f53ed35eea3c42c90f1517dfd93582813"} Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.097702 4982 scope.go:117] "RemoveContainer" containerID="a183af96beefaed029587f49bdd19bf06abd4b4c476f917c768fe13137f46727" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.097827 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.101021 4982 generic.go:334] "Generic (PLEG): container finished" podID="bb198f24-94e6-4569-be12-9ee57000a3e3" containerID="5e0201502839733468893c507dd6c3694d211c79ab29f93898d777e9823bb132" exitCode=0 Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.101117 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.101677 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"bb198f24-94e6-4569-be12-9ee57000a3e3","Type":"ContainerDied","Data":"5e0201502839733468893c507dd6c3694d211c79ab29f93898d777e9823bb132"} Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.101710 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"bb198f24-94e6-4569-be12-9ee57000a3e3","Type":"ContainerDied","Data":"6bcae2387f08466b835ab90ceae11cd06eaebd809d7f73f39e492c3ee8c39c3e"} Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.109667 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-4z48g_29f503d7-a98b-4227-b9d9-865db16a2552/ovn-controller/0.log" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.109707 4982 generic.go:334] "Generic (PLEG): container finished" podID="29f503d7-a98b-4227-b9d9-865db16a2552" containerID="e5c83d320db754803d3204ed17858058cd2a48d175560794c946abe904478eb5" exitCode=139 Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.109750 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4z48g" event={"ID":"29f503d7-a98b-4227-b9d9-865db16a2552","Type":"ContainerDied","Data":"e5c83d320db754803d3204ed17858058cd2a48d175560794c946abe904478eb5"} Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.111614 4982 generic.go:334] "Generic (PLEG): container finished" podID="e9f36aaa-d627-4dfe-ab21-b5bedee0a25c" containerID="d9527b25256bc946222c27f68c3f6bbab1d33657220aeaed03ede4fe9c3b35bf" exitCode=0 Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.111685 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7c5dd486cd-r6bbs" 
event={"ID":"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c","Type":"ContainerDied","Data":"d9527b25256bc946222c27f68c3f6bbab1d33657220aeaed03ede4fe9c3b35bf"} Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.111726 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7c5dd486cd-r6bbs" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.111885 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7c5dd486cd-r6bbs" event={"ID":"e9f36aaa-d627-4dfe-ab21-b5bedee0a25c","Type":"ContainerDied","Data":"674e5b35a4f913f3939a4f526e18e8cd5bf32d30da8a86756283841fe83e39e8"} Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.117390 4982 scope.go:117] "RemoveContainer" containerID="3fe91b4130b040ab517d1434d08c73fd677beaa39c9b2607e5be38b57487bb3f" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.124469 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.135768 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.152584 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.171900 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.184073 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-7c5dd486cd-r6bbs"] Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.188934 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-7c5dd486cd-r6bbs"] Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.190080 4982 scope.go:117] "RemoveContainer" containerID="a183af96beefaed029587f49bdd19bf06abd4b4c476f917c768fe13137f46727" Jan 22 06:10:42 crc kubenswrapper[4982]: E0122 06:10:42.191279 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a183af96beefaed029587f49bdd19bf06abd4b4c476f917c768fe13137f46727\": container with ID starting with a183af96beefaed029587f49bdd19bf06abd4b4c476f917c768fe13137f46727 not found: ID does not exist" containerID="a183af96beefaed029587f49bdd19bf06abd4b4c476f917c768fe13137f46727" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.191320 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a183af96beefaed029587f49bdd19bf06abd4b4c476f917c768fe13137f46727"} err="failed to get container status \"a183af96beefaed029587f49bdd19bf06abd4b4c476f917c768fe13137f46727\": rpc error: code = NotFound desc = could not find container \"a183af96beefaed029587f49bdd19bf06abd4b4c476f917c768fe13137f46727\": container with ID starting with a183af96beefaed029587f49bdd19bf06abd4b4c476f917c768fe13137f46727 not found: ID does not exist" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.191353 4982 scope.go:117] "RemoveContainer" containerID="3fe91b4130b040ab517d1434d08c73fd677beaa39c9b2607e5be38b57487bb3f" Jan 22 06:10:42 crc kubenswrapper[4982]: E0122 06:10:42.192140 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3fe91b4130b040ab517d1434d08c73fd677beaa39c9b2607e5be38b57487bb3f\": container with ID starting with 3fe91b4130b040ab517d1434d08c73fd677beaa39c9b2607e5be38b57487bb3f not found: 
ID does not exist" containerID="3fe91b4130b040ab517d1434d08c73fd677beaa39c9b2607e5be38b57487bb3f" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.192170 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3fe91b4130b040ab517d1434d08c73fd677beaa39c9b2607e5be38b57487bb3f"} err="failed to get container status \"3fe91b4130b040ab517d1434d08c73fd677beaa39c9b2607e5be38b57487bb3f\": rpc error: code = NotFound desc = could not find container \"3fe91b4130b040ab517d1434d08c73fd677beaa39c9b2607e5be38b57487bb3f\": container with ID starting with 3fe91b4130b040ab517d1434d08c73fd677beaa39c9b2607e5be38b57487bb3f not found: ID does not exist" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.192185 4982 scope.go:117] "RemoveContainer" containerID="5e0201502839733468893c507dd6c3694d211c79ab29f93898d777e9823bb132" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.283044 4982 scope.go:117] "RemoveContainer" containerID="74c05473a6c9b13b228f742222176b7edbfe2f651ea4f00507ec3d80a3dd1709" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.486069 4982 scope.go:117] "RemoveContainer" containerID="5e0201502839733468893c507dd6c3694d211c79ab29f93898d777e9823bb132" Jan 22 06:10:42 crc kubenswrapper[4982]: E0122 06:10:42.490525 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e0201502839733468893c507dd6c3694d211c79ab29f93898d777e9823bb132\": container with ID starting with 5e0201502839733468893c507dd6c3694d211c79ab29f93898d777e9823bb132 not found: ID does not exist" containerID="5e0201502839733468893c507dd6c3694d211c79ab29f93898d777e9823bb132" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.490595 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e0201502839733468893c507dd6c3694d211c79ab29f93898d777e9823bb132"} err="failed to get container status \"5e0201502839733468893c507dd6c3694d211c79ab29f93898d777e9823bb132\": rpc error: code = NotFound desc = could not find container \"5e0201502839733468893c507dd6c3694d211c79ab29f93898d777e9823bb132\": container with ID starting with 5e0201502839733468893c507dd6c3694d211c79ab29f93898d777e9823bb132 not found: ID does not exist" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.490622 4982 scope.go:117] "RemoveContainer" containerID="74c05473a6c9b13b228f742222176b7edbfe2f651ea4f00507ec3d80a3dd1709" Jan 22 06:10:42 crc kubenswrapper[4982]: E0122 06:10:42.491031 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74c05473a6c9b13b228f742222176b7edbfe2f651ea4f00507ec3d80a3dd1709\": container with ID starting with 74c05473a6c9b13b228f742222176b7edbfe2f651ea4f00507ec3d80a3dd1709 not found: ID does not exist" containerID="74c05473a6c9b13b228f742222176b7edbfe2f651ea4f00507ec3d80a3dd1709" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.491053 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74c05473a6c9b13b228f742222176b7edbfe2f651ea4f00507ec3d80a3dd1709"} err="failed to get container status \"74c05473a6c9b13b228f742222176b7edbfe2f651ea4f00507ec3d80a3dd1709\": rpc error: code = NotFound desc = could not find container \"74c05473a6c9b13b228f742222176b7edbfe2f651ea4f00507ec3d80a3dd1709\": container with ID starting with 74c05473a6c9b13b228f742222176b7edbfe2f651ea4f00507ec3d80a3dd1709 not found: ID does not exist" Jan 22 06:10:42 crc 
kubenswrapper[4982]: I0122 06:10:42.491069 4982 scope.go:117] "RemoveContainer" containerID="d9527b25256bc946222c27f68c3f6bbab1d33657220aeaed03ede4fe9c3b35bf" Jan 22 06:10:42 crc kubenswrapper[4982]: E0122 06:10:42.538031 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cdcccc69427ee2d1728f8e7d6f59420bebf9be65e0a9061fee307038b83d9cb6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 22 06:10:42 crc kubenswrapper[4982]: E0122 06:10:42.543823 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cdcccc69427ee2d1728f8e7d6f59420bebf9be65e0a9061fee307038b83d9cb6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 22 06:10:42 crc kubenswrapper[4982]: E0122 06:10:42.546327 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cdcccc69427ee2d1728f8e7d6f59420bebf9be65e0a9061fee307038b83d9cb6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 22 06:10:42 crc kubenswrapper[4982]: E0122 06:10:42.546379 4982 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="bcc9f070-463f-4fef-8eb0-fd0cbe567f70" containerName="nova-cell1-conductor-conductor" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.555413 4982 scope.go:117] "RemoveContainer" containerID="d9527b25256bc946222c27f68c3f6bbab1d33657220aeaed03ede4fe9c3b35bf" Jan 22 06:10:42 crc kubenswrapper[4982]: E0122 06:10:42.556027 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9527b25256bc946222c27f68c3f6bbab1d33657220aeaed03ede4fe9c3b35bf\": container with ID starting with d9527b25256bc946222c27f68c3f6bbab1d33657220aeaed03ede4fe9c3b35bf not found: ID does not exist" containerID="d9527b25256bc946222c27f68c3f6bbab1d33657220aeaed03ede4fe9c3b35bf" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.556063 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9527b25256bc946222c27f68c3f6bbab1d33657220aeaed03ede4fe9c3b35bf"} err="failed to get container status \"d9527b25256bc946222c27f68c3f6bbab1d33657220aeaed03ede4fe9c3b35bf\": rpc error: code = NotFound desc = could not find container \"d9527b25256bc946222c27f68c3f6bbab1d33657220aeaed03ede4fe9c3b35bf\": container with ID starting with d9527b25256bc946222c27f68c3f6bbab1d33657220aeaed03ede4fe9c3b35bf not found: ID does not exist" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.735968 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/kube-state-metrics-0" podUID="07b70872-6840-498b-be43-290f43590bb9" containerName="kube-state-metrics" probeResult="failure" output="Get \"https://10.217.0.196:8081/readyz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.778442 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-796946d7c7-z4qnr" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.790448 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-4z48g_29f503d7-a98b-4227-b9d9-865db16a2552/ovn-controller/0.log" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.790515 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4z48g" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.791489 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.821635 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/681a16ff-1468-4a9a-a692-5461230072bd-ceilometer-tls-certs\") pod \"681a16ff-1468-4a9a-a692-5461230072bd\" (UID: \"681a16ff-1468-4a9a-a692-5461230072bd\") " Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.821683 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/29f503d7-a98b-4227-b9d9-865db16a2552-var-run\") pod \"29f503d7-a98b-4227-b9d9-865db16a2552\" (UID: \"29f503d7-a98b-4227-b9d9-865db16a2552\") " Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.821722 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8c1843cf-e6eb-400b-84ca-5e9d209a23ce-config-data-custom\") pod \"8c1843cf-e6eb-400b-84ca-5e9d209a23ce\" (UID: \"8c1843cf-e6eb-400b-84ca-5e9d209a23ce\") " Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.821750 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/681a16ff-1468-4a9a-a692-5461230072bd-config-data\") pod \"681a16ff-1468-4a9a-a692-5461230072bd\" (UID: \"681a16ff-1468-4a9a-a692-5461230072bd\") " Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.822119 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nnb89\" (UniqueName: \"kubernetes.io/projected/681a16ff-1468-4a9a-a692-5461230072bd-kube-api-access-nnb89\") pod \"681a16ff-1468-4a9a-a692-5461230072bd\" (UID: \"681a16ff-1468-4a9a-a692-5461230072bd\") " Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.822170 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/681a16ff-1468-4a9a-a692-5461230072bd-combined-ca-bundle\") pod \"681a16ff-1468-4a9a-a692-5461230072bd\" (UID: \"681a16ff-1468-4a9a-a692-5461230072bd\") " Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.822189 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/681a16ff-1468-4a9a-a692-5461230072bd-log-httpd\") pod \"681a16ff-1468-4a9a-a692-5461230072bd\" (UID: \"681a16ff-1468-4a9a-a692-5461230072bd\") " Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.822209 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/29f503d7-a98b-4227-b9d9-865db16a2552-ovn-controller-tls-certs\") pod \"29f503d7-a98b-4227-b9d9-865db16a2552\" (UID: \"29f503d7-a98b-4227-b9d9-865db16a2552\") " Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 
06:10:42.822239 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29f503d7-a98b-4227-b9d9-865db16a2552-combined-ca-bundle\") pod \"29f503d7-a98b-4227-b9d9-865db16a2552\" (UID: \"29f503d7-a98b-4227-b9d9-865db16a2552\") " Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.822583 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c1843cf-e6eb-400b-84ca-5e9d209a23ce-combined-ca-bundle\") pod \"8c1843cf-e6eb-400b-84ca-5e9d209a23ce\" (UID: \"8c1843cf-e6eb-400b-84ca-5e9d209a23ce\") " Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.822626 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/681a16ff-1468-4a9a-a692-5461230072bd-scripts\") pod \"681a16ff-1468-4a9a-a692-5461230072bd\" (UID: \"681a16ff-1468-4a9a-a692-5461230072bd\") " Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.822651 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c1843cf-e6eb-400b-84ca-5e9d209a23ce-config-data\") pod \"8c1843cf-e6eb-400b-84ca-5e9d209a23ce\" (UID: \"8c1843cf-e6eb-400b-84ca-5e9d209a23ce\") " Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.822677 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pxbcs\" (UniqueName: \"kubernetes.io/projected/8c1843cf-e6eb-400b-84ca-5e9d209a23ce-kube-api-access-pxbcs\") pod \"8c1843cf-e6eb-400b-84ca-5e9d209a23ce\" (UID: \"8c1843cf-e6eb-400b-84ca-5e9d209a23ce\") " Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.822691 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/29f503d7-a98b-4227-b9d9-865db16a2552-var-log-ovn\") pod \"29f503d7-a98b-4227-b9d9-865db16a2552\" (UID: \"29f503d7-a98b-4227-b9d9-865db16a2552\") " Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.822717 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/681a16ff-1468-4a9a-a692-5461230072bd-run-httpd\") pod \"681a16ff-1468-4a9a-a692-5461230072bd\" (UID: \"681a16ff-1468-4a9a-a692-5461230072bd\") " Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.822738 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/29f503d7-a98b-4227-b9d9-865db16a2552-var-run-ovn\") pod \"29f503d7-a98b-4227-b9d9-865db16a2552\" (UID: \"29f503d7-a98b-4227-b9d9-865db16a2552\") " Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.822756 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c1843cf-e6eb-400b-84ca-5e9d209a23ce-logs\") pod \"8c1843cf-e6eb-400b-84ca-5e9d209a23ce\" (UID: \"8c1843cf-e6eb-400b-84ca-5e9d209a23ce\") " Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.822789 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/681a16ff-1468-4a9a-a692-5461230072bd-sg-core-conf-yaml\") pod \"681a16ff-1468-4a9a-a692-5461230072bd\" (UID: \"681a16ff-1468-4a9a-a692-5461230072bd\") " Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.822827 4982 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/29f503d7-a98b-4227-b9d9-865db16a2552-scripts\") pod \"29f503d7-a98b-4227-b9d9-865db16a2552\" (UID: \"29f503d7-a98b-4227-b9d9-865db16a2552\") " Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.822869 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nll4v\" (UniqueName: \"kubernetes.io/projected/29f503d7-a98b-4227-b9d9-865db16a2552-kube-api-access-nll4v\") pod \"29f503d7-a98b-4227-b9d9-865db16a2552\" (UID: \"29f503d7-a98b-4227-b9d9-865db16a2552\") " Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.823649 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/29f503d7-a98b-4227-b9d9-865db16a2552-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "29f503d7-a98b-4227-b9d9-865db16a2552" (UID: "29f503d7-a98b-4227-b9d9-865db16a2552"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.824415 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c1843cf-e6eb-400b-84ca-5e9d209a23ce-logs" (OuterVolumeSpecName: "logs") pod "8c1843cf-e6eb-400b-84ca-5e9d209a23ce" (UID: "8c1843cf-e6eb-400b-84ca-5e9d209a23ce"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.825238 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/29f503d7-a98b-4227-b9d9-865db16a2552-var-run" (OuterVolumeSpecName: "var-run") pod "29f503d7-a98b-4227-b9d9-865db16a2552" (UID: "29f503d7-a98b-4227-b9d9-865db16a2552"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.825960 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/29f503d7-a98b-4227-b9d9-865db16a2552-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "29f503d7-a98b-4227-b9d9-865db16a2552" (UID: "29f503d7-a98b-4227-b9d9-865db16a2552"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.826362 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/681a16ff-1468-4a9a-a692-5461230072bd-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "681a16ff-1468-4a9a-a692-5461230072bd" (UID: "681a16ff-1468-4a9a-a692-5461230072bd"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.827099 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29f503d7-a98b-4227-b9d9-865db16a2552-scripts" (OuterVolumeSpecName: "scripts") pod "29f503d7-a98b-4227-b9d9-865db16a2552" (UID: "29f503d7-a98b-4227-b9d9-865db16a2552"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.829703 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/681a16ff-1468-4a9a-a692-5461230072bd-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "681a16ff-1468-4a9a-a692-5461230072bd" (UID: "681a16ff-1468-4a9a-a692-5461230072bd"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.831999 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c1843cf-e6eb-400b-84ca-5e9d209a23ce-kube-api-access-pxbcs" (OuterVolumeSpecName: "kube-api-access-pxbcs") pod "8c1843cf-e6eb-400b-84ca-5e9d209a23ce" (UID: "8c1843cf-e6eb-400b-84ca-5e9d209a23ce"). InnerVolumeSpecName "kube-api-access-pxbcs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.832363 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/681a16ff-1468-4a9a-a692-5461230072bd-kube-api-access-nnb89" (OuterVolumeSpecName: "kube-api-access-nnb89") pod "681a16ff-1468-4a9a-a692-5461230072bd" (UID: "681a16ff-1468-4a9a-a692-5461230072bd"). InnerVolumeSpecName "kube-api-access-nnb89". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.832426 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c1843cf-e6eb-400b-84ca-5e9d209a23ce-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "8c1843cf-e6eb-400b-84ca-5e9d209a23ce" (UID: "8c1843cf-e6eb-400b-84ca-5e9d209a23ce"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.832436 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29f503d7-a98b-4227-b9d9-865db16a2552-kube-api-access-nll4v" (OuterVolumeSpecName: "kube-api-access-nll4v") pod "29f503d7-a98b-4227-b9d9-865db16a2552" (UID: "29f503d7-a98b-4227-b9d9-865db16a2552"). InnerVolumeSpecName "kube-api-access-nll4v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.832754 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/681a16ff-1468-4a9a-a692-5461230072bd-scripts" (OuterVolumeSpecName: "scripts") pod "681a16ff-1468-4a9a-a692-5461230072bd" (UID: "681a16ff-1468-4a9a-a692-5461230072bd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.876048 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c1843cf-e6eb-400b-84ca-5e9d209a23ce-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8c1843cf-e6eb-400b-84ca-5e9d209a23ce" (UID: "8c1843cf-e6eb-400b-84ca-5e9d209a23ce"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.883245 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/681a16ff-1468-4a9a-a692-5461230072bd-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "681a16ff-1468-4a9a-a692-5461230072bd" (UID: "681a16ff-1468-4a9a-a692-5461230072bd"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.896421 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.898042 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-59458f7b58-qd4fn" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.900550 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/681a16ff-1468-4a9a-a692-5461230072bd-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "681a16ff-1468-4a9a-a692-5461230072bd" (UID: "681a16ff-1468-4a9a-a692-5461230072bd"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.925407 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c1843cf-e6eb-400b-84ca-5e9d209a23ce-config-data" (OuterVolumeSpecName: "config-data") pod "8c1843cf-e6eb-400b-84ca-5e9d209a23ce" (UID: "8c1843cf-e6eb-400b-84ca-5e9d209a23ce"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.925794 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f54d316-aa4d-4c56-8681-3fa9816a1b80-combined-ca-bundle\") pod \"4f54d316-aa4d-4c56-8681-3fa9816a1b80\" (UID: \"4f54d316-aa4d-4c56-8681-3fa9816a1b80\") " Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.925904 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g49hs\" (UniqueName: \"kubernetes.io/projected/4f54d316-aa4d-4c56-8681-3fa9816a1b80-kube-api-access-g49hs\") pod \"4f54d316-aa4d-4c56-8681-3fa9816a1b80\" (UID: \"4f54d316-aa4d-4c56-8681-3fa9816a1b80\") " Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.925926 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f54d316-aa4d-4c56-8681-3fa9816a1b80-logs\") pod \"4f54d316-aa4d-4c56-8681-3fa9816a1b80\" (UID: \"4f54d316-aa4d-4c56-8681-3fa9816a1b80\") " Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.925981 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-combined-ca-bundle\") pod \"fd1c632c-d2c0-4f55-9727-af2ffbe1feef\" (UID: \"fd1c632c-d2c0-4f55-9727-af2ffbe1feef\") " Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.926036 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7svjg\" (UniqueName: \"kubernetes.io/projected/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-kube-api-access-7svjg\") pod \"fd1c632c-d2c0-4f55-9727-af2ffbe1feef\" (UID: \"fd1c632c-d2c0-4f55-9727-af2ffbe1feef\") " Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.926078 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f54d316-aa4d-4c56-8681-3fa9816a1b80-config-data\") pod \"4f54d316-aa4d-4c56-8681-3fa9816a1b80\" (UID: \"4f54d316-aa4d-4c56-8681-3fa9816a1b80\") " Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.926094 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4f54d316-aa4d-4c56-8681-3fa9816a1b80-config-data-custom\") pod \"4f54d316-aa4d-4c56-8681-3fa9816a1b80\" (UID: \"4f54d316-aa4d-4c56-8681-3fa9816a1b80\") " Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.926115 4982 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-etc-machine-id\") pod \"fd1c632c-d2c0-4f55-9727-af2ffbe1feef\" (UID: \"fd1c632c-d2c0-4f55-9727-af2ffbe1feef\") " Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.926151 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-scripts\") pod \"fd1c632c-d2c0-4f55-9727-af2ffbe1feef\" (UID: \"fd1c632c-d2c0-4f55-9727-af2ffbe1feef\") " Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.926177 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-config-data\") pod \"fd1c632c-d2c0-4f55-9727-af2ffbe1feef\" (UID: \"fd1c632c-d2c0-4f55-9727-af2ffbe1feef\") " Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.926200 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-config-data-custom\") pod \"fd1c632c-d2c0-4f55-9727-af2ffbe1feef\" (UID: \"fd1c632c-d2c0-4f55-9727-af2ffbe1feef\") " Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.926444 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c1843cf-e6eb-400b-84ca-5e9d209a23ce-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.926455 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/681a16ff-1468-4a9a-a692-5461230072bd-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.926465 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c1843cf-e6eb-400b-84ca-5e9d209a23ce-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.926473 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pxbcs\" (UniqueName: \"kubernetes.io/projected/8c1843cf-e6eb-400b-84ca-5e9d209a23ce-kube-api-access-pxbcs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.926482 4982 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/29f503d7-a98b-4227-b9d9-865db16a2552-var-log-ovn\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.926490 4982 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/681a16ff-1468-4a9a-a692-5461230072bd-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.926497 4982 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/29f503d7-a98b-4227-b9d9-865db16a2552-var-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.926505 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c1843cf-e6eb-400b-84ca-5e9d209a23ce-logs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.926514 4982 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/681a16ff-1468-4a9a-a692-5461230072bd-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.926523 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/29f503d7-a98b-4227-b9d9-865db16a2552-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.926531 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nll4v\" (UniqueName: \"kubernetes.io/projected/29f503d7-a98b-4227-b9d9-865db16a2552-kube-api-access-nll4v\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.926539 4982 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/681a16ff-1468-4a9a-a692-5461230072bd-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.926547 4982 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/29f503d7-a98b-4227-b9d9-865db16a2552-var-run\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.926556 4982 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8c1843cf-e6eb-400b-84ca-5e9d209a23ce-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.926564 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nnb89\" (UniqueName: \"kubernetes.io/projected/681a16ff-1468-4a9a-a692-5461230072bd-kube-api-access-nnb89\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.926573 4982 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/681a16ff-1468-4a9a-a692-5461230072bd-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.926939 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "fd1c632c-d2c0-4f55-9727-af2ffbe1feef" (UID: "fd1c632c-d2c0-4f55-9727-af2ffbe1feef"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.930372 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29f503d7-a98b-4227-b9d9-865db16a2552-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "29f503d7-a98b-4227-b9d9-865db16a2552" (UID: "29f503d7-a98b-4227-b9d9-865db16a2552"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.930495 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f54d316-aa4d-4c56-8681-3fa9816a1b80-logs" (OuterVolumeSpecName: "logs") pod "4f54d316-aa4d-4c56-8681-3fa9816a1b80" (UID: "4f54d316-aa4d-4c56-8681-3fa9816a1b80"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.934964 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f54d316-aa4d-4c56-8681-3fa9816a1b80-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "4f54d316-aa4d-4c56-8681-3fa9816a1b80" (UID: "4f54d316-aa4d-4c56-8681-3fa9816a1b80"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.938626 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/681a16ff-1468-4a9a-a692-5461230072bd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "681a16ff-1468-4a9a-a692-5461230072bd" (UID: "681a16ff-1468-4a9a-a692-5461230072bd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.939150 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "fd1c632c-d2c0-4f55-9727-af2ffbe1feef" (UID: "fd1c632c-d2c0-4f55-9727-af2ffbe1feef"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.939215 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-scripts" (OuterVolumeSpecName: "scripts") pod "fd1c632c-d2c0-4f55-9727-af2ffbe1feef" (UID: "fd1c632c-d2c0-4f55-9727-af2ffbe1feef"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.939879 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f54d316-aa4d-4c56-8681-3fa9816a1b80-kube-api-access-g49hs" (OuterVolumeSpecName: "kube-api-access-g49hs") pod "4f54d316-aa4d-4c56-8681-3fa9816a1b80" (UID: "4f54d316-aa4d-4c56-8681-3fa9816a1b80"). InnerVolumeSpecName "kube-api-access-g49hs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.958071 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-kube-api-access-7svjg" (OuterVolumeSpecName: "kube-api-access-7svjg") pod "fd1c632c-d2c0-4f55-9727-af2ffbe1feef" (UID: "fd1c632c-d2c0-4f55-9727-af2ffbe1feef"). InnerVolumeSpecName "kube-api-access-7svjg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.975196 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f54d316-aa4d-4c56-8681-3fa9816a1b80-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4f54d316-aa4d-4c56-8681-3fa9816a1b80" (UID: "4f54d316-aa4d-4c56-8681-3fa9816a1b80"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.982778 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29f503d7-a98b-4227-b9d9-865db16a2552-ovn-controller-tls-certs" (OuterVolumeSpecName: "ovn-controller-tls-certs") pod "29f503d7-a98b-4227-b9d9-865db16a2552" (UID: "29f503d7-a98b-4227-b9d9-865db16a2552"). 
InnerVolumeSpecName "ovn-controller-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:42 crc kubenswrapper[4982]: I0122 06:10:42.985623 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.022109 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fd1c632c-d2c0-4f55-9727-af2ffbe1feef" (UID: "fd1c632c-d2c0-4f55-9727-af2ffbe1feef"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.022198 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/681a16ff-1468-4a9a-a692-5461230072bd-config-data" (OuterVolumeSpecName: "config-data") pod "681a16ff-1468-4a9a-a692-5461230072bd" (UID: "681a16ff-1468-4a9a-a692-5461230072bd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.027352 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-szzhp\" (UniqueName: \"kubernetes.io/projected/3d1d97fa-17f8-45ed-9881-5d3896c48708-kube-api-access-szzhp\") pod \"3d1d97fa-17f8-45ed-9881-5d3896c48708\" (UID: \"3d1d97fa-17f8-45ed-9881-5d3896c48708\") " Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.027458 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d1d97fa-17f8-45ed-9881-5d3896c48708-config-data\") pod \"3d1d97fa-17f8-45ed-9881-5d3896c48708\" (UID: \"3d1d97fa-17f8-45ed-9881-5d3896c48708\") " Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.027559 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d1d97fa-17f8-45ed-9881-5d3896c48708-combined-ca-bundle\") pod \"3d1d97fa-17f8-45ed-9881-5d3896c48708\" (UID: \"3d1d97fa-17f8-45ed-9881-5d3896c48708\") " Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.027810 4982 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4f54d316-aa4d-4c56-8681-3fa9816a1b80-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.027821 4982 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.027830 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.027838 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/681a16ff-1468-4a9a-a692-5461230072bd-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.027846 4982 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-config-data-custom\") on node \"crc\" DevicePath 
\"\"" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.027869 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/681a16ff-1468-4a9a-a692-5461230072bd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.027877 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f54d316-aa4d-4c56-8681-3fa9816a1b80-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.027885 4982 reconciler_common.go:293] "Volume detached for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/29f503d7-a98b-4227-b9d9-865db16a2552-ovn-controller-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.027894 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29f503d7-a98b-4227-b9d9-865db16a2552-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.027902 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g49hs\" (UniqueName: \"kubernetes.io/projected/4f54d316-aa4d-4c56-8681-3fa9816a1b80-kube-api-access-g49hs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.027911 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f54d316-aa4d-4c56-8681-3fa9816a1b80-logs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.027918 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.027926 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7svjg\" (UniqueName: \"kubernetes.io/projected/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-kube-api-access-7svjg\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.032375 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d1d97fa-17f8-45ed-9881-5d3896c48708-kube-api-access-szzhp" (OuterVolumeSpecName: "kube-api-access-szzhp") pod "3d1d97fa-17f8-45ed-9881-5d3896c48708" (UID: "3d1d97fa-17f8-45ed-9881-5d3896c48708"). InnerVolumeSpecName "kube-api-access-szzhp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.041741 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f54d316-aa4d-4c56-8681-3fa9816a1b80-config-data" (OuterVolumeSpecName: "config-data") pod "4f54d316-aa4d-4c56-8681-3fa9816a1b80" (UID: "4f54d316-aa4d-4c56-8681-3fa9816a1b80"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.052429 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d1d97fa-17f8-45ed-9881-5d3896c48708-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3d1d97fa-17f8-45ed-9881-5d3896c48708" (UID: "3d1d97fa-17f8-45ed-9881-5d3896c48708"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.052505 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d1d97fa-17f8-45ed-9881-5d3896c48708-config-data" (OuterVolumeSpecName: "config-data") pod "3d1d97fa-17f8-45ed-9881-5d3896c48708" (UID: "3d1d97fa-17f8-45ed-9881-5d3896c48708"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.067963 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-config-data" (OuterVolumeSpecName: "config-data") pod "fd1c632c-d2c0-4f55-9727-af2ffbe1feef" (UID: "fd1c632c-d2c0-4f55-9727-af2ffbe1feef"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.074331 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.125262 4982 generic.go:334] "Generic (PLEG): container finished" podID="4f54d316-aa4d-4c56-8681-3fa9816a1b80" containerID="b99c9484adad7dd532b67e3b233b444b504c8b16be567d270be5129f88c9b4ff" exitCode=0 Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.125307 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-59458f7b58-qd4fn" event={"ID":"4f54d316-aa4d-4c56-8681-3fa9816a1b80","Type":"ContainerDied","Data":"b99c9484adad7dd532b67e3b233b444b504c8b16be567d270be5129f88c9b4ff"} Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.125329 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-59458f7b58-qd4fn" event={"ID":"4f54d316-aa4d-4c56-8681-3fa9816a1b80","Type":"ContainerDied","Data":"78f518091f4664c3d8683cef6c0a28967b229c4ba09f65b3e47ad3e22a50ec4f"} Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.125346 4982 scope.go:117] "RemoveContainer" containerID="b99c9484adad7dd532b67e3b233b444b504c8b16be567d270be5129f88c9b4ff" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.125441 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-59458f7b58-qd4fn" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.128400 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bcc9f070-463f-4fef-8eb0-fd0cbe567f70-config-data\") pod \"bcc9f070-463f-4fef-8eb0-fd0cbe567f70\" (UID: \"bcc9f070-463f-4fef-8eb0-fd0cbe567f70\") " Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.128438 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcc9f070-463f-4fef-8eb0-fd0cbe567f70-combined-ca-bundle\") pod \"bcc9f070-463f-4fef-8eb0-fd0cbe567f70\" (UID: \"bcc9f070-463f-4fef-8eb0-fd0cbe567f70\") " Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.128489 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h69g8\" (UniqueName: \"kubernetes.io/projected/bcc9f070-463f-4fef-8eb0-fd0cbe567f70-kube-api-access-h69g8\") pod \"bcc9f070-463f-4fef-8eb0-fd0cbe567f70\" (UID: \"bcc9f070-463f-4fef-8eb0-fd0cbe567f70\") " Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.128741 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d1d97fa-17f8-45ed-9881-5d3896c48708-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.128755 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f54d316-aa4d-4c56-8681-3fa9816a1b80-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.128765 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-szzhp\" (UniqueName: \"kubernetes.io/projected/3d1d97fa-17f8-45ed-9881-5d3896c48708-kube-api-access-szzhp\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.128776 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd1c632c-d2c0-4f55-9727-af2ffbe1feef-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.128784 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d1d97fa-17f8-45ed-9881-5d3896c48708-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.131366 4982 generic.go:334] "Generic (PLEG): container finished" podID="bcc9f070-463f-4fef-8eb0-fd0cbe567f70" containerID="cdcccc69427ee2d1728f8e7d6f59420bebf9be65e0a9061fee307038b83d9cb6" exitCode=0 Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.131413 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"bcc9f070-463f-4fef-8eb0-fd0cbe567f70","Type":"ContainerDied","Data":"cdcccc69427ee2d1728f8e7d6f59420bebf9be65e0a9061fee307038b83d9cb6"} Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.131619 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"bcc9f070-463f-4fef-8eb0-fd0cbe567f70","Type":"ContainerDied","Data":"94a60ea41cc7f0a5b0f5b03abc16266b04dfa37d58bb427c2a16053fcccae0cc"} Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.131678 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.131990 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bcc9f070-463f-4fef-8eb0-fd0cbe567f70-kube-api-access-h69g8" (OuterVolumeSpecName: "kube-api-access-h69g8") pod "bcc9f070-463f-4fef-8eb0-fd0cbe567f70" (UID: "bcc9f070-463f-4fef-8eb0-fd0cbe567f70"). InnerVolumeSpecName "kube-api-access-h69g8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.136585 4982 generic.go:334] "Generic (PLEG): container finished" podID="681a16ff-1468-4a9a-a692-5461230072bd" containerID="18ecffa2d4497e293839524f6ed5f32c178f24e5d755feed0ffbc3b6d03460b6" exitCode=0 Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.136688 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"681a16ff-1468-4a9a-a692-5461230072bd","Type":"ContainerDied","Data":"18ecffa2d4497e293839524f6ed5f32c178f24e5d755feed0ffbc3b6d03460b6"} Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.136716 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"681a16ff-1468-4a9a-a692-5461230072bd","Type":"ContainerDied","Data":"d96692ab4a1d200bcbd8c10460467ca5d15d0c37b9022e28e18ea1cad13649e0"} Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.136781 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.141402 4982 generic.go:334] "Generic (PLEG): container finished" podID="8c1843cf-e6eb-400b-84ca-5e9d209a23ce" containerID="ed8c2ad87b2789213c37313187684e228958de9f203b39f44ef8e21f33b5470e" exitCode=0 Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.141475 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-796946d7c7-z4qnr" event={"ID":"8c1843cf-e6eb-400b-84ca-5e9d209a23ce","Type":"ContainerDied","Data":"ed8c2ad87b2789213c37313187684e228958de9f203b39f44ef8e21f33b5470e"} Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.141498 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-796946d7c7-z4qnr" event={"ID":"8c1843cf-e6eb-400b-84ca-5e9d209a23ce","Type":"ContainerDied","Data":"7cb8b3b221ac3fa310c5c2ceedbd30e044b5963b2d92005e8a6ef9f1a1895229"} Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.141565 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-796946d7c7-z4qnr" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.147592 4982 generic.go:334] "Generic (PLEG): container finished" podID="fd1c632c-d2c0-4f55-9727-af2ffbe1feef" containerID="f9572435d6750a8ad99f0d2024d200987deec76a44ffc144a2134979ec9cd427" exitCode=0 Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.147640 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"fd1c632c-d2c0-4f55-9727-af2ffbe1feef","Type":"ContainerDied","Data":"f9572435d6750a8ad99f0d2024d200987deec76a44ffc144a2134979ec9cd427"} Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.147659 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"fd1c632c-d2c0-4f55-9727-af2ffbe1feef","Type":"ContainerDied","Data":"39f411a18af3d736e8af0acfe927d92c3d3a879ecfeeffc9f57f44de77742d9e"} Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.147705 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.150529 4982 generic.go:334] "Generic (PLEG): container finished" podID="3d1d97fa-17f8-45ed-9881-5d3896c48708" containerID="d529c70b3892ffd04ffa9e4ea7c7bd75cded5f79ec42a50c31cd18dce68669fd" exitCode=0 Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.150585 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"3d1d97fa-17f8-45ed-9881-5d3896c48708","Type":"ContainerDied","Data":"d529c70b3892ffd04ffa9e4ea7c7bd75cded5f79ec42a50c31cd18dce68669fd"} Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.150610 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"3d1d97fa-17f8-45ed-9881-5d3896c48708","Type":"ContainerDied","Data":"1d43204a03b40042a47444008e962bdbba192494f6b0fd1b3263fa0bad4ceaae"} Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.150660 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.155023 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-4z48g_29f503d7-a98b-4227-b9d9-865db16a2552/ovn-controller/0.log" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.155071 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4z48g" event={"ID":"29f503d7-a98b-4227-b9d9-865db16a2552","Type":"ContainerDied","Data":"ed61b64b0d0ead28c0e517f76c165751287f4dbabc0f8cb505f59fcbad460db5"} Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.155142 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4z48g" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.156721 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bcc9f070-463f-4fef-8eb0-fd0cbe567f70-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bcc9f070-463f-4fef-8eb0-fd0cbe567f70" (UID: "bcc9f070-463f-4fef-8eb0-fd0cbe567f70"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.158740 4982 scope.go:117] "RemoveContainer" containerID="56701f933f64ebb2c234cb9d8a246c283c1bc8d3adb5dc9723fc632714881b98" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.161655 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bcc9f070-463f-4fef-8eb0-fd0cbe567f70-config-data" (OuterVolumeSpecName: "config-data") pod "bcc9f070-463f-4fef-8eb0-fd0cbe567f70" (UID: "bcc9f070-463f-4fef-8eb0-fd0cbe567f70"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.178889 4982 scope.go:117] "RemoveContainer" containerID="b99c9484adad7dd532b67e3b233b444b504c8b16be567d270be5129f88c9b4ff" Jan 22 06:10:43 crc kubenswrapper[4982]: E0122 06:10:43.179420 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b99c9484adad7dd532b67e3b233b444b504c8b16be567d270be5129f88c9b4ff\": container with ID starting with b99c9484adad7dd532b67e3b233b444b504c8b16be567d270be5129f88c9b4ff not found: ID does not exist" containerID="b99c9484adad7dd532b67e3b233b444b504c8b16be567d270be5129f88c9b4ff" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.179466 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b99c9484adad7dd532b67e3b233b444b504c8b16be567d270be5129f88c9b4ff"} err="failed to get container status \"b99c9484adad7dd532b67e3b233b444b504c8b16be567d270be5129f88c9b4ff\": rpc error: code = NotFound desc = could not find container \"b99c9484adad7dd532b67e3b233b444b504c8b16be567d270be5129f88c9b4ff\": container with ID starting with b99c9484adad7dd532b67e3b233b444b504c8b16be567d270be5129f88c9b4ff not found: ID does not exist" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.179491 4982 scope.go:117] "RemoveContainer" containerID="56701f933f64ebb2c234cb9d8a246c283c1bc8d3adb5dc9723fc632714881b98" Jan 22 06:10:43 crc kubenswrapper[4982]: E0122 06:10:43.179790 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56701f933f64ebb2c234cb9d8a246c283c1bc8d3adb5dc9723fc632714881b98\": container with ID starting with 56701f933f64ebb2c234cb9d8a246c283c1bc8d3adb5dc9723fc632714881b98 not found: ID does not exist" containerID="56701f933f64ebb2c234cb9d8a246c283c1bc8d3adb5dc9723fc632714881b98" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.179814 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56701f933f64ebb2c234cb9d8a246c283c1bc8d3adb5dc9723fc632714881b98"} err="failed to get container status \"56701f933f64ebb2c234cb9d8a246c283c1bc8d3adb5dc9723fc632714881b98\": rpc error: code = NotFound desc = could not find container \"56701f933f64ebb2c234cb9d8a246c283c1bc8d3adb5dc9723fc632714881b98\": container with ID starting with 56701f933f64ebb2c234cb9d8a246c283c1bc8d3adb5dc9723fc632714881b98 not found: ID does not exist" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.179827 4982 scope.go:117] "RemoveContainer" containerID="cdcccc69427ee2d1728f8e7d6f59420bebf9be65e0a9061fee307038b83d9cb6" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.180762 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.186295 4982 kubelet.go:2431] "SyncLoop 
REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.199458 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.202528 4982 scope.go:117] "RemoveContainer" containerID="cdcccc69427ee2d1728f8e7d6f59420bebf9be65e0a9061fee307038b83d9cb6" Jan 22 06:10:43 crc kubenswrapper[4982]: E0122 06:10:43.202979 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cdcccc69427ee2d1728f8e7d6f59420bebf9be65e0a9061fee307038b83d9cb6\": container with ID starting with cdcccc69427ee2d1728f8e7d6f59420bebf9be65e0a9061fee307038b83d9cb6 not found: ID does not exist" containerID="cdcccc69427ee2d1728f8e7d6f59420bebf9be65e0a9061fee307038b83d9cb6" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.203008 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cdcccc69427ee2d1728f8e7d6f59420bebf9be65e0a9061fee307038b83d9cb6"} err="failed to get container status \"cdcccc69427ee2d1728f8e7d6f59420bebf9be65e0a9061fee307038b83d9cb6\": rpc error: code = NotFound desc = could not find container \"cdcccc69427ee2d1728f8e7d6f59420bebf9be65e0a9061fee307038b83d9cb6\": container with ID starting with cdcccc69427ee2d1728f8e7d6f59420bebf9be65e0a9061fee307038b83d9cb6 not found: ID does not exist" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.203031 4982 scope.go:117] "RemoveContainer" containerID="fc3d99dc9caa4c46c23d4707378cbddbf32648bc1c82450d35d566fe03f67374" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.204547 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.217840 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-4z48g"] Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.226439 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-4z48g"] Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.229738 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bcc9f070-463f-4fef-8eb0-fd0cbe567f70-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.229775 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcc9f070-463f-4fef-8eb0-fd0cbe567f70-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.229789 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h69g8\" (UniqueName: \"kubernetes.io/projected/bcc9f070-463f-4fef-8eb0-fd0cbe567f70-kube-api-access-h69g8\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.232508 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.236280 4982 scope.go:117] "RemoveContainer" containerID="140d8add2f50c3a875e31d6ba85286481b8d656646014149c174dd7fcd703594" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.240350 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.246434 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/barbican-keystone-listener-59458f7b58-qd4fn"] Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.252568 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-keystone-listener-59458f7b58-qd4fn"] Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.256892 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-796946d7c7-z4qnr"] Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.260106 4982 scope.go:117] "RemoveContainer" containerID="18ecffa2d4497e293839524f6ed5f32c178f24e5d755feed0ffbc3b6d03460b6" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.261053 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-796946d7c7-z4qnr"] Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.277573 4982 scope.go:117] "RemoveContainer" containerID="7f2cbf5fe89ef897372c71697b853e4151f7bdaf7d9e22d86ada81c4c27538fd" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.297244 4982 scope.go:117] "RemoveContainer" containerID="fc3d99dc9caa4c46c23d4707378cbddbf32648bc1c82450d35d566fe03f67374" Jan 22 06:10:43 crc kubenswrapper[4982]: E0122 06:10:43.297490 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc3d99dc9caa4c46c23d4707378cbddbf32648bc1c82450d35d566fe03f67374\": container with ID starting with fc3d99dc9caa4c46c23d4707378cbddbf32648bc1c82450d35d566fe03f67374 not found: ID does not exist" containerID="fc3d99dc9caa4c46c23d4707378cbddbf32648bc1c82450d35d566fe03f67374" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.297519 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc3d99dc9caa4c46c23d4707378cbddbf32648bc1c82450d35d566fe03f67374"} err="failed to get container status \"fc3d99dc9caa4c46c23d4707378cbddbf32648bc1c82450d35d566fe03f67374\": rpc error: code = NotFound desc = could not find container \"fc3d99dc9caa4c46c23d4707378cbddbf32648bc1c82450d35d566fe03f67374\": container with ID starting with fc3d99dc9caa4c46c23d4707378cbddbf32648bc1c82450d35d566fe03f67374 not found: ID does not exist" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.297539 4982 scope.go:117] "RemoveContainer" containerID="140d8add2f50c3a875e31d6ba85286481b8d656646014149c174dd7fcd703594" Jan 22 06:10:43 crc kubenswrapper[4982]: E0122 06:10:43.297893 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"140d8add2f50c3a875e31d6ba85286481b8d656646014149c174dd7fcd703594\": container with ID starting with 140d8add2f50c3a875e31d6ba85286481b8d656646014149c174dd7fcd703594 not found: ID does not exist" containerID="140d8add2f50c3a875e31d6ba85286481b8d656646014149c174dd7fcd703594" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.297914 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"140d8add2f50c3a875e31d6ba85286481b8d656646014149c174dd7fcd703594"} err="failed to get container status \"140d8add2f50c3a875e31d6ba85286481b8d656646014149c174dd7fcd703594\": rpc error: code = NotFound desc = could not find container \"140d8add2f50c3a875e31d6ba85286481b8d656646014149c174dd7fcd703594\": container with ID starting with 140d8add2f50c3a875e31d6ba85286481b8d656646014149c174dd7fcd703594 not found: ID does not exist" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.297927 4982 scope.go:117] "RemoveContainer" 
containerID="18ecffa2d4497e293839524f6ed5f32c178f24e5d755feed0ffbc3b6d03460b6" Jan 22 06:10:43 crc kubenswrapper[4982]: E0122 06:10:43.298404 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18ecffa2d4497e293839524f6ed5f32c178f24e5d755feed0ffbc3b6d03460b6\": container with ID starting with 18ecffa2d4497e293839524f6ed5f32c178f24e5d755feed0ffbc3b6d03460b6 not found: ID does not exist" containerID="18ecffa2d4497e293839524f6ed5f32c178f24e5d755feed0ffbc3b6d03460b6" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.298466 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18ecffa2d4497e293839524f6ed5f32c178f24e5d755feed0ffbc3b6d03460b6"} err="failed to get container status \"18ecffa2d4497e293839524f6ed5f32c178f24e5d755feed0ffbc3b6d03460b6\": rpc error: code = NotFound desc = could not find container \"18ecffa2d4497e293839524f6ed5f32c178f24e5d755feed0ffbc3b6d03460b6\": container with ID starting with 18ecffa2d4497e293839524f6ed5f32c178f24e5d755feed0ffbc3b6d03460b6 not found: ID does not exist" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.298500 4982 scope.go:117] "RemoveContainer" containerID="7f2cbf5fe89ef897372c71697b853e4151f7bdaf7d9e22d86ada81c4c27538fd" Jan 22 06:10:43 crc kubenswrapper[4982]: E0122 06:10:43.298800 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f2cbf5fe89ef897372c71697b853e4151f7bdaf7d9e22d86ada81c4c27538fd\": container with ID starting with 7f2cbf5fe89ef897372c71697b853e4151f7bdaf7d9e22d86ada81c4c27538fd not found: ID does not exist" containerID="7f2cbf5fe89ef897372c71697b853e4151f7bdaf7d9e22d86ada81c4c27538fd" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.298900 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f2cbf5fe89ef897372c71697b853e4151f7bdaf7d9e22d86ada81c4c27538fd"} err="failed to get container status \"7f2cbf5fe89ef897372c71697b853e4151f7bdaf7d9e22d86ada81c4c27538fd\": rpc error: code = NotFound desc = could not find container \"7f2cbf5fe89ef897372c71697b853e4151f7bdaf7d9e22d86ada81c4c27538fd\": container with ID starting with 7f2cbf5fe89ef897372c71697b853e4151f7bdaf7d9e22d86ada81c4c27538fd not found: ID does not exist" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.298947 4982 scope.go:117] "RemoveContainer" containerID="ed8c2ad87b2789213c37313187684e228958de9f203b39f44ef8e21f33b5470e" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.318909 4982 scope.go:117] "RemoveContainer" containerID="b0d30b764842fa8c34eb34bdaa21132a3490627329e40e68bdfc09e720158ae1" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.358805 4982 scope.go:117] "RemoveContainer" containerID="ed8c2ad87b2789213c37313187684e228958de9f203b39f44ef8e21f33b5470e" Jan 22 06:10:43 crc kubenswrapper[4982]: E0122 06:10:43.359456 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed8c2ad87b2789213c37313187684e228958de9f203b39f44ef8e21f33b5470e\": container with ID starting with ed8c2ad87b2789213c37313187684e228958de9f203b39f44ef8e21f33b5470e not found: ID does not exist" containerID="ed8c2ad87b2789213c37313187684e228958de9f203b39f44ef8e21f33b5470e" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.359503 4982 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"ed8c2ad87b2789213c37313187684e228958de9f203b39f44ef8e21f33b5470e"} err="failed to get container status \"ed8c2ad87b2789213c37313187684e228958de9f203b39f44ef8e21f33b5470e\": rpc error: code = NotFound desc = could not find container \"ed8c2ad87b2789213c37313187684e228958de9f203b39f44ef8e21f33b5470e\": container with ID starting with ed8c2ad87b2789213c37313187684e228958de9f203b39f44ef8e21f33b5470e not found: ID does not exist" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.359612 4982 scope.go:117] "RemoveContainer" containerID="b0d30b764842fa8c34eb34bdaa21132a3490627329e40e68bdfc09e720158ae1" Jan 22 06:10:43 crc kubenswrapper[4982]: E0122 06:10:43.360029 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0d30b764842fa8c34eb34bdaa21132a3490627329e40e68bdfc09e720158ae1\": container with ID starting with b0d30b764842fa8c34eb34bdaa21132a3490627329e40e68bdfc09e720158ae1 not found: ID does not exist" containerID="b0d30b764842fa8c34eb34bdaa21132a3490627329e40e68bdfc09e720158ae1" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.360068 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0d30b764842fa8c34eb34bdaa21132a3490627329e40e68bdfc09e720158ae1"} err="failed to get container status \"b0d30b764842fa8c34eb34bdaa21132a3490627329e40e68bdfc09e720158ae1\": rpc error: code = NotFound desc = could not find container \"b0d30b764842fa8c34eb34bdaa21132a3490627329e40e68bdfc09e720158ae1\": container with ID starting with b0d30b764842fa8c34eb34bdaa21132a3490627329e40e68bdfc09e720158ae1 not found: ID does not exist" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.360094 4982 scope.go:117] "RemoveContainer" containerID="a568fc05e6d1bc9ef6cd2254326be3e07d0040651777d6881228c612c03c305d" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.390655 4982 scope.go:117] "RemoveContainer" containerID="f9572435d6750a8ad99f0d2024d200987deec76a44ffc144a2134979ec9cd427" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.414649 4982 scope.go:117] "RemoveContainer" containerID="a568fc05e6d1bc9ef6cd2254326be3e07d0040651777d6881228c612c03c305d" Jan 22 06:10:43 crc kubenswrapper[4982]: E0122 06:10:43.415157 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a568fc05e6d1bc9ef6cd2254326be3e07d0040651777d6881228c612c03c305d\": container with ID starting with a568fc05e6d1bc9ef6cd2254326be3e07d0040651777d6881228c612c03c305d not found: ID does not exist" containerID="a568fc05e6d1bc9ef6cd2254326be3e07d0040651777d6881228c612c03c305d" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.415201 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a568fc05e6d1bc9ef6cd2254326be3e07d0040651777d6881228c612c03c305d"} err="failed to get container status \"a568fc05e6d1bc9ef6cd2254326be3e07d0040651777d6881228c612c03c305d\": rpc error: code = NotFound desc = could not find container \"a568fc05e6d1bc9ef6cd2254326be3e07d0040651777d6881228c612c03c305d\": container with ID starting with a568fc05e6d1bc9ef6cd2254326be3e07d0040651777d6881228c612c03c305d not found: ID does not exist" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.415251 4982 scope.go:117] "RemoveContainer" containerID="f9572435d6750a8ad99f0d2024d200987deec76a44ffc144a2134979ec9cd427" Jan 22 06:10:43 crc kubenswrapper[4982]: E0122 06:10:43.415563 4982 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9572435d6750a8ad99f0d2024d200987deec76a44ffc144a2134979ec9cd427\": container with ID starting with f9572435d6750a8ad99f0d2024d200987deec76a44ffc144a2134979ec9cd427 not found: ID does not exist" containerID="f9572435d6750a8ad99f0d2024d200987deec76a44ffc144a2134979ec9cd427" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.415618 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9572435d6750a8ad99f0d2024d200987deec76a44ffc144a2134979ec9cd427"} err="failed to get container status \"f9572435d6750a8ad99f0d2024d200987deec76a44ffc144a2134979ec9cd427\": rpc error: code = NotFound desc = could not find container \"f9572435d6750a8ad99f0d2024d200987deec76a44ffc144a2134979ec9cd427\": container with ID starting with f9572435d6750a8ad99f0d2024d200987deec76a44ffc144a2134979ec9cd427 not found: ID does not exist" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.415654 4982 scope.go:117] "RemoveContainer" containerID="d529c70b3892ffd04ffa9e4ea7c7bd75cded5f79ec42a50c31cd18dce68669fd" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.442597 4982 scope.go:117] "RemoveContainer" containerID="d529c70b3892ffd04ffa9e4ea7c7bd75cded5f79ec42a50c31cd18dce68669fd" Jan 22 06:10:43 crc kubenswrapper[4982]: E0122 06:10:43.443029 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d529c70b3892ffd04ffa9e4ea7c7bd75cded5f79ec42a50c31cd18dce68669fd\": container with ID starting with d529c70b3892ffd04ffa9e4ea7c7bd75cded5f79ec42a50c31cd18dce68669fd not found: ID does not exist" containerID="d529c70b3892ffd04ffa9e4ea7c7bd75cded5f79ec42a50c31cd18dce68669fd" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.443061 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d529c70b3892ffd04ffa9e4ea7c7bd75cded5f79ec42a50c31cd18dce68669fd"} err="failed to get container status \"d529c70b3892ffd04ffa9e4ea7c7bd75cded5f79ec42a50c31cd18dce68669fd\": rpc error: code = NotFound desc = could not find container \"d529c70b3892ffd04ffa9e4ea7c7bd75cded5f79ec42a50c31cd18dce68669fd\": container with ID starting with d529c70b3892ffd04ffa9e4ea7c7bd75cded5f79ec42a50c31cd18dce68669fd not found: ID does not exist" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.443083 4982 scope.go:117] "RemoveContainer" containerID="e5c83d320db754803d3204ed17858058cd2a48d175560794c946abe904478eb5" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.463864 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.474222 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.749328 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29f503d7-a98b-4227-b9d9-865db16a2552" path="/var/lib/kubelet/pods/29f503d7-a98b-4227-b9d9-865db16a2552/volumes" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.749876 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d1d97fa-17f8-45ed-9881-5d3896c48708" path="/var/lib/kubelet/pods/3d1d97fa-17f8-45ed-9881-5d3896c48708/volumes" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.750432 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="4f54d316-aa4d-4c56-8681-3fa9816a1b80" path="/var/lib/kubelet/pods/4f54d316-aa4d-4c56-8681-3fa9816a1b80/volumes" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.751506 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="681a16ff-1468-4a9a-a692-5461230072bd" path="/var/lib/kubelet/pods/681a16ff-1468-4a9a-a692-5461230072bd/volumes" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.752425 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7623a66b-412d-4202-bd05-58fba1c6a3d3" path="/var/lib/kubelet/pods/7623a66b-412d-4202-bd05-58fba1c6a3d3/volumes" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.753519 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c1843cf-e6eb-400b-84ca-5e9d209a23ce" path="/var/lib/kubelet/pods/8c1843cf-e6eb-400b-84ca-5e9d209a23ce/volumes" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.754376 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb198f24-94e6-4569-be12-9ee57000a3e3" path="/var/lib/kubelet/pods/bb198f24-94e6-4569-be12-9ee57000a3e3/volumes" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.754911 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bcc9f070-463f-4fef-8eb0-fd0cbe567f70" path="/var/lib/kubelet/pods/bcc9f070-463f-4fef-8eb0-fd0cbe567f70/volumes" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.756095 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9f36aaa-d627-4dfe-ab21-b5bedee0a25c" path="/var/lib/kubelet/pods/e9f36aaa-d627-4dfe-ab21-b5bedee0a25c/volumes" Jan 22 06:10:43 crc kubenswrapper[4982]: I0122 06:10:43.756535 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd1c632c-d2c0-4f55-9727-af2ffbe1feef" path="/var/lib/kubelet/pods/fd1c632c-d2c0-4f55-9727-af2ffbe1feef/volumes" Jan 22 06:10:46 crc kubenswrapper[4982]: E0122 06:10:46.089289 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b is running failed: container process not found" containerID="8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 06:10:46 crc kubenswrapper[4982]: E0122 06:10:46.090165 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b is running failed: container process not found" containerID="8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 06:10:46 crc kubenswrapper[4982]: E0122 06:10:46.090771 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b is running failed: container process not found" containerID="8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 06:10:46 crc kubenswrapper[4982]: E0122 06:10:46.090821 4982 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 
8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-74zx8" podUID="e8478549-adf2-4e04-b404-f4882ed405d5" containerName="ovsdb-server" Jan 22 06:10:46 crc kubenswrapper[4982]: E0122 06:10:46.095315 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d07fbec73b82d46cf6bca3ce65be1c95f30a0ec4db97b397ade4d734c12b4c66" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 06:10:46 crc kubenswrapper[4982]: E0122 06:10:46.102115 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d07fbec73b82d46cf6bca3ce65be1c95f30a0ec4db97b397ade4d734c12b4c66" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 06:10:46 crc kubenswrapper[4982]: E0122 06:10:46.103997 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d07fbec73b82d46cf6bca3ce65be1c95f30a0ec4db97b397ade4d734c12b4c66" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 06:10:46 crc kubenswrapper[4982]: E0122 06:10:46.104054 4982 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-74zx8" podUID="e8478549-adf2-4e04-b404-f4882ed405d5" containerName="ovs-vswitchd" Jan 22 06:10:49 crc kubenswrapper[4982]: I0122 06:10:49.720622 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-f9754c45-wzfk5" Jan 22 06:10:49 crc kubenswrapper[4982]: I0122 06:10:49.866047 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-combined-ca-bundle\") pod \"b6593811-6583-4900-b402-5af9db3887b3\" (UID: \"b6593811-6583-4900-b402-5af9db3887b3\") " Jan 22 06:10:49 crc kubenswrapper[4982]: I0122 06:10:49.866125 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-httpd-config\") pod \"b6593811-6583-4900-b402-5af9db3887b3\" (UID: \"b6593811-6583-4900-b402-5af9db3887b3\") " Jan 22 06:10:49 crc kubenswrapper[4982]: I0122 06:10:49.866180 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-config\") pod \"b6593811-6583-4900-b402-5af9db3887b3\" (UID: \"b6593811-6583-4900-b402-5af9db3887b3\") " Jan 22 06:10:49 crc kubenswrapper[4982]: I0122 06:10:49.866218 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-public-tls-certs\") pod \"b6593811-6583-4900-b402-5af9db3887b3\" (UID: \"b6593811-6583-4900-b402-5af9db3887b3\") " Jan 22 06:10:49 crc kubenswrapper[4982]: I0122 06:10:49.866234 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-prfjm\" (UniqueName: \"kubernetes.io/projected/b6593811-6583-4900-b402-5af9db3887b3-kube-api-access-prfjm\") pod \"b6593811-6583-4900-b402-5af9db3887b3\" (UID: \"b6593811-6583-4900-b402-5af9db3887b3\") " Jan 22 06:10:49 crc kubenswrapper[4982]: I0122 06:10:49.866278 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-ovndb-tls-certs\") pod \"b6593811-6583-4900-b402-5af9db3887b3\" (UID: \"b6593811-6583-4900-b402-5af9db3887b3\") " Jan 22 06:10:49 crc kubenswrapper[4982]: I0122 06:10:49.866325 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-internal-tls-certs\") pod \"b6593811-6583-4900-b402-5af9db3887b3\" (UID: \"b6593811-6583-4900-b402-5af9db3887b3\") " Jan 22 06:10:49 crc kubenswrapper[4982]: I0122 06:10:49.872801 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6593811-6583-4900-b402-5af9db3887b3-kube-api-access-prfjm" (OuterVolumeSpecName: "kube-api-access-prfjm") pod "b6593811-6583-4900-b402-5af9db3887b3" (UID: "b6593811-6583-4900-b402-5af9db3887b3"). InnerVolumeSpecName "kube-api-access-prfjm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:10:49 crc kubenswrapper[4982]: I0122 06:10:49.874504 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "b6593811-6583-4900-b402-5af9db3887b3" (UID: "b6593811-6583-4900-b402-5af9db3887b3"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:49 crc kubenswrapper[4982]: I0122 06:10:49.927080 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-config" (OuterVolumeSpecName: "config") pod "b6593811-6583-4900-b402-5af9db3887b3" (UID: "b6593811-6583-4900-b402-5af9db3887b3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:49 crc kubenswrapper[4982]: I0122 06:10:49.932086 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "b6593811-6583-4900-b402-5af9db3887b3" (UID: "b6593811-6583-4900-b402-5af9db3887b3"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:49 crc kubenswrapper[4982]: I0122 06:10:49.934875 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b6593811-6583-4900-b402-5af9db3887b3" (UID: "b6593811-6583-4900-b402-5af9db3887b3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:49 crc kubenswrapper[4982]: I0122 06:10:49.944836 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "b6593811-6583-4900-b402-5af9db3887b3" (UID: "b6593811-6583-4900-b402-5af9db3887b3"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:49 crc kubenswrapper[4982]: I0122 06:10:49.945111 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "b6593811-6583-4900-b402-5af9db3887b3" (UID: "b6593811-6583-4900-b402-5af9db3887b3"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:10:49 crc kubenswrapper[4982]: I0122 06:10:49.968887 4982 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:49 crc kubenswrapper[4982]: I0122 06:10:49.969194 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:49 crc kubenswrapper[4982]: I0122 06:10:49.969359 4982 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:49 crc kubenswrapper[4982]: I0122 06:10:49.969516 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-config\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:49 crc kubenswrapper[4982]: I0122 06:10:49.969622 4982 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:49 crc kubenswrapper[4982]: I0122 06:10:49.969725 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-prfjm\" (UniqueName: \"kubernetes.io/projected/b6593811-6583-4900-b402-5af9db3887b3-kube-api-access-prfjm\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:49 crc kubenswrapper[4982]: I0122 06:10:49.969828 4982 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6593811-6583-4900-b402-5af9db3887b3-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 06:10:50 crc kubenswrapper[4982]: I0122 06:10:50.235324 4982 generic.go:334] "Generic (PLEG): container finished" podID="b6593811-6583-4900-b402-5af9db3887b3" containerID="52c43ed6ae44082331dda538371f368b195ae17eec22b655bead980b3261aae5" exitCode=0 Jan 22 06:10:50 crc kubenswrapper[4982]: I0122 06:10:50.235410 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-f9754c45-wzfk5" Jan 22 06:10:50 crc kubenswrapper[4982]: I0122 06:10:50.235418 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f9754c45-wzfk5" event={"ID":"b6593811-6583-4900-b402-5af9db3887b3","Type":"ContainerDied","Data":"52c43ed6ae44082331dda538371f368b195ae17eec22b655bead980b3261aae5"} Jan 22 06:10:50 crc kubenswrapper[4982]: I0122 06:10:50.235977 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f9754c45-wzfk5" event={"ID":"b6593811-6583-4900-b402-5af9db3887b3","Type":"ContainerDied","Data":"c527eec1bdca10c551098f56f23a4e6dab9f4a03e9aaeca03064606b28a6628e"} Jan 22 06:10:50 crc kubenswrapper[4982]: I0122 06:10:50.236011 4982 scope.go:117] "RemoveContainer" containerID="f1b86ad014feaae9b9be0659bf2c7dfe329f6b9ea2485bf696dc7ce0d69d2fd1" Jan 22 06:10:50 crc kubenswrapper[4982]: I0122 06:10:50.273682 4982 scope.go:117] "RemoveContainer" containerID="52c43ed6ae44082331dda538371f368b195ae17eec22b655bead980b3261aae5" Jan 22 06:10:50 crc kubenswrapper[4982]: I0122 06:10:50.282512 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-f9754c45-wzfk5"] Jan 22 06:10:50 crc kubenswrapper[4982]: I0122 06:10:50.289172 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-f9754c45-wzfk5"] Jan 22 06:10:50 crc kubenswrapper[4982]: I0122 06:10:50.303322 4982 scope.go:117] "RemoveContainer" containerID="f1b86ad014feaae9b9be0659bf2c7dfe329f6b9ea2485bf696dc7ce0d69d2fd1" Jan 22 06:10:50 crc kubenswrapper[4982]: E0122 06:10:50.303754 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1b86ad014feaae9b9be0659bf2c7dfe329f6b9ea2485bf696dc7ce0d69d2fd1\": container with ID starting with f1b86ad014feaae9b9be0659bf2c7dfe329f6b9ea2485bf696dc7ce0d69d2fd1 not found: ID does not exist" containerID="f1b86ad014feaae9b9be0659bf2c7dfe329f6b9ea2485bf696dc7ce0d69d2fd1" Jan 22 06:10:50 crc kubenswrapper[4982]: I0122 06:10:50.303956 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1b86ad014feaae9b9be0659bf2c7dfe329f6b9ea2485bf696dc7ce0d69d2fd1"} err="failed to get container status \"f1b86ad014feaae9b9be0659bf2c7dfe329f6b9ea2485bf696dc7ce0d69d2fd1\": rpc error: code = NotFound desc = could not find container \"f1b86ad014feaae9b9be0659bf2c7dfe329f6b9ea2485bf696dc7ce0d69d2fd1\": container with ID starting with f1b86ad014feaae9b9be0659bf2c7dfe329f6b9ea2485bf696dc7ce0d69d2fd1 not found: ID does not exist" Jan 22 06:10:50 crc kubenswrapper[4982]: I0122 06:10:50.304087 4982 scope.go:117] "RemoveContainer" containerID="52c43ed6ae44082331dda538371f368b195ae17eec22b655bead980b3261aae5" Jan 22 06:10:50 crc kubenswrapper[4982]: E0122 06:10:50.304611 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"52c43ed6ae44082331dda538371f368b195ae17eec22b655bead980b3261aae5\": container with ID starting with 52c43ed6ae44082331dda538371f368b195ae17eec22b655bead980b3261aae5 not found: ID does not exist" containerID="52c43ed6ae44082331dda538371f368b195ae17eec22b655bead980b3261aae5" Jan 22 06:10:50 crc kubenswrapper[4982]: I0122 06:10:50.304643 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52c43ed6ae44082331dda538371f368b195ae17eec22b655bead980b3261aae5"} err="failed to get container status 
\"52c43ed6ae44082331dda538371f368b195ae17eec22b655bead980b3261aae5\": rpc error: code = NotFound desc = could not find container \"52c43ed6ae44082331dda538371f368b195ae17eec22b655bead980b3261aae5\": container with ID starting with 52c43ed6ae44082331dda538371f368b195ae17eec22b655bead980b3261aae5 not found: ID does not exist" Jan 22 06:10:51 crc kubenswrapper[4982]: E0122 06:10:51.088544 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b is running failed: container process not found" containerID="8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 06:10:51 crc kubenswrapper[4982]: E0122 06:10:51.089226 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b is running failed: container process not found" containerID="8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 06:10:51 crc kubenswrapper[4982]: E0122 06:10:51.089541 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b is running failed: container process not found" containerID="8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 06:10:51 crc kubenswrapper[4982]: E0122 06:10:51.089598 4982 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-74zx8" podUID="e8478549-adf2-4e04-b404-f4882ed405d5" containerName="ovsdb-server" Jan 22 06:10:51 crc kubenswrapper[4982]: E0122 06:10:51.090190 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d07fbec73b82d46cf6bca3ce65be1c95f30a0ec4db97b397ade4d734c12b4c66" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 06:10:51 crc kubenswrapper[4982]: E0122 06:10:51.092645 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d07fbec73b82d46cf6bca3ce65be1c95f30a0ec4db97b397ade4d734c12b4c66" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 06:10:51 crc kubenswrapper[4982]: E0122 06:10:51.099056 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d07fbec73b82d46cf6bca3ce65be1c95f30a0ec4db97b397ade4d734c12b4c66" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 06:10:51 crc kubenswrapper[4982]: E0122 06:10:51.099135 4982 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command 
error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-74zx8" podUID="e8478549-adf2-4e04-b404-f4882ed405d5" containerName="ovs-vswitchd" Jan 22 06:10:51 crc kubenswrapper[4982]: I0122 06:10:51.735829 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6593811-6583-4900-b402-5af9db3887b3" path="/var/lib/kubelet/pods/b6593811-6583-4900-b402-5af9db3887b3/volumes" Jan 22 06:10:56 crc kubenswrapper[4982]: E0122 06:10:56.090351 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b is running failed: container process not found" containerID="8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 06:10:56 crc kubenswrapper[4982]: E0122 06:10:56.091385 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b is running failed: container process not found" containerID="8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 06:10:56 crc kubenswrapper[4982]: E0122 06:10:56.091565 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d07fbec73b82d46cf6bca3ce65be1c95f30a0ec4db97b397ade4d734c12b4c66" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 06:10:56 crc kubenswrapper[4982]: E0122 06:10:56.091955 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b is running failed: container process not found" containerID="8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 06:10:56 crc kubenswrapper[4982]: E0122 06:10:56.092041 4982 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-74zx8" podUID="e8478549-adf2-4e04-b404-f4882ed405d5" containerName="ovsdb-server" Jan 22 06:10:56 crc kubenswrapper[4982]: E0122 06:10:56.096341 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d07fbec73b82d46cf6bca3ce65be1c95f30a0ec4db97b397ade4d734c12b4c66" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 06:10:56 crc kubenswrapper[4982]: E0122 06:10:56.098392 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d07fbec73b82d46cf6bca3ce65be1c95f30a0ec4db97b397ade4d734c12b4c66" 
cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 06:10:56 crc kubenswrapper[4982]: E0122 06:10:56.098469 4982 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-74zx8" podUID="e8478549-adf2-4e04-b404-f4882ed405d5" containerName="ovs-vswitchd" Jan 22 06:11:01 crc kubenswrapper[4982]: E0122 06:11:01.088346 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b is running failed: container process not found" containerID="8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 06:11:01 crc kubenswrapper[4982]: E0122 06:11:01.089208 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b is running failed: container process not found" containerID="8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 06:11:01 crc kubenswrapper[4982]: E0122 06:11:01.089766 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b is running failed: container process not found" containerID="8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 06:11:01 crc kubenswrapper[4982]: E0122 06:11:01.089809 4982 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-74zx8" podUID="e8478549-adf2-4e04-b404-f4882ed405d5" containerName="ovsdb-server" Jan 22 06:11:01 crc kubenswrapper[4982]: E0122 06:11:01.090273 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d07fbec73b82d46cf6bca3ce65be1c95f30a0ec4db97b397ade4d734c12b4c66" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 06:11:01 crc kubenswrapper[4982]: E0122 06:11:01.091946 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d07fbec73b82d46cf6bca3ce65be1c95f30a0ec4db97b397ade4d734c12b4c66" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 06:11:01 crc kubenswrapper[4982]: E0122 06:11:01.093600 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d07fbec73b82d46cf6bca3ce65be1c95f30a0ec4db97b397ade4d734c12b4c66" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 06:11:01 crc 
kubenswrapper[4982]: E0122 06:11:01.093662 4982 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-74zx8" podUID="e8478549-adf2-4e04-b404-f4882ed405d5" containerName="ovs-vswitchd" Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.383998 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-74zx8_e8478549-adf2-4e04-b404-f4882ed405d5/ovs-vswitchd/0.log" Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.385155 4982 generic.go:334] "Generic (PLEG): container finished" podID="e8478549-adf2-4e04-b404-f4882ed405d5" containerID="d07fbec73b82d46cf6bca3ce65be1c95f30a0ec4db97b397ade4d734c12b4c66" exitCode=137 Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.385257 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-74zx8" event={"ID":"e8478549-adf2-4e04-b404-f4882ed405d5","Type":"ContainerDied","Data":"d07fbec73b82d46cf6bca3ce65be1c95f30a0ec4db97b397ade4d734c12b4c66"} Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.392604 4982 generic.go:334] "Generic (PLEG): container finished" podID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerID="21acccb66fe5a7601f47f21bf74eb39ca16b5d4458bb059e53d0b376d47e4e6e" exitCode=137 Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.392667 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0e66de65-9639-4397-bc39-dfcf0c325dff","Type":"ContainerDied","Data":"21acccb66fe5a7601f47f21bf74eb39ca16b5d4458bb059e53d0b376d47e4e6e"} Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.552018 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-74zx8_e8478549-adf2-4e04-b404-f4882ed405d5/ovs-vswitchd/0.log" Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.553162 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-74zx8" Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.658306 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.667563 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h2xz5\" (UniqueName: \"kubernetes.io/projected/e8478549-adf2-4e04-b404-f4882ed405d5-kube-api-access-h2xz5\") pod \"e8478549-adf2-4e04-b404-f4882ed405d5\" (UID: \"e8478549-adf2-4e04-b404-f4882ed405d5\") " Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.667654 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/e8478549-adf2-4e04-b404-f4882ed405d5-var-log\") pod \"e8478549-adf2-4e04-b404-f4882ed405d5\" (UID: \"e8478549-adf2-4e04-b404-f4882ed405d5\") " Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.667685 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/e8478549-adf2-4e04-b404-f4882ed405d5-var-lib\") pod \"e8478549-adf2-4e04-b404-f4882ed405d5\" (UID: \"e8478549-adf2-4e04-b404-f4882ed405d5\") " Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.667754 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e8478549-adf2-4e04-b404-f4882ed405d5-var-run\") pod \"e8478549-adf2-4e04-b404-f4882ed405d5\" (UID: \"e8478549-adf2-4e04-b404-f4882ed405d5\") " Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.667771 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e8478549-adf2-4e04-b404-f4882ed405d5-scripts\") pod \"e8478549-adf2-4e04-b404-f4882ed405d5\" (UID: \"e8478549-adf2-4e04-b404-f4882ed405d5\") " Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.667817 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/e8478549-adf2-4e04-b404-f4882ed405d5-etc-ovs\") pod \"e8478549-adf2-4e04-b404-f4882ed405d5\" (UID: \"e8478549-adf2-4e04-b404-f4882ed405d5\") " Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.667907 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e8478549-adf2-4e04-b404-f4882ed405d5-var-lib" (OuterVolumeSpecName: "var-lib") pod "e8478549-adf2-4e04-b404-f4882ed405d5" (UID: "e8478549-adf2-4e04-b404-f4882ed405d5"). InnerVolumeSpecName "var-lib". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.668004 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e8478549-adf2-4e04-b404-f4882ed405d5-var-run" (OuterVolumeSpecName: "var-run") pod "e8478549-adf2-4e04-b404-f4882ed405d5" (UID: "e8478549-adf2-4e04-b404-f4882ed405d5"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.668021 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e8478549-adf2-4e04-b404-f4882ed405d5-etc-ovs" (OuterVolumeSpecName: "etc-ovs") pod "e8478549-adf2-4e04-b404-f4882ed405d5" (UID: "e8478549-adf2-4e04-b404-f4882ed405d5"). InnerVolumeSpecName "etc-ovs". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.668038 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e8478549-adf2-4e04-b404-f4882ed405d5-var-log" (OuterVolumeSpecName: "var-log") pod "e8478549-adf2-4e04-b404-f4882ed405d5" (UID: "e8478549-adf2-4e04-b404-f4882ed405d5"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.668058 4982 reconciler_common.go:293] "Volume detached for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/e8478549-adf2-4e04-b404-f4882ed405d5-var-lib\") on node \"crc\" DevicePath \"\"" Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.670246 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8478549-adf2-4e04-b404-f4882ed405d5-scripts" (OuterVolumeSpecName: "scripts") pod "e8478549-adf2-4e04-b404-f4882ed405d5" (UID: "e8478549-adf2-4e04-b404-f4882ed405d5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.673120 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8478549-adf2-4e04-b404-f4882ed405d5-kube-api-access-h2xz5" (OuterVolumeSpecName: "kube-api-access-h2xz5") pod "e8478549-adf2-4e04-b404-f4882ed405d5" (UID: "e8478549-adf2-4e04-b404-f4882ed405d5"). InnerVolumeSpecName "kube-api-access-h2xz5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.769012 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2dmc\" (UniqueName: \"kubernetes.io/projected/0e66de65-9639-4397-bc39-dfcf0c325dff-kube-api-access-x2dmc\") pod \"0e66de65-9639-4397-bc39-dfcf0c325dff\" (UID: \"0e66de65-9639-4397-bc39-dfcf0c325dff\") " Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.769061 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/0e66de65-9639-4397-bc39-dfcf0c325dff-cache\") pod \"0e66de65-9639-4397-bc39-dfcf0c325dff\" (UID: \"0e66de65-9639-4397-bc39-dfcf0c325dff\") " Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.769089 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0e66de65-9639-4397-bc39-dfcf0c325dff-etc-swift\") pod \"0e66de65-9639-4397-bc39-dfcf0c325dff\" (UID: \"0e66de65-9639-4397-bc39-dfcf0c325dff\") " Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.769106 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e66de65-9639-4397-bc39-dfcf0c325dff-combined-ca-bundle\") pod \"0e66de65-9639-4397-bc39-dfcf0c325dff\" (UID: \"0e66de65-9639-4397-bc39-dfcf0c325dff\") " Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.769151 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"0e66de65-9639-4397-bc39-dfcf0c325dff\" (UID: \"0e66de65-9639-4397-bc39-dfcf0c325dff\") " Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.769173 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: 
\"kubernetes.io/empty-dir/0e66de65-9639-4397-bc39-dfcf0c325dff-lock\") pod \"0e66de65-9639-4397-bc39-dfcf0c325dff\" (UID: \"0e66de65-9639-4397-bc39-dfcf0c325dff\") " Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.769675 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e66de65-9639-4397-bc39-dfcf0c325dff-lock" (OuterVolumeSpecName: "lock") pod "0e66de65-9639-4397-bc39-dfcf0c325dff" (UID: "0e66de65-9639-4397-bc39-dfcf0c325dff"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.769749 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e66de65-9639-4397-bc39-dfcf0c325dff-cache" (OuterVolumeSpecName: "cache") pod "0e66de65-9639-4397-bc39-dfcf0c325dff" (UID: "0e66de65-9639-4397-bc39-dfcf0c325dff"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.769899 4982 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/e8478549-adf2-4e04-b404-f4882ed405d5-var-log\") on node \"crc\" DevicePath \"\"" Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.769916 4982 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/0e66de65-9639-4397-bc39-dfcf0c325dff-lock\") on node \"crc\" DevicePath \"\"" Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.770288 4982 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e8478549-adf2-4e04-b404-f4882ed405d5-var-run\") on node \"crc\" DevicePath \"\"" Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.770305 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e8478549-adf2-4e04-b404-f4882ed405d5-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.770312 4982 reconciler_common.go:293] "Volume detached for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/e8478549-adf2-4e04-b404-f4882ed405d5-etc-ovs\") on node \"crc\" DevicePath \"\"" Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.770320 4982 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/0e66de65-9639-4397-bc39-dfcf0c325dff-cache\") on node \"crc\" DevicePath \"\"" Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.770329 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h2xz5\" (UniqueName: \"kubernetes.io/projected/e8478549-adf2-4e04-b404-f4882ed405d5-kube-api-access-h2xz5\") on node \"crc\" DevicePath \"\"" Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.772188 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e66de65-9639-4397-bc39-dfcf0c325dff-kube-api-access-x2dmc" (OuterVolumeSpecName: "kube-api-access-x2dmc") pod "0e66de65-9639-4397-bc39-dfcf0c325dff" (UID: "0e66de65-9639-4397-bc39-dfcf0c325dff"). InnerVolumeSpecName "kube-api-access-x2dmc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.772947 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e66de65-9639-4397-bc39-dfcf0c325dff-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "0e66de65-9639-4397-bc39-dfcf0c325dff" (UID: "0e66de65-9639-4397-bc39-dfcf0c325dff"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.773798 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "swift") pod "0e66de65-9639-4397-bc39-dfcf0c325dff" (UID: "0e66de65-9639-4397-bc39-dfcf0c325dff"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.872124 4982 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.872174 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2dmc\" (UniqueName: \"kubernetes.io/projected/0e66de65-9639-4397-bc39-dfcf0c325dff-kube-api-access-x2dmc\") on node \"crc\" DevicePath \"\"" Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.872196 4982 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0e66de65-9639-4397-bc39-dfcf0c325dff-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.897617 4982 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Jan 22 06:11:04 crc kubenswrapper[4982]: I0122 06:11:04.973657 4982 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Jan 22 06:11:05 crc kubenswrapper[4982]: I0122 06:11:05.026408 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e66de65-9639-4397-bc39-dfcf0c325dff-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0e66de65-9639-4397-bc39-dfcf0c325dff" (UID: "0e66de65-9639-4397-bc39-dfcf0c325dff"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:11:05 crc kubenswrapper[4982]: I0122 06:11:05.075339 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e66de65-9639-4397-bc39-dfcf0c325dff-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 06:11:05 crc kubenswrapper[4982]: I0122 06:11:05.415207 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0e66de65-9639-4397-bc39-dfcf0c325dff","Type":"ContainerDied","Data":"69159f31516b9b97593444a8f33bb79c004b1603ecffb30aa1115efbc9c78432"} Jan 22 06:11:05 crc kubenswrapper[4982]: I0122 06:11:05.415289 4982 scope.go:117] "RemoveContainer" containerID="21acccb66fe5a7601f47f21bf74eb39ca16b5d4458bb059e53d0b376d47e4e6e" Jan 22 06:11:05 crc kubenswrapper[4982]: I0122 06:11:05.415292 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Jan 22 06:11:05 crc kubenswrapper[4982]: I0122 06:11:05.417634 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-74zx8_e8478549-adf2-4e04-b404-f4882ed405d5/ovs-vswitchd/0.log" Jan 22 06:11:05 crc kubenswrapper[4982]: I0122 06:11:05.418820 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-74zx8" event={"ID":"e8478549-adf2-4e04-b404-f4882ed405d5","Type":"ContainerDied","Data":"fae00728b8ff39acdf9ef7e07b72c9a4110abc5d73270e2a0dcb02d551bcf97f"} Jan 22 06:11:05 crc kubenswrapper[4982]: I0122 06:11:05.419058 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-74zx8" Jan 22 06:11:05 crc kubenswrapper[4982]: I0122 06:11:05.447358 4982 scope.go:117] "RemoveContainer" containerID="b878ca638cdba06e768d1e1407882909e1a9532997c7390f2d2cc87458ce86aa" Jan 22 06:11:05 crc kubenswrapper[4982]: I0122 06:11:05.491415 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Jan 22 06:11:05 crc kubenswrapper[4982]: I0122 06:11:05.501972 4982 scope.go:117] "RemoveContainer" containerID="310a52e8c276d358ad53746988709b191603600640992af9703c7c83f46a3101" Jan 22 06:11:05 crc kubenswrapper[4982]: I0122 06:11:05.502795 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-storage-0"] Jan 22 06:11:05 crc kubenswrapper[4982]: I0122 06:11:05.509269 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-74zx8"] Jan 22 06:11:05 crc kubenswrapper[4982]: I0122 06:11:05.516091 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-ovs-74zx8"] Jan 22 06:11:05 crc kubenswrapper[4982]: I0122 06:11:05.544259 4982 scope.go:117] "RemoveContainer" containerID="a386b99b03239bfd525b9cda2c117bcba26367449801201fb1b1a6f2e03e42aa" Jan 22 06:11:05 crc kubenswrapper[4982]: I0122 06:11:05.564622 4982 scope.go:117] "RemoveContainer" containerID="45257d2dda5df21dd5c33ca01b27b1606dde8ba4b98ce40f6c5bf2094829e0de" Jan 22 06:11:05 crc kubenswrapper[4982]: I0122 06:11:05.585988 4982 scope.go:117] "RemoveContainer" containerID="2d157d8183c5ba1a3484d5097ce18a1c0be897ffeadde7df10cfd7adde58b171" Jan 22 06:11:05 crc kubenswrapper[4982]: I0122 06:11:05.608770 4982 scope.go:117] "RemoveContainer" containerID="e4748ea7b210414126713179ef219e82a0abc3f56fde6ec484724799d53bea45" Jan 22 06:11:05 crc kubenswrapper[4982]: I0122 06:11:05.628465 4982 scope.go:117] "RemoveContainer" containerID="426ef7c3195021e87134941727b96e87ac5a055405603c85838becb9891299c1" Jan 22 06:11:05 crc kubenswrapper[4982]: I0122 06:11:05.654892 4982 scope.go:117] "RemoveContainer" containerID="f4db4555825c7a98050d3fa64f127578b1f2415604cf41cb3cb729a79fc23f72" Jan 22 06:11:05 crc kubenswrapper[4982]: I0122 06:11:05.684952 4982 scope.go:117] "RemoveContainer" containerID="be63c0d0c6adf69366e7b0baa98c3881df8d90e7905a24b623cae778f04a2eed" Jan 22 06:11:05 crc kubenswrapper[4982]: I0122 06:11:05.711829 4982 scope.go:117] "RemoveContainer" containerID="f0e4c628f7ebdabca21ce9e2c5341af6fc155bfe4b2913c68c56bd267c6a4629" Jan 22 06:11:05 crc kubenswrapper[4982]: I0122 06:11:05.735738 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" path="/var/lib/kubelet/pods/0e66de65-9639-4397-bc39-dfcf0c325dff/volumes" Jan 22 06:11:05 crc kubenswrapper[4982]: I0122 06:11:05.736457 4982 scope.go:117] "RemoveContainer" 
containerID="0c83088039377322228a75312f73aef354b452dc32b72f3e7422b93e15270c5a" Jan 22 06:11:05 crc kubenswrapper[4982]: I0122 06:11:05.737561 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8478549-adf2-4e04-b404-f4882ed405d5" path="/var/lib/kubelet/pods/e8478549-adf2-4e04-b404-f4882ed405d5/volumes" Jan 22 06:11:05 crc kubenswrapper[4982]: I0122 06:11:05.757536 4982 scope.go:117] "RemoveContainer" containerID="95bd78bf0085807f031d31a6e3f8ecd58ff306054033420b516a89b6c828e39b" Jan 22 06:11:05 crc kubenswrapper[4982]: I0122 06:11:05.779143 4982 scope.go:117] "RemoveContainer" containerID="4a16824db8797178b4f0e9143a8c7fd2a4c2b43cd6bdf358987351b835a9dfa3" Jan 22 06:11:05 crc kubenswrapper[4982]: I0122 06:11:05.805694 4982 scope.go:117] "RemoveContainer" containerID="78cb51cdcbb32c7c566dd440fe3eddf657c1d8fa7d725c7170c6f86381854c75" Jan 22 06:11:05 crc kubenswrapper[4982]: I0122 06:11:05.833246 4982 scope.go:117] "RemoveContainer" containerID="d07fbec73b82d46cf6bca3ce65be1c95f30a0ec4db97b397ade4d734c12b4c66" Jan 22 06:11:05 crc kubenswrapper[4982]: I0122 06:11:05.863367 4982 scope.go:117] "RemoveContainer" containerID="8b412d91d5b2983ea9d7a26be4f70e6c27effcc25abd733c9e58d8f1fbb0e59b" Jan 22 06:11:05 crc kubenswrapper[4982]: I0122 06:11:05.886989 4982 scope.go:117] "RemoveContainer" containerID="b532a25aee0797dbd740b203472f01ab6f2aa1c38b5e22da29a10d70d1898802" Jan 22 06:11:58 crc kubenswrapper[4982]: I0122 06:11:58.271167 4982 scope.go:117] "RemoveContainer" containerID="13f7a9e83037686cbbc1266994ac9f274abbaae84d3e10a553ddd1ed81c5e0c8" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.447480 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-r542h"] Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.448664 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d77f6f1-5c7d-45e5-92e5-8e333d91c020" containerName="glance-httpd" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.448685 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d77f6f1-5c7d-45e5-92e5-8e333d91c020" containerName="glance-httpd" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.448711 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ba39683-f142-405a-a1c8-83841c5b2cd0" containerName="placement-api" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.448724 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ba39683-f142-405a-a1c8-83841c5b2cd0" containerName="placement-api" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.448743 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="account-reaper" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.448757 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="account-reaper" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.448776 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="object-auditor" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.448788 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="object-auditor" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.448812 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="object-updater" Jan 22 06:12:14 crc 
kubenswrapper[4982]: I0122 06:12:14.448827 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="object-updater" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.448879 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21" containerName="mariadb-account-create-update" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.448896 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21" containerName="mariadb-account-create-update" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.448917 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29f503d7-a98b-4227-b9d9-865db16a2552" containerName="ovn-controller" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.448933 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="29f503d7-a98b-4227-b9d9-865db16a2552" containerName="ovn-controller" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.448962 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac0dc6e4-35d9-4d06-93af-a8758648aa13" containerName="glance-httpd" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.448978 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac0dc6e4-35d9-4d06-93af-a8758648aa13" containerName="glance-httpd" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.449002 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53013384-1492-4c2e-9c7a-cd81d4d07018" containerName="nova-metadata-log" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.449018 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="53013384-1492-4c2e-9c7a-cd81d4d07018" containerName="nova-metadata-log" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.449050 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ce93171-ff81-4a46-9813-2807930a945c" containerName="openstack-network-exporter" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.449065 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ce93171-ff81-4a46-9813-2807930a945c" containerName="openstack-network-exporter" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.449087 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ba39683-f142-405a-a1c8-83841c5b2cd0" containerName="placement-log" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.449103 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ba39683-f142-405a-a1c8-83841c5b2cd0" containerName="placement-log" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.449132 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="account-server" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.449147 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="account-server" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.449164 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f54d316-aa4d-4c56-8681-3fa9816a1b80" containerName="barbican-keystone-listener" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.449178 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f54d316-aa4d-4c56-8681-3fa9816a1b80" containerName="barbican-keystone-listener" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.449199 4982 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="bcc9f070-463f-4fef-8eb0-fd0cbe567f70" containerName="nova-cell1-conductor-conductor" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.449213 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcc9f070-463f-4fef-8eb0-fd0cbe567f70" containerName="nova-cell1-conductor-conductor" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.449226 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="681a16ff-1468-4a9a-a692-5461230072bd" containerName="sg-core" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.449237 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="681a16ff-1468-4a9a-a692-5461230072bd" containerName="sg-core" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.449261 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="rsync" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.449273 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="rsync" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.449289 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c1843cf-e6eb-400b-84ca-5e9d209a23ce" containerName="barbican-worker" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.449301 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c1843cf-e6eb-400b-84ca-5e9d209a23ce" containerName="barbican-worker" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.449325 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="swift-recon-cron" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.449337 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="swift-recon-cron" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.449354 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6593811-6583-4900-b402-5af9db3887b3" containerName="neutron-api" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.449366 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6593811-6583-4900-b402-5af9db3887b3" containerName="neutron-api" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.449387 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="account-auditor" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.449399 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="account-auditor" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.449417 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="object-expirer" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.449429 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="object-expirer" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.449449 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0be45e9e-2ce7-4c9b-abee-2e0249e6e53e" containerName="cinder-api" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.449460 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="0be45e9e-2ce7-4c9b-abee-2e0249e6e53e" containerName="cinder-api" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.449483 4982 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="e8478549-adf2-4e04-b404-f4882ed405d5" containerName="ovsdb-server-init" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.449495 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8478549-adf2-4e04-b404-f4882ed405d5" containerName="ovsdb-server-init" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.449517 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07b70872-6840-498b-be43-290f43590bb9" containerName="kube-state-metrics" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.449532 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="07b70872-6840-498b-be43-290f43590bb9" containerName="kube-state-metrics" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.449551 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac0dc6e4-35d9-4d06-93af-a8758648aa13" containerName="glance-log" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.449563 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac0dc6e4-35d9-4d06-93af-a8758648aa13" containerName="glance-log" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.449581 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7623a66b-412d-4202-bd05-58fba1c6a3d3" containerName="setup-container" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.449592 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="7623a66b-412d-4202-bd05-58fba1c6a3d3" containerName="setup-container" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.449638 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="object-server" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.449651 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="object-server" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.449666 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="681a16ff-1468-4a9a-a692-5461230072bd" containerName="ceilometer-notification-agent" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.449679 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="681a16ff-1468-4a9a-a692-5461230072bd" containerName="ceilometer-notification-agent" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.449695 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e" containerName="memcached" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.449707 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e" containerName="memcached" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.449722 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c1843cf-e6eb-400b-84ca-5e9d209a23ce" containerName="barbican-worker-log" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.449735 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c1843cf-e6eb-400b-84ca-5e9d209a23ce" containerName="barbican-worker-log" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.449749 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e2e1a17-8278-4fe9-a83e-aeb0a61c69df" containerName="nova-scheduler-scheduler" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.449762 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e2e1a17-8278-4fe9-a83e-aeb0a61c69df" containerName="nova-scheduler-scheduler" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.449774 4982 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b565c71-916f-40d6-aa91-de5fd3a323d6" containerName="barbican-api-log" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.449786 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b565c71-916f-40d6-aa91-de5fd3a323d6" containerName="barbican-api-log" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.449801 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb198f24-94e6-4569-be12-9ee57000a3e3" containerName="setup-container" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.449812 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb198f24-94e6-4569-be12-9ee57000a3e3" containerName="setup-container" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.449825 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d1d97fa-17f8-45ed-9881-5d3896c48708" containerName="nova-cell0-conductor-conductor" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.449838 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d1d97fa-17f8-45ed-9881-5d3896c48708" containerName="nova-cell0-conductor-conductor" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.449888 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="681a16ff-1468-4a9a-a692-5461230072bd" containerName="proxy-httpd" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.449902 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="681a16ff-1468-4a9a-a692-5461230072bd" containerName="proxy-httpd" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.449932 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="container-server" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.449944 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="container-server" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.449959 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8478549-adf2-4e04-b404-f4882ed405d5" containerName="ovsdb-server" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.449971 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8478549-adf2-4e04-b404-f4882ed405d5" containerName="ovsdb-server" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.449989 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8478549-adf2-4e04-b404-f4882ed405d5" containerName="ovs-vswitchd" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.450000 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8478549-adf2-4e04-b404-f4882ed405d5" containerName="ovs-vswitchd" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.450012 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="681a16ff-1468-4a9a-a692-5461230072bd" containerName="ceilometer-central-agent" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.450024 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="681a16ff-1468-4a9a-a692-5461230072bd" containerName="ceilometer-central-agent" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.450038 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="container-updater" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.450050 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="container-updater" Jan 22 
06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.450067 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d77f6f1-5c7d-45e5-92e5-8e333d91c020" containerName="glance-log" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.450079 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d77f6f1-5c7d-45e5-92e5-8e333d91c020" containerName="glance-log" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.450097 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd1c632c-d2c0-4f55-9727-af2ffbe1feef" containerName="probe" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.450109 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd1c632c-d2c0-4f55-9727-af2ffbe1feef" containerName="probe" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.450124 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7623a66b-412d-4202-bd05-58fba1c6a3d3" containerName="rabbitmq" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.450136 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="7623a66b-412d-4202-bd05-58fba1c6a3d3" containerName="rabbitmq" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.450149 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb198f24-94e6-4569-be12-9ee57000a3e3" containerName="rabbitmq" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.450161 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb198f24-94e6-4569-be12-9ee57000a3e3" containerName="rabbitmq" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.450178 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9f36aaa-d627-4dfe-ab21-b5bedee0a25c" containerName="keystone-api" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.450190 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9f36aaa-d627-4dfe-ab21-b5bedee0a25c" containerName="keystone-api" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.450211 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd1c632c-d2c0-4f55-9727-af2ffbe1feef" containerName="cinder-scheduler" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.450225 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd1c632c-d2c0-4f55-9727-af2ffbe1feef" containerName="cinder-scheduler" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.450246 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1669f2c1-5f4c-4a3e-ae64-5d754ec522bc" containerName="nova-api-log" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.450262 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="1669f2c1-5f4c-4a3e-ae64-5d754ec522bc" containerName="nova-api-log" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.450281 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0be45e9e-2ce7-4c9b-abee-2e0249e6e53e" containerName="cinder-api-log" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.450297 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="0be45e9e-2ce7-4c9b-abee-2e0249e6e53e" containerName="cinder-api-log" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.450327 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c227aae-c987-448b-9545-febc060f0929" containerName="mysql-bootstrap" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.450340 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c227aae-c987-448b-9545-febc060f0929" containerName="mysql-bootstrap" Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 
06:12:14.450360 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53013384-1492-4c2e-9c7a-cd81d4d07018" containerName="nova-metadata-metadata"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.450376 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="53013384-1492-4c2e-9c7a-cd81d4d07018" containerName="nova-metadata-metadata"
Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.450399 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21" containerName="mariadb-account-create-update"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.450415 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21" containerName="mariadb-account-create-update"
Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.450438 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c227aae-c987-448b-9545-febc060f0929" containerName="galera"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.450454 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c227aae-c987-448b-9545-febc060f0929" containerName="galera"
Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.450472 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="account-replicator"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.450488 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="account-replicator"
Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.450510 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="object-replicator"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.450523 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="object-replicator"
Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.450543 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b565c71-916f-40d6-aa91-de5fd3a323d6" containerName="barbican-api"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.450558 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b565c71-916f-40d6-aa91-de5fd3a323d6" containerName="barbican-api"
Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.450572 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="container-replicator"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.450584 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="container-replicator"
Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.450603 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="container-auditor"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.450616 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="container-auditor"
Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.450636 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f54d316-aa4d-4c56-8681-3fa9816a1b80" containerName="barbican-keystone-listener-log"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.450650 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f54d316-aa4d-4c56-8681-3fa9816a1b80" containerName="barbican-keystone-listener-log"
Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.450676 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ce93171-ff81-4a46-9813-2807930a945c" containerName="ovn-northd"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.450691 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ce93171-ff81-4a46-9813-2807930a945c" containerName="ovn-northd"
Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.450711 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1669f2c1-5f4c-4a3e-ae64-5d754ec522bc" containerName="nova-api-api"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.450726 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="1669f2c1-5f4c-4a3e-ae64-5d754ec522bc" containerName="nova-api-api"
Jan 22 06:12:14 crc kubenswrapper[4982]: E0122 06:12:14.450752 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6593811-6583-4900-b402-5af9db3887b3" containerName="neutron-httpd"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.450768 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6593811-6583-4900-b402-5af9db3887b3" containerName="neutron-httpd"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451350 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f54d316-aa4d-4c56-8681-3fa9816a1b80" containerName="barbican-keystone-listener-log"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451390 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9f36aaa-d627-4dfe-ab21-b5bedee0a25c" containerName="keystone-api"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451409 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d77f6f1-5c7d-45e5-92e5-8e333d91c020" containerName="glance-httpd"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451426 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="681a16ff-1468-4a9a-a692-5461230072bd" containerName="sg-core"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451448 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="object-server"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451462 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c1843cf-e6eb-400b-84ca-5e9d209a23ce" containerName="barbican-worker"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451476 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="53013384-1492-4c2e-9c7a-cd81d4d07018" containerName="nova-metadata-metadata"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451492 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8478549-adf2-4e04-b404-f4882ed405d5" containerName="ovsdb-server"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451506 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="account-reaper"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451523 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ba39683-f142-405a-a1c8-83841c5b2cd0" containerName="placement-api"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451541 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="account-auditor"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451554 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="container-updater"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451567 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="object-updater"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451590 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e2e1a17-8278-4fe9-a83e-aeb0a61c69df" containerName="nova-scheduler-scheduler"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451613 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d1d97fa-17f8-45ed-9881-5d3896c48708" containerName="nova-cell0-conductor-conductor"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451639 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f503dfc-13a3-4f48-8d84-c0b8a8e54b6e" containerName="memcached"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451666 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="swift-recon-cron"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451685 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6593811-6583-4900-b402-5af9db3887b3" containerName="neutron-httpd"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451713 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6593811-6583-4900-b402-5af9db3887b3" containerName="neutron-api"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451730 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb198f24-94e6-4569-be12-9ee57000a3e3" containerName="rabbitmq"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451751 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="1669f2c1-5f4c-4a3e-ae64-5d754ec522bc" containerName="nova-api-api"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451773 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="container-server"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451786 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac0dc6e4-35d9-4d06-93af-a8758648aa13" containerName="glance-log"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451802 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="0be45e9e-2ce7-4c9b-abee-2e0249e6e53e" containerName="cinder-api"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451821 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcc9f070-463f-4fef-8eb0-fd0cbe567f70" containerName="nova-cell1-conductor-conductor"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451836 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="account-replicator"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451888 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="0be45e9e-2ce7-4c9b-abee-2e0249e6e53e" containerName="cinder-api-log"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451903 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ce93171-ff81-4a46-9813-2807930a945c" containerName="ovn-northd"
Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451916 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="681a16ff-1468-4a9a-a692-5461230072bd" containerName="proxy-httpd"
containerName="proxy-httpd" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451932 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8478549-adf2-4e04-b404-f4882ed405d5" containerName="ovs-vswitchd" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451945 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="29f503d7-a98b-4227-b9d9-865db16a2552" containerName="ovn-controller" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451963 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="rsync" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451977 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd1c632c-d2c0-4f55-9727-af2ffbe1feef" containerName="probe" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.451994 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="object-auditor" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.452013 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="object-expirer" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.452028 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ba39683-f142-405a-a1c8-83841c5b2cd0" containerName="placement-log" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.452047 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f54d316-aa4d-4c56-8681-3fa9816a1b80" containerName="barbican-keystone-listener" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.452062 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="7623a66b-412d-4202-bd05-58fba1c6a3d3" containerName="rabbitmq" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.452076 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="1669f2c1-5f4c-4a3e-ae64-5d754ec522bc" containerName="nova-api-log" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.452089 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d77f6f1-5c7d-45e5-92e5-8e333d91c020" containerName="glance-log" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.452111 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="681a16ff-1468-4a9a-a692-5461230072bd" containerName="ceilometer-central-agent" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.452127 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21" containerName="mariadb-account-create-update" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.452145 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="container-replicator" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.452159 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="9fb512c6-b4d3-4b1e-93ad-b991e2b9dd21" containerName="mariadb-account-create-update" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.452175 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c227aae-c987-448b-9545-febc060f0929" containerName="galera" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.452194 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="07b70872-6840-498b-be43-290f43590bb9" containerName="kube-state-metrics" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.452207 4982 
memory_manager.go:354] "RemoveStaleState removing state" podUID="53013384-1492-4c2e-9c7a-cd81d4d07018" containerName="nova-metadata-log" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.452225 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd1c632c-d2c0-4f55-9727-af2ffbe1feef" containerName="cinder-scheduler" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.452239 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac0dc6e4-35d9-4d06-93af-a8758648aa13" containerName="glance-httpd" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.452254 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="object-replicator" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.452271 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b565c71-916f-40d6-aa91-de5fd3a323d6" containerName="barbican-api" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.452290 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ce93171-ff81-4a46-9813-2807930a945c" containerName="openstack-network-exporter" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.452305 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="container-auditor" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.452320 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="681a16ff-1468-4a9a-a692-5461230072bd" containerName="ceilometer-notification-agent" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.452333 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e66de65-9639-4397-bc39-dfcf0c325dff" containerName="account-server" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.452353 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b565c71-916f-40d6-aa91-de5fd3a323d6" containerName="barbican-api-log" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.452371 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c1843cf-e6eb-400b-84ca-5e9d209a23ce" containerName="barbican-worker-log" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.454591 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-r542h" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.474918 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-r542h"] Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.577196 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqcxd\" (UniqueName: \"kubernetes.io/projected/7639ee66-eac9-44fd-b648-41d681486922-kube-api-access-jqcxd\") pod \"certified-operators-r542h\" (UID: \"7639ee66-eac9-44fd-b648-41d681486922\") " pod="openshift-marketplace/certified-operators-r542h" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.577277 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7639ee66-eac9-44fd-b648-41d681486922-utilities\") pod \"certified-operators-r542h\" (UID: \"7639ee66-eac9-44fd-b648-41d681486922\") " pod="openshift-marketplace/certified-operators-r542h" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.577327 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7639ee66-eac9-44fd-b648-41d681486922-catalog-content\") pod \"certified-operators-r542h\" (UID: \"7639ee66-eac9-44fd-b648-41d681486922\") " pod="openshift-marketplace/certified-operators-r542h" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.678411 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqcxd\" (UniqueName: \"kubernetes.io/projected/7639ee66-eac9-44fd-b648-41d681486922-kube-api-access-jqcxd\") pod \"certified-operators-r542h\" (UID: \"7639ee66-eac9-44fd-b648-41d681486922\") " pod="openshift-marketplace/certified-operators-r542h" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.678477 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7639ee66-eac9-44fd-b648-41d681486922-utilities\") pod \"certified-operators-r542h\" (UID: \"7639ee66-eac9-44fd-b648-41d681486922\") " pod="openshift-marketplace/certified-operators-r542h" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.678499 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7639ee66-eac9-44fd-b648-41d681486922-catalog-content\") pod \"certified-operators-r542h\" (UID: \"7639ee66-eac9-44fd-b648-41d681486922\") " pod="openshift-marketplace/certified-operators-r542h" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.679063 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7639ee66-eac9-44fd-b648-41d681486922-catalog-content\") pod \"certified-operators-r542h\" (UID: \"7639ee66-eac9-44fd-b648-41d681486922\") " pod="openshift-marketplace/certified-operators-r542h" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.679133 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7639ee66-eac9-44fd-b648-41d681486922-utilities\") pod \"certified-operators-r542h\" (UID: \"7639ee66-eac9-44fd-b648-41d681486922\") " pod="openshift-marketplace/certified-operators-r542h" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.701014 4982 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-jqcxd\" (UniqueName: \"kubernetes.io/projected/7639ee66-eac9-44fd-b648-41d681486922-kube-api-access-jqcxd\") pod \"certified-operators-r542h\" (UID: \"7639ee66-eac9-44fd-b648-41d681486922\") " pod="openshift-marketplace/certified-operators-r542h" Jan 22 06:12:14 crc kubenswrapper[4982]: I0122 06:12:14.810763 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-r542h" Jan 22 06:12:15 crc kubenswrapper[4982]: I0122 06:12:15.305641 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-r542h"] Jan 22 06:12:16 crc kubenswrapper[4982]: I0122 06:12:16.131158 4982 generic.go:334] "Generic (PLEG): container finished" podID="7639ee66-eac9-44fd-b648-41d681486922" containerID="c18c19d19b823988fcff82add81e09867ad67d0deef38744b48c7a46486e6c1b" exitCode=0 Jan 22 06:12:16 crc kubenswrapper[4982]: I0122 06:12:16.131231 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r542h" event={"ID":"7639ee66-eac9-44fd-b648-41d681486922","Type":"ContainerDied","Data":"c18c19d19b823988fcff82add81e09867ad67d0deef38744b48c7a46486e6c1b"} Jan 22 06:12:16 crc kubenswrapper[4982]: I0122 06:12:16.131268 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r542h" event={"ID":"7639ee66-eac9-44fd-b648-41d681486922","Type":"ContainerStarted","Data":"5512115cb0f0b2d08dc82794ef52144b8fd30a46aff4ad25552c0ae6fc9206de"} Jan 22 06:12:17 crc kubenswrapper[4982]: I0122 06:12:17.176236 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r542h" event={"ID":"7639ee66-eac9-44fd-b648-41d681486922","Type":"ContainerStarted","Data":"806585c36aeae7d00afe2c9f98e00e76c3980e907a416229dd88987b2ba16f2b"} Jan 22 06:12:18 crc kubenswrapper[4982]: I0122 06:12:18.191591 4982 generic.go:334] "Generic (PLEG): container finished" podID="7639ee66-eac9-44fd-b648-41d681486922" containerID="806585c36aeae7d00afe2c9f98e00e76c3980e907a416229dd88987b2ba16f2b" exitCode=0 Jan 22 06:12:18 crc kubenswrapper[4982]: I0122 06:12:18.191648 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r542h" event={"ID":"7639ee66-eac9-44fd-b648-41d681486922","Type":"ContainerDied","Data":"806585c36aeae7d00afe2c9f98e00e76c3980e907a416229dd88987b2ba16f2b"} Jan 22 06:12:19 crc kubenswrapper[4982]: I0122 06:12:19.210634 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r542h" event={"ID":"7639ee66-eac9-44fd-b648-41d681486922","Type":"ContainerStarted","Data":"55efc25d7102c3c73a19b041ad1566ce5088d6906359b68af48523f18ef8907d"} Jan 22 06:12:19 crc kubenswrapper[4982]: I0122 06:12:19.237478 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-r542h" podStartSLOduration=2.476608676 podStartE2EDuration="5.237456178s" podCreationTimestamp="2026-01-22 06:12:14 +0000 UTC" firstStartedPulling="2026-01-22 06:12:16.133848061 +0000 UTC m=+1596.972486094" lastFinishedPulling="2026-01-22 06:12:18.894695563 +0000 UTC m=+1599.733333596" observedRunningTime="2026-01-22 06:12:19.23596415 +0000 UTC m=+1600.074602213" watchObservedRunningTime="2026-01-22 06:12:19.237456178 +0000 UTC m=+1600.076094221" Jan 22 06:12:24 crc kubenswrapper[4982]: I0122 06:12:24.811920 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/certified-operators-r542h" Jan 22 06:12:24 crc kubenswrapper[4982]: I0122 06:12:24.812588 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-r542h" Jan 22 06:12:24 crc kubenswrapper[4982]: I0122 06:12:24.892389 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-r542h" Jan 22 06:12:25 crc kubenswrapper[4982]: I0122 06:12:25.348470 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-r542h" Jan 22 06:12:25 crc kubenswrapper[4982]: I0122 06:12:25.411085 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-r542h"] Jan 22 06:12:27 crc kubenswrapper[4982]: I0122 06:12:27.297476 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-r542h" podUID="7639ee66-eac9-44fd-b648-41d681486922" containerName="registry-server" containerID="cri-o://55efc25d7102c3c73a19b041ad1566ce5088d6906359b68af48523f18ef8907d" gracePeriod=2 Jan 22 06:12:28 crc kubenswrapper[4982]: I0122 06:12:28.310035 4982 generic.go:334] "Generic (PLEG): container finished" podID="7639ee66-eac9-44fd-b648-41d681486922" containerID="55efc25d7102c3c73a19b041ad1566ce5088d6906359b68af48523f18ef8907d" exitCode=0 Jan 22 06:12:28 crc kubenswrapper[4982]: I0122 06:12:28.310076 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r542h" event={"ID":"7639ee66-eac9-44fd-b648-41d681486922","Type":"ContainerDied","Data":"55efc25d7102c3c73a19b041ad1566ce5088d6906359b68af48523f18ef8907d"} Jan 22 06:12:28 crc kubenswrapper[4982]: I0122 06:12:28.848472 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-r542h" Jan 22 06:12:29 crc kubenswrapper[4982]: I0122 06:12:29.013878 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7639ee66-eac9-44fd-b648-41d681486922-catalog-content\") pod \"7639ee66-eac9-44fd-b648-41d681486922\" (UID: \"7639ee66-eac9-44fd-b648-41d681486922\") " Jan 22 06:12:29 crc kubenswrapper[4982]: I0122 06:12:29.013940 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7639ee66-eac9-44fd-b648-41d681486922-utilities\") pod \"7639ee66-eac9-44fd-b648-41d681486922\" (UID: \"7639ee66-eac9-44fd-b648-41d681486922\") " Jan 22 06:12:29 crc kubenswrapper[4982]: I0122 06:12:29.014110 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jqcxd\" (UniqueName: \"kubernetes.io/projected/7639ee66-eac9-44fd-b648-41d681486922-kube-api-access-jqcxd\") pod \"7639ee66-eac9-44fd-b648-41d681486922\" (UID: \"7639ee66-eac9-44fd-b648-41d681486922\") " Jan 22 06:12:29 crc kubenswrapper[4982]: I0122 06:12:29.015067 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7639ee66-eac9-44fd-b648-41d681486922-utilities" (OuterVolumeSpecName: "utilities") pod "7639ee66-eac9-44fd-b648-41d681486922" (UID: "7639ee66-eac9-44fd-b648-41d681486922"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:12:29 crc kubenswrapper[4982]: I0122 06:12:29.026144 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7639ee66-eac9-44fd-b648-41d681486922-kube-api-access-jqcxd" (OuterVolumeSpecName: "kube-api-access-jqcxd") pod "7639ee66-eac9-44fd-b648-41d681486922" (UID: "7639ee66-eac9-44fd-b648-41d681486922"). InnerVolumeSpecName "kube-api-access-jqcxd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:12:29 crc kubenswrapper[4982]: I0122 06:12:29.077803 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7639ee66-eac9-44fd-b648-41d681486922-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7639ee66-eac9-44fd-b648-41d681486922" (UID: "7639ee66-eac9-44fd-b648-41d681486922"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:12:29 crc kubenswrapper[4982]: I0122 06:12:29.116010 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7639ee66-eac9-44fd-b648-41d681486922-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 06:12:29 crc kubenswrapper[4982]: I0122 06:12:29.116045 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7639ee66-eac9-44fd-b648-41d681486922-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 06:12:29 crc kubenswrapper[4982]: I0122 06:12:29.116056 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jqcxd\" (UniqueName: \"kubernetes.io/projected/7639ee66-eac9-44fd-b648-41d681486922-kube-api-access-jqcxd\") on node \"crc\" DevicePath \"\"" Jan 22 06:12:29 crc kubenswrapper[4982]: I0122 06:12:29.324400 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r542h" event={"ID":"7639ee66-eac9-44fd-b648-41d681486922","Type":"ContainerDied","Data":"5512115cb0f0b2d08dc82794ef52144b8fd30a46aff4ad25552c0ae6fc9206de"} Jan 22 06:12:29 crc kubenswrapper[4982]: I0122 06:12:29.324617 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-r542h" Jan 22 06:12:29 crc kubenswrapper[4982]: I0122 06:12:29.325868 4982 scope.go:117] "RemoveContainer" containerID="55efc25d7102c3c73a19b041ad1566ce5088d6906359b68af48523f18ef8907d" Jan 22 06:12:29 crc kubenswrapper[4982]: I0122 06:12:29.362455 4982 scope.go:117] "RemoveContainer" containerID="806585c36aeae7d00afe2c9f98e00e76c3980e907a416229dd88987b2ba16f2b" Jan 22 06:12:29 crc kubenswrapper[4982]: I0122 06:12:29.383453 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-r542h"] Jan 22 06:12:29 crc kubenswrapper[4982]: I0122 06:12:29.393771 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-r542h"] Jan 22 06:12:29 crc kubenswrapper[4982]: I0122 06:12:29.426314 4982 scope.go:117] "RemoveContainer" containerID="c18c19d19b823988fcff82add81e09867ad67d0deef38744b48c7a46486e6c1b" Jan 22 06:12:29 crc kubenswrapper[4982]: I0122 06:12:29.734792 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7639ee66-eac9-44fd-b648-41d681486922" path="/var/lib/kubelet/pods/7639ee66-eac9-44fd-b648-41d681486922/volumes" Jan 22 06:12:48 crc kubenswrapper[4982]: I0122 06:12:48.973640 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 06:12:48 crc kubenswrapper[4982]: I0122 06:12:48.974279 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 06:12:58 crc kubenswrapper[4982]: I0122 06:12:58.392366 4982 scope.go:117] "RemoveContainer" containerID="cf2bb070f887de8ffa9f6fa71a4230e53ea11650e53bc5ad809d7ec836fff0a2" Jan 22 06:12:58 crc kubenswrapper[4982]: I0122 06:12:58.424555 4982 scope.go:117] "RemoveContainer" containerID="6d529e2f9c7a8d7c7b0443548df383571f79d3ebf05dff9c71e77849bbfde977" Jan 22 06:12:58 crc kubenswrapper[4982]: I0122 06:12:58.456824 4982 scope.go:117] "RemoveContainer" containerID="f03d5187559edee59c68b9b8f3273c1775dd301a33e96ee5797bda602ce3d7bc" Jan 22 06:12:58 crc kubenswrapper[4982]: I0122 06:12:58.492846 4982 scope.go:117] "RemoveContainer" containerID="2dac11986a165e87adcb29b9332da29904dd3b38da6e010dced58f56bcf4ad7a" Jan 22 06:12:58 crc kubenswrapper[4982]: I0122 06:12:58.524135 4982 scope.go:117] "RemoveContainer" containerID="041fb90cabe67b0ff6a1f729262835dc3719ade67e8c157c92d95706cf6a990b" Jan 22 06:12:58 crc kubenswrapper[4982]: I0122 06:12:58.572701 4982 scope.go:117] "RemoveContainer" containerID="3cc5e4d2d0314f0d79f3bc022c76c4814327e535cb229ac9ea69b99793b639e1" Jan 22 06:12:58 crc kubenswrapper[4982]: I0122 06:12:58.607996 4982 scope.go:117] "RemoveContainer" containerID="91dad18bfff7d230114853f2b002e5d721b5ae8faf28fe8993f5274ca7c94eec" Jan 22 06:12:58 crc kubenswrapper[4982]: I0122 06:12:58.632055 4982 scope.go:117] "RemoveContainer" containerID="e4bcbdccd4c9eb15eecdd175dc951dcc4cded5d82f01be1657956b30eb3f4bbb" Jan 22 06:12:58 crc kubenswrapper[4982]: I0122 06:12:58.676094 4982 scope.go:117] "RemoveContainer" 
containerID="96443aee780992f73d8bf465dd81fc5713de4eb03bf77c564cee2ba08450265c" Jan 22 06:12:58 crc kubenswrapper[4982]: I0122 06:12:58.705875 4982 scope.go:117] "RemoveContainer" containerID="2508ceb17f5b081fedae60a792a07d63197d4ce625572aa4c03d2c6d1fd5177c" Jan 22 06:12:58 crc kubenswrapper[4982]: I0122 06:12:58.723489 4982 scope.go:117] "RemoveContainer" containerID="423b6ad43153e8ebe85acafa36b79589a12cb9022f0450f0a6e23033dac68534" Jan 22 06:12:58 crc kubenswrapper[4982]: I0122 06:12:58.742242 4982 scope.go:117] "RemoveContainer" containerID="f21f6b2f159515920e265fd73f4db3e8cdea1216effa74442ab287d2ab610e13" Jan 22 06:12:58 crc kubenswrapper[4982]: I0122 06:12:58.787310 4982 scope.go:117] "RemoveContainer" containerID="c8f0e29e462a5168ff8d0e06215e4707292c464ad4ee80345d8a1e7288b9793e" Jan 22 06:12:58 crc kubenswrapper[4982]: I0122 06:12:58.817517 4982 scope.go:117] "RemoveContainer" containerID="d942076c8167ac6a314e546366e8b1ceb42275cdc5d5080afef9aefeffe2759d" Jan 22 06:12:58 crc kubenswrapper[4982]: I0122 06:12:58.838108 4982 scope.go:117] "RemoveContainer" containerID="e1876ee24c9fffd277bce8275d6388beac19ae4bb7662cbc6970d20f3f974e3c" Jan 22 06:12:58 crc kubenswrapper[4982]: I0122 06:12:58.883819 4982 scope.go:117] "RemoveContainer" containerID="7513f113fd8c4c67cbc53fb0a733846da65c7fce1193f50b23fc013ac9b0faa4" Jan 22 06:12:58 crc kubenswrapper[4982]: I0122 06:12:58.914525 4982 scope.go:117] "RemoveContainer" containerID="fa8b66a769c752600146a2bd21e5db11db65361248dd5b3d212a9269c42ae42b" Jan 22 06:12:58 crc kubenswrapper[4982]: I0122 06:12:58.951924 4982 scope.go:117] "RemoveContainer" containerID="32877ec2e3d36e3e0a5db604267c0aa2ff16bc6c3b616ac86b4b260c0191e454" Jan 22 06:12:58 crc kubenswrapper[4982]: I0122 06:12:58.983158 4982 scope.go:117] "RemoveContainer" containerID="96daef9a1308445e129ae30eb202f53f16ca0d76f654c7453781f263ed77a164" Jan 22 06:12:59 crc kubenswrapper[4982]: I0122 06:12:59.005091 4982 scope.go:117] "RemoveContainer" containerID="60c7b99cef0a8b8fb2b268e986ee0042e7076f1e35e494fec4e8f5a27613ec40" Jan 22 06:12:59 crc kubenswrapper[4982]: I0122 06:12:59.034633 4982 scope.go:117] "RemoveContainer" containerID="6a0c35338e2f5b356715e798df704abdc39b26477bc7c13626e347a37d3bb802" Jan 22 06:12:59 crc kubenswrapper[4982]: I0122 06:12:59.071522 4982 scope.go:117] "RemoveContainer" containerID="715c04a2da3985828f5bdcf5384f56119ba51d448ff26fc87cbe6892e62a56ab" Jan 22 06:12:59 crc kubenswrapper[4982]: I0122 06:12:59.105316 4982 scope.go:117] "RemoveContainer" containerID="6ec1fe11dc3ff5f1bbac61b12daeecd51d1202d93fc8aa01545ec8f54173fcc8" Jan 22 06:13:05 crc kubenswrapper[4982]: I0122 06:13:05.413339 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8tjwn"] Jan 22 06:13:05 crc kubenswrapper[4982]: E0122 06:13:05.414166 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7639ee66-eac9-44fd-b648-41d681486922" containerName="extract-content" Jan 22 06:13:05 crc kubenswrapper[4982]: I0122 06:13:05.414181 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="7639ee66-eac9-44fd-b648-41d681486922" containerName="extract-content" Jan 22 06:13:05 crc kubenswrapper[4982]: E0122 06:13:05.414212 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7639ee66-eac9-44fd-b648-41d681486922" containerName="extract-utilities" Jan 22 06:13:05 crc kubenswrapper[4982]: I0122 06:13:05.414220 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="7639ee66-eac9-44fd-b648-41d681486922" containerName="extract-utilities" 
Jan 22 06:13:05 crc kubenswrapper[4982]: E0122 06:13:05.414239 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7639ee66-eac9-44fd-b648-41d681486922" containerName="registry-server"
Jan 22 06:13:05 crc kubenswrapper[4982]: I0122 06:13:05.414247 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="7639ee66-eac9-44fd-b648-41d681486922" containerName="registry-server"
Jan 22 06:13:05 crc kubenswrapper[4982]: I0122 06:13:05.414411 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="7639ee66-eac9-44fd-b648-41d681486922" containerName="registry-server"
Jan 22 06:13:05 crc kubenswrapper[4982]: I0122 06:13:05.415626 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8tjwn"
Jan 22 06:13:05 crc kubenswrapper[4982]: I0122 06:13:05.434162 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8tjwn"]
Jan 22 06:13:05 crc kubenswrapper[4982]: I0122 06:13:05.514494 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-625jv\" (UniqueName: \"kubernetes.io/projected/59d491cf-c9aa-4f1e-9aaf-0d92b28d7362-kube-api-access-625jv\") pod \"redhat-marketplace-8tjwn\" (UID: \"59d491cf-c9aa-4f1e-9aaf-0d92b28d7362\") " pod="openshift-marketplace/redhat-marketplace-8tjwn"
Jan 22 06:13:05 crc kubenswrapper[4982]: I0122 06:13:05.514545 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/59d491cf-c9aa-4f1e-9aaf-0d92b28d7362-utilities\") pod \"redhat-marketplace-8tjwn\" (UID: \"59d491cf-c9aa-4f1e-9aaf-0d92b28d7362\") " pod="openshift-marketplace/redhat-marketplace-8tjwn"
Jan 22 06:13:05 crc kubenswrapper[4982]: I0122 06:13:05.514604 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/59d491cf-c9aa-4f1e-9aaf-0d92b28d7362-catalog-content\") pod \"redhat-marketplace-8tjwn\" (UID: \"59d491cf-c9aa-4f1e-9aaf-0d92b28d7362\") " pod="openshift-marketplace/redhat-marketplace-8tjwn"
Jan 22 06:13:05 crc kubenswrapper[4982]: I0122 06:13:05.616042 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/59d491cf-c9aa-4f1e-9aaf-0d92b28d7362-catalog-content\") pod \"redhat-marketplace-8tjwn\" (UID: \"59d491cf-c9aa-4f1e-9aaf-0d92b28d7362\") " pod="openshift-marketplace/redhat-marketplace-8tjwn"
Jan 22 06:13:05 crc kubenswrapper[4982]: I0122 06:13:05.616130 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-625jv\" (UniqueName: \"kubernetes.io/projected/59d491cf-c9aa-4f1e-9aaf-0d92b28d7362-kube-api-access-625jv\") pod \"redhat-marketplace-8tjwn\" (UID: \"59d491cf-c9aa-4f1e-9aaf-0d92b28d7362\") " pod="openshift-marketplace/redhat-marketplace-8tjwn"
Jan 22 06:13:05 crc kubenswrapper[4982]: I0122 06:13:05.616152 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/59d491cf-c9aa-4f1e-9aaf-0d92b28d7362-utilities\") pod \"redhat-marketplace-8tjwn\" (UID: \"59d491cf-c9aa-4f1e-9aaf-0d92b28d7362\") " pod="openshift-marketplace/redhat-marketplace-8tjwn"
Jan 22 06:13:05 crc kubenswrapper[4982]: I0122 06:13:05.616672 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/59d491cf-c9aa-4f1e-9aaf-0d92b28d7362-catalog-content\") pod \"redhat-marketplace-8tjwn\" (UID: \"59d491cf-c9aa-4f1e-9aaf-0d92b28d7362\") " pod="openshift-marketplace/redhat-marketplace-8tjwn"
Jan 22 06:13:05 crc kubenswrapper[4982]: I0122 06:13:05.616711 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/59d491cf-c9aa-4f1e-9aaf-0d92b28d7362-utilities\") pod \"redhat-marketplace-8tjwn\" (UID: \"59d491cf-c9aa-4f1e-9aaf-0d92b28d7362\") " pod="openshift-marketplace/redhat-marketplace-8tjwn"
Jan 22 06:13:05 crc kubenswrapper[4982]: I0122 06:13:05.634663 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-625jv\" (UniqueName: \"kubernetes.io/projected/59d491cf-c9aa-4f1e-9aaf-0d92b28d7362-kube-api-access-625jv\") pod \"redhat-marketplace-8tjwn\" (UID: \"59d491cf-c9aa-4f1e-9aaf-0d92b28d7362\") " pod="openshift-marketplace/redhat-marketplace-8tjwn"
Jan 22 06:13:05 crc kubenswrapper[4982]: I0122 06:13:05.740385 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8tjwn"
Jan 22 06:13:06 crc kubenswrapper[4982]: I0122 06:13:06.293111 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8tjwn"]
Jan 22 06:13:06 crc kubenswrapper[4982]: I0122 06:13:06.678589 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8tjwn" event={"ID":"59d491cf-c9aa-4f1e-9aaf-0d92b28d7362","Type":"ContainerStarted","Data":"dd1ea214c1341553ed44f80ad00e37f025b9187017b6e5f729f3419601d49d41"}
Jan 22 06:13:08 crc kubenswrapper[4982]: I0122 06:13:08.700312 4982 generic.go:334] "Generic (PLEG): container finished" podID="59d491cf-c9aa-4f1e-9aaf-0d92b28d7362" containerID="16ccd364971b84f9e1aefde70ffe870dfce4ec745425bb661ac50bbf45b722c2" exitCode=0
Jan 22 06:13:08 crc kubenswrapper[4982]: I0122 06:13:08.700401 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8tjwn" event={"ID":"59d491cf-c9aa-4f1e-9aaf-0d92b28d7362","Type":"ContainerDied","Data":"16ccd364971b84f9e1aefde70ffe870dfce4ec745425bb661ac50bbf45b722c2"}
Jan 22 06:13:09 crc kubenswrapper[4982]: I0122 06:13:09.712566 4982 generic.go:334] "Generic (PLEG): container finished" podID="59d491cf-c9aa-4f1e-9aaf-0d92b28d7362" containerID="b6075a60d0588ab6384967f796d19e90797092838f29dca692e35dcc7a076930" exitCode=0
Jan 22 06:13:09 crc kubenswrapper[4982]: I0122 06:13:09.712634 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8tjwn" event={"ID":"59d491cf-c9aa-4f1e-9aaf-0d92b28d7362","Type":"ContainerDied","Data":"b6075a60d0588ab6384967f796d19e90797092838f29dca692e35dcc7a076930"}
Jan 22 06:13:10 crc kubenswrapper[4982]: I0122 06:13:10.743686 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8tjwn" event={"ID":"59d491cf-c9aa-4f1e-9aaf-0d92b28d7362","Type":"ContainerStarted","Data":"808747365a4408bdd8f14959b9a92dbda405f118de1a655ee5611dad11349bc8"}
Jan 22 06:13:10 crc kubenswrapper[4982]: I0122 06:13:10.783268 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8tjwn" podStartSLOduration=4.33677688 podStartE2EDuration="5.783229935s" podCreationTimestamp="2026-01-22 06:13:05 +0000 UTC" firstStartedPulling="2026-01-22 06:13:08.702844462 +0000 UTC m=+1649.541482475" lastFinishedPulling="2026-01-22 06:13:10.149297497 +0000 UTC m=+1650.987935530" observedRunningTime="2026-01-22 06:13:10.775241215 +0000 UTC m=+1651.613879228" watchObservedRunningTime="2026-01-22 06:13:10.783229935 +0000 UTC m=+1651.621867978"
lastFinishedPulling="2026-01-22 06:13:10.149297497 +0000 UTC m=+1650.987935530" observedRunningTime="2026-01-22 06:13:10.775241215 +0000 UTC m=+1651.613879228" watchObservedRunningTime="2026-01-22 06:13:10.783229935 +0000 UTC m=+1651.621867978" Jan 22 06:13:15 crc kubenswrapper[4982]: I0122 06:13:15.741910 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8tjwn" Jan 22 06:13:15 crc kubenswrapper[4982]: I0122 06:13:15.742334 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8tjwn" Jan 22 06:13:15 crc kubenswrapper[4982]: I0122 06:13:15.806681 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8tjwn" Jan 22 06:13:15 crc kubenswrapper[4982]: I0122 06:13:15.888587 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8tjwn" Jan 22 06:13:16 crc kubenswrapper[4982]: I0122 06:13:16.071954 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8tjwn"] Jan 22 06:13:17 crc kubenswrapper[4982]: I0122 06:13:17.807038 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-8tjwn" podUID="59d491cf-c9aa-4f1e-9aaf-0d92b28d7362" containerName="registry-server" containerID="cri-o://808747365a4408bdd8f14959b9a92dbda405f118de1a655ee5611dad11349bc8" gracePeriod=2 Jan 22 06:13:18 crc kubenswrapper[4982]: I0122 06:13:18.816162 4982 generic.go:334] "Generic (PLEG): container finished" podID="59d491cf-c9aa-4f1e-9aaf-0d92b28d7362" containerID="808747365a4408bdd8f14959b9a92dbda405f118de1a655ee5611dad11349bc8" exitCode=0 Jan 22 06:13:18 crc kubenswrapper[4982]: I0122 06:13:18.816180 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8tjwn" event={"ID":"59d491cf-c9aa-4f1e-9aaf-0d92b28d7362","Type":"ContainerDied","Data":"808747365a4408bdd8f14959b9a92dbda405f118de1a655ee5611dad11349bc8"} Jan 22 06:13:18 crc kubenswrapper[4982]: I0122 06:13:18.816656 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8tjwn" event={"ID":"59d491cf-c9aa-4f1e-9aaf-0d92b28d7362","Type":"ContainerDied","Data":"dd1ea214c1341553ed44f80ad00e37f025b9187017b6e5f729f3419601d49d41"} Jan 22 06:13:18 crc kubenswrapper[4982]: I0122 06:13:18.816685 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dd1ea214c1341553ed44f80ad00e37f025b9187017b6e5f729f3419601d49d41" Jan 22 06:13:18 crc kubenswrapper[4982]: I0122 06:13:18.830444 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8tjwn" Jan 22 06:13:18 crc kubenswrapper[4982]: I0122 06:13:18.932537 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/59d491cf-c9aa-4f1e-9aaf-0d92b28d7362-catalog-content\") pod \"59d491cf-c9aa-4f1e-9aaf-0d92b28d7362\" (UID: \"59d491cf-c9aa-4f1e-9aaf-0d92b28d7362\") " Jan 22 06:13:18 crc kubenswrapper[4982]: I0122 06:13:18.932594 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/59d491cf-c9aa-4f1e-9aaf-0d92b28d7362-utilities\") pod \"59d491cf-c9aa-4f1e-9aaf-0d92b28d7362\" (UID: \"59d491cf-c9aa-4f1e-9aaf-0d92b28d7362\") " Jan 22 06:13:18 crc kubenswrapper[4982]: I0122 06:13:18.932772 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-625jv\" (UniqueName: \"kubernetes.io/projected/59d491cf-c9aa-4f1e-9aaf-0d92b28d7362-kube-api-access-625jv\") pod \"59d491cf-c9aa-4f1e-9aaf-0d92b28d7362\" (UID: \"59d491cf-c9aa-4f1e-9aaf-0d92b28d7362\") " Jan 22 06:13:18 crc kubenswrapper[4982]: I0122 06:13:18.934482 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/59d491cf-c9aa-4f1e-9aaf-0d92b28d7362-utilities" (OuterVolumeSpecName: "utilities") pod "59d491cf-c9aa-4f1e-9aaf-0d92b28d7362" (UID: "59d491cf-c9aa-4f1e-9aaf-0d92b28d7362"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:13:18 crc kubenswrapper[4982]: I0122 06:13:18.940266 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59d491cf-c9aa-4f1e-9aaf-0d92b28d7362-kube-api-access-625jv" (OuterVolumeSpecName: "kube-api-access-625jv") pod "59d491cf-c9aa-4f1e-9aaf-0d92b28d7362" (UID: "59d491cf-c9aa-4f1e-9aaf-0d92b28d7362"). InnerVolumeSpecName "kube-api-access-625jv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:13:18 crc kubenswrapper[4982]: I0122 06:13:18.966824 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/59d491cf-c9aa-4f1e-9aaf-0d92b28d7362-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "59d491cf-c9aa-4f1e-9aaf-0d92b28d7362" (UID: "59d491cf-c9aa-4f1e-9aaf-0d92b28d7362"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:13:18 crc kubenswrapper[4982]: I0122 06:13:18.974234 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 06:13:18 crc kubenswrapper[4982]: I0122 06:13:18.974299 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 06:13:19 crc kubenswrapper[4982]: I0122 06:13:19.035386 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-625jv\" (UniqueName: \"kubernetes.io/projected/59d491cf-c9aa-4f1e-9aaf-0d92b28d7362-kube-api-access-625jv\") on node \"crc\" DevicePath \"\"" Jan 22 06:13:19 crc kubenswrapper[4982]: I0122 06:13:19.035445 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/59d491cf-c9aa-4f1e-9aaf-0d92b28d7362-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 06:13:19 crc kubenswrapper[4982]: I0122 06:13:19.035884 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/59d491cf-c9aa-4f1e-9aaf-0d92b28d7362-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 06:13:19 crc kubenswrapper[4982]: I0122 06:13:19.833979 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8tjwn" Jan 22 06:13:19 crc kubenswrapper[4982]: I0122 06:13:19.873473 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8tjwn"] Jan 22 06:13:19 crc kubenswrapper[4982]: I0122 06:13:19.886869 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-8tjwn"] Jan 22 06:13:21 crc kubenswrapper[4982]: I0122 06:13:21.735051 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59d491cf-c9aa-4f1e-9aaf-0d92b28d7362" path="/var/lib/kubelet/pods/59d491cf-c9aa-4f1e-9aaf-0d92b28d7362/volumes" Jan 22 06:13:48 crc kubenswrapper[4982]: I0122 06:13:48.974298 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 06:13:48 crc kubenswrapper[4982]: I0122 06:13:48.976728 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 06:13:48 crc kubenswrapper[4982]: I0122 06:13:48.976916 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 06:13:48 crc kubenswrapper[4982]: I0122 06:13:48.978593 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"bd26b2fce69a0bbe494a0b167df0474522baf189df9031f3cbf4529bc59d8ad2"} pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 06:13:48 crc kubenswrapper[4982]: I0122 06:13:48.978723 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" containerID="cri-o://bd26b2fce69a0bbe494a0b167df0474522baf189df9031f3cbf4529bc59d8ad2" gracePeriod=600 Jan 22 06:13:49 crc kubenswrapper[4982]: E0122 06:13:49.111549 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:13:49 crc kubenswrapper[4982]: I0122 06:13:49.124932 4982 generic.go:334] "Generic (PLEG): container finished" podID="2829369e-72ba-4637-853b-88f5cf242a0e" containerID="bd26b2fce69a0bbe494a0b167df0474522baf189df9031f3cbf4529bc59d8ad2" exitCode=0 Jan 22 06:13:49 crc kubenswrapper[4982]: I0122 06:13:49.125016 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerDied","Data":"bd26b2fce69a0bbe494a0b167df0474522baf189df9031f3cbf4529bc59d8ad2"} Jan 22 06:13:49 crc kubenswrapper[4982]: I0122 06:13:49.125337 4982 scope.go:117] "RemoveContainer" containerID="7453d7468d19fce4345431851e8d2b69d7c8b233d592ee6ece86707b120d7771" Jan 22 06:13:49 crc kubenswrapper[4982]: I0122 06:13:49.126154 4982 scope.go:117] "RemoveContainer" containerID="bd26b2fce69a0bbe494a0b167df0474522baf189df9031f3cbf4529bc59d8ad2" Jan 22 06:13:49 crc kubenswrapper[4982]: E0122 06:13:49.126677 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:13:59 crc kubenswrapper[4982]: I0122 06:13:59.480417 4982 scope.go:117] "RemoveContainer" containerID="3981b59494056203a498dcf4f7d08ba27e65320accdde947f19aa23111fff153" Jan 22 06:14:03 crc kubenswrapper[4982]: I0122 06:14:03.723057 4982 scope.go:117] "RemoveContainer" containerID="bd26b2fce69a0bbe494a0b167df0474522baf189df9031f3cbf4529bc59d8ad2" Jan 22 06:14:03 crc kubenswrapper[4982]: E0122 06:14:03.729779 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:14:18 crc kubenswrapper[4982]: I0122 06:14:18.719925 4982 scope.go:117] "RemoveContainer" 
containerID="bd26b2fce69a0bbe494a0b167df0474522baf189df9031f3cbf4529bc59d8ad2" Jan 22 06:14:18 crc kubenswrapper[4982]: E0122 06:14:18.721056 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:14:31 crc kubenswrapper[4982]: I0122 06:14:31.719724 4982 scope.go:117] "RemoveContainer" containerID="bd26b2fce69a0bbe494a0b167df0474522baf189df9031f3cbf4529bc59d8ad2" Jan 22 06:14:31 crc kubenswrapper[4982]: E0122 06:14:31.720578 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:14:46 crc kubenswrapper[4982]: I0122 06:14:46.719612 4982 scope.go:117] "RemoveContainer" containerID="bd26b2fce69a0bbe494a0b167df0474522baf189df9031f3cbf4529bc59d8ad2" Jan 22 06:14:46 crc kubenswrapper[4982]: E0122 06:14:46.720646 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:14:57 crc kubenswrapper[4982]: I0122 06:14:57.719692 4982 scope.go:117] "RemoveContainer" containerID="bd26b2fce69a0bbe494a0b167df0474522baf189df9031f3cbf4529bc59d8ad2" Jan 22 06:14:57 crc kubenswrapper[4982]: E0122 06:14:57.720418 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:14:59 crc kubenswrapper[4982]: I0122 06:14:59.573266 4982 scope.go:117] "RemoveContainer" containerID="8c0bf9c9c622b073e5e691a41bfe671bee3ec8a34d459fc686856f0e0eef9678" Jan 22 06:14:59 crc kubenswrapper[4982]: I0122 06:14:59.614866 4982 scope.go:117] "RemoveContainer" containerID="e10d5991711ac7163d6ba6df5bcfcc6f76d125dba4e9363a2de9e4bb3ba354b1" Jan 22 06:14:59 crc kubenswrapper[4982]: I0122 06:14:59.647806 4982 scope.go:117] "RemoveContainer" containerID="8da7b8a952ce6725bd6a1fdde48ae9f4e266b5c3da9591407a37d95476e23e5c" Jan 22 06:14:59 crc kubenswrapper[4982]: I0122 06:14:59.681833 4982 scope.go:117] "RemoveContainer" containerID="ea9f4629d193aa5031d52f0fba91720a98d8fd32d9a98e1f798ded1fb349ec0d" Jan 22 06:14:59 crc kubenswrapper[4982]: I0122 06:14:59.736399 4982 scope.go:117] "RemoveContainer" containerID="5dc16f0c54d8ae6a28c68a312b07df0a689510c87837e0a4b7e78b5b759f00ea" Jan 22 06:14:59 crc kubenswrapper[4982]: I0122 06:14:59.767067 4982 
scope.go:117] "RemoveContainer" containerID="194a13dd0cc2fbf71ccad2e58ec061d6e3b65c7e8864d90cde9282b9aa6f28ee" Jan 22 06:14:59 crc kubenswrapper[4982]: I0122 06:14:59.784385 4982 scope.go:117] "RemoveContainer" containerID="baa68c4b84e0bd3b726cd5648b5cb36eaef6c60fc2ea86a61307f20df71f6fca" Jan 22 06:14:59 crc kubenswrapper[4982]: I0122 06:14:59.847575 4982 scope.go:117] "RemoveContainer" containerID="eb0c40ec25acf7dd254ab07ffed2db26083e872714d82529004cbcb0eb6f9efc" Jan 22 06:14:59 crc kubenswrapper[4982]: I0122 06:14:59.863343 4982 scope.go:117] "RemoveContainer" containerID="52150f5fa6e18d846be10060dab5d01e9c21ff0a57b517612e0f3a5ddd979624" Jan 22 06:14:59 crc kubenswrapper[4982]: I0122 06:14:59.897801 4982 scope.go:117] "RemoveContainer" containerID="726972a3f7460ea2bdb05ff75bb24b29dea55fb5c63cd73a84097c5546b25300" Jan 22 06:14:59 crc kubenswrapper[4982]: I0122 06:14:59.913125 4982 scope.go:117] "RemoveContainer" containerID="4e93f63aba0b979e2191d5bd9df573e67630655b32c7ca1e799256e191df8d37" Jan 22 06:15:00 crc kubenswrapper[4982]: I0122 06:15:00.159999 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484375-79vqz"] Jan 22 06:15:00 crc kubenswrapper[4982]: E0122 06:15:00.160287 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59d491cf-c9aa-4f1e-9aaf-0d92b28d7362" containerName="extract-utilities" Jan 22 06:15:00 crc kubenswrapper[4982]: I0122 06:15:00.160304 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="59d491cf-c9aa-4f1e-9aaf-0d92b28d7362" containerName="extract-utilities" Jan 22 06:15:00 crc kubenswrapper[4982]: E0122 06:15:00.160328 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59d491cf-c9aa-4f1e-9aaf-0d92b28d7362" containerName="extract-content" Jan 22 06:15:00 crc kubenswrapper[4982]: I0122 06:15:00.160335 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="59d491cf-c9aa-4f1e-9aaf-0d92b28d7362" containerName="extract-content" Jan 22 06:15:00 crc kubenswrapper[4982]: E0122 06:15:00.160344 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59d491cf-c9aa-4f1e-9aaf-0d92b28d7362" containerName="registry-server" Jan 22 06:15:00 crc kubenswrapper[4982]: I0122 06:15:00.160352 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="59d491cf-c9aa-4f1e-9aaf-0d92b28d7362" containerName="registry-server" Jan 22 06:15:00 crc kubenswrapper[4982]: I0122 06:15:00.160521 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="59d491cf-c9aa-4f1e-9aaf-0d92b28d7362" containerName="registry-server" Jan 22 06:15:00 crc kubenswrapper[4982]: I0122 06:15:00.161098 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484375-79vqz" Jan 22 06:15:00 crc kubenswrapper[4982]: I0122 06:15:00.165550 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 22 06:15:00 crc kubenswrapper[4982]: I0122 06:15:00.165779 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 22 06:15:00 crc kubenswrapper[4982]: I0122 06:15:00.299691 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484375-79vqz"] Jan 22 06:15:00 crc kubenswrapper[4982]: I0122 06:15:00.396426 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wsxr6\" (UniqueName: \"kubernetes.io/projected/2b867dfc-0973-4a91-b2f7-6171931673d5-kube-api-access-wsxr6\") pod \"collect-profiles-29484375-79vqz\" (UID: \"2b867dfc-0973-4a91-b2f7-6171931673d5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484375-79vqz" Jan 22 06:15:00 crc kubenswrapper[4982]: I0122 06:15:00.396492 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2b867dfc-0973-4a91-b2f7-6171931673d5-config-volume\") pod \"collect-profiles-29484375-79vqz\" (UID: \"2b867dfc-0973-4a91-b2f7-6171931673d5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484375-79vqz" Jan 22 06:15:00 crc kubenswrapper[4982]: I0122 06:15:00.396520 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2b867dfc-0973-4a91-b2f7-6171931673d5-secret-volume\") pod \"collect-profiles-29484375-79vqz\" (UID: \"2b867dfc-0973-4a91-b2f7-6171931673d5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484375-79vqz" Jan 22 06:15:00 crc kubenswrapper[4982]: I0122 06:15:00.499743 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wsxr6\" (UniqueName: \"kubernetes.io/projected/2b867dfc-0973-4a91-b2f7-6171931673d5-kube-api-access-wsxr6\") pod \"collect-profiles-29484375-79vqz\" (UID: \"2b867dfc-0973-4a91-b2f7-6171931673d5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484375-79vqz" Jan 22 06:15:00 crc kubenswrapper[4982]: I0122 06:15:00.499830 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2b867dfc-0973-4a91-b2f7-6171931673d5-config-volume\") pod \"collect-profiles-29484375-79vqz\" (UID: \"2b867dfc-0973-4a91-b2f7-6171931673d5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484375-79vqz" Jan 22 06:15:00 crc kubenswrapper[4982]: I0122 06:15:00.499878 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2b867dfc-0973-4a91-b2f7-6171931673d5-secret-volume\") pod \"collect-profiles-29484375-79vqz\" (UID: \"2b867dfc-0973-4a91-b2f7-6171931673d5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484375-79vqz" Jan 22 06:15:00 crc kubenswrapper[4982]: I0122 06:15:00.502773 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2b867dfc-0973-4a91-b2f7-6171931673d5-config-volume\") pod 
\"collect-profiles-29484375-79vqz\" (UID: \"2b867dfc-0973-4a91-b2f7-6171931673d5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484375-79vqz" Jan 22 06:15:00 crc kubenswrapper[4982]: I0122 06:15:00.508033 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2b867dfc-0973-4a91-b2f7-6171931673d5-secret-volume\") pod \"collect-profiles-29484375-79vqz\" (UID: \"2b867dfc-0973-4a91-b2f7-6171931673d5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484375-79vqz" Jan 22 06:15:00 crc kubenswrapper[4982]: I0122 06:15:00.522207 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wsxr6\" (UniqueName: \"kubernetes.io/projected/2b867dfc-0973-4a91-b2f7-6171931673d5-kube-api-access-wsxr6\") pod \"collect-profiles-29484375-79vqz\" (UID: \"2b867dfc-0973-4a91-b2f7-6171931673d5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484375-79vqz" Jan 22 06:15:00 crc kubenswrapper[4982]: I0122 06:15:00.779660 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484375-79vqz" Jan 22 06:15:01 crc kubenswrapper[4982]: I0122 06:15:01.194872 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484375-79vqz"] Jan 22 06:15:01 crc kubenswrapper[4982]: I0122 06:15:01.775604 4982 generic.go:334] "Generic (PLEG): container finished" podID="2b867dfc-0973-4a91-b2f7-6171931673d5" containerID="75c8df85e10d23282ba8c88a36c747457aca29e85c8fb1a5525fbb0ae643a2d6" exitCode=0 Jan 22 06:15:01 crc kubenswrapper[4982]: I0122 06:15:01.775686 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484375-79vqz" event={"ID":"2b867dfc-0973-4a91-b2f7-6171931673d5","Type":"ContainerDied","Data":"75c8df85e10d23282ba8c88a36c747457aca29e85c8fb1a5525fbb0ae643a2d6"} Jan 22 06:15:01 crc kubenswrapper[4982]: I0122 06:15:01.775893 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484375-79vqz" event={"ID":"2b867dfc-0973-4a91-b2f7-6171931673d5","Type":"ContainerStarted","Data":"cd97ef3ffc4eb7536723613ccc839efe5520067fc6d80c0cf061c30effa3940f"} Jan 22 06:15:03 crc kubenswrapper[4982]: I0122 06:15:03.170445 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484375-79vqz" Jan 22 06:15:03 crc kubenswrapper[4982]: I0122 06:15:03.337335 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wsxr6\" (UniqueName: \"kubernetes.io/projected/2b867dfc-0973-4a91-b2f7-6171931673d5-kube-api-access-wsxr6\") pod \"2b867dfc-0973-4a91-b2f7-6171931673d5\" (UID: \"2b867dfc-0973-4a91-b2f7-6171931673d5\") " Jan 22 06:15:03 crc kubenswrapper[4982]: I0122 06:15:03.337429 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2b867dfc-0973-4a91-b2f7-6171931673d5-config-volume\") pod \"2b867dfc-0973-4a91-b2f7-6171931673d5\" (UID: \"2b867dfc-0973-4a91-b2f7-6171931673d5\") " Jan 22 06:15:03 crc kubenswrapper[4982]: I0122 06:15:03.337481 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2b867dfc-0973-4a91-b2f7-6171931673d5-secret-volume\") pod \"2b867dfc-0973-4a91-b2f7-6171931673d5\" (UID: \"2b867dfc-0973-4a91-b2f7-6171931673d5\") " Jan 22 06:15:03 crc kubenswrapper[4982]: I0122 06:15:03.338706 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b867dfc-0973-4a91-b2f7-6171931673d5-config-volume" (OuterVolumeSpecName: "config-volume") pod "2b867dfc-0973-4a91-b2f7-6171931673d5" (UID: "2b867dfc-0973-4a91-b2f7-6171931673d5"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 06:15:03 crc kubenswrapper[4982]: I0122 06:15:03.343429 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b867dfc-0973-4a91-b2f7-6171931673d5-kube-api-access-wsxr6" (OuterVolumeSpecName: "kube-api-access-wsxr6") pod "2b867dfc-0973-4a91-b2f7-6171931673d5" (UID: "2b867dfc-0973-4a91-b2f7-6171931673d5"). InnerVolumeSpecName "kube-api-access-wsxr6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:15:03 crc kubenswrapper[4982]: I0122 06:15:03.343625 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b867dfc-0973-4a91-b2f7-6171931673d5-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2b867dfc-0973-4a91-b2f7-6171931673d5" (UID: "2b867dfc-0973-4a91-b2f7-6171931673d5"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:15:03 crc kubenswrapper[4982]: I0122 06:15:03.439102 4982 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2b867dfc-0973-4a91-b2f7-6171931673d5-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 06:15:03 crc kubenswrapper[4982]: I0122 06:15:03.439139 4982 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2b867dfc-0973-4a91-b2f7-6171931673d5-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 22 06:15:03 crc kubenswrapper[4982]: I0122 06:15:03.439149 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wsxr6\" (UniqueName: \"kubernetes.io/projected/2b867dfc-0973-4a91-b2f7-6171931673d5-kube-api-access-wsxr6\") on node \"crc\" DevicePath \"\"" Jan 22 06:15:03 crc kubenswrapper[4982]: I0122 06:15:03.794679 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484375-79vqz" event={"ID":"2b867dfc-0973-4a91-b2f7-6171931673d5","Type":"ContainerDied","Data":"cd97ef3ffc4eb7536723613ccc839efe5520067fc6d80c0cf061c30effa3940f"} Jan 22 06:15:03 crc kubenswrapper[4982]: I0122 06:15:03.794727 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cd97ef3ffc4eb7536723613ccc839efe5520067fc6d80c0cf061c30effa3940f" Jan 22 06:15:03 crc kubenswrapper[4982]: I0122 06:15:03.794800 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484375-79vqz" Jan 22 06:15:11 crc kubenswrapper[4982]: I0122 06:15:11.719826 4982 scope.go:117] "RemoveContainer" containerID="bd26b2fce69a0bbe494a0b167df0474522baf189df9031f3cbf4529bc59d8ad2" Jan 22 06:15:11 crc kubenswrapper[4982]: E0122 06:15:11.720946 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:15:25 crc kubenswrapper[4982]: I0122 06:15:25.719305 4982 scope.go:117] "RemoveContainer" containerID="bd26b2fce69a0bbe494a0b167df0474522baf189df9031f3cbf4529bc59d8ad2" Jan 22 06:15:25 crc kubenswrapper[4982]: E0122 06:15:25.720080 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:15:39 crc kubenswrapper[4982]: I0122 06:15:39.725129 4982 scope.go:117] "RemoveContainer" containerID="bd26b2fce69a0bbe494a0b167df0474522baf189df9031f3cbf4529bc59d8ad2" Jan 22 06:15:39 crc kubenswrapper[4982]: E0122 06:15:39.725797 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:15:54 crc kubenswrapper[4982]: I0122 06:15:54.721118 4982 scope.go:117] "RemoveContainer" containerID="bd26b2fce69a0bbe494a0b167df0474522baf189df9031f3cbf4529bc59d8ad2" Jan 22 06:15:54 crc kubenswrapper[4982]: E0122 06:15:54.722446 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:16:00 crc kubenswrapper[4982]: I0122 06:16:00.086959 4982 scope.go:117] "RemoveContainer" containerID="ac4f72c1c8b619faa864b7a4128f74ea5d0ae3cb46983cd3fb19b4ca11cd328d" Jan 22 06:16:05 crc kubenswrapper[4982]: I0122 06:16:05.718725 4982 scope.go:117] "RemoveContainer" containerID="bd26b2fce69a0bbe494a0b167df0474522baf189df9031f3cbf4529bc59d8ad2" Jan 22 06:16:05 crc kubenswrapper[4982]: E0122 06:16:05.719767 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:16:19 crc kubenswrapper[4982]: I0122 06:16:19.748963 4982 scope.go:117] "RemoveContainer" containerID="bd26b2fce69a0bbe494a0b167df0474522baf189df9031f3cbf4529bc59d8ad2" Jan 22 06:16:19 crc kubenswrapper[4982]: E0122 06:16:19.751966 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:16:31 crc kubenswrapper[4982]: I0122 06:16:31.720735 4982 scope.go:117] "RemoveContainer" containerID="bd26b2fce69a0bbe494a0b167df0474522baf189df9031f3cbf4529bc59d8ad2" Jan 22 06:16:31 crc kubenswrapper[4982]: E0122 06:16:31.722102 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:16:45 crc kubenswrapper[4982]: I0122 06:16:45.719944 4982 scope.go:117] "RemoveContainer" containerID="bd26b2fce69a0bbe494a0b167df0474522baf189df9031f3cbf4529bc59d8ad2" Jan 22 06:16:45 crc kubenswrapper[4982]: E0122 06:16:45.721209 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:16:57 crc kubenswrapper[4982]: I0122 06:16:57.719228 4982 scope.go:117] "RemoveContainer" containerID="bd26b2fce69a0bbe494a0b167df0474522baf189df9031f3cbf4529bc59d8ad2" Jan 22 06:16:57 crc kubenswrapper[4982]: E0122 06:16:57.720429 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:17:08 crc kubenswrapper[4982]: I0122 06:17:08.719509 4982 scope.go:117] "RemoveContainer" containerID="bd26b2fce69a0bbe494a0b167df0474522baf189df9031f3cbf4529bc59d8ad2" Jan 22 06:17:08 crc kubenswrapper[4982]: E0122 06:17:08.720547 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:17:21 crc kubenswrapper[4982]: I0122 06:17:21.726989 4982 scope.go:117] "RemoveContainer" containerID="bd26b2fce69a0bbe494a0b167df0474522baf189df9031f3cbf4529bc59d8ad2" Jan 22 06:17:21 crc kubenswrapper[4982]: E0122 06:17:21.728250 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:17:32 crc kubenswrapper[4982]: I0122 06:17:32.719628 4982 scope.go:117] "RemoveContainer" containerID="bd26b2fce69a0bbe494a0b167df0474522baf189df9031f3cbf4529bc59d8ad2" Jan 22 06:17:32 crc kubenswrapper[4982]: E0122 06:17:32.720740 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:17:44 crc kubenswrapper[4982]: I0122 06:17:44.719303 4982 scope.go:117] "RemoveContainer" containerID="bd26b2fce69a0bbe494a0b167df0474522baf189df9031f3cbf4529bc59d8ad2" Jan 22 06:17:44 crc kubenswrapper[4982]: E0122 06:17:44.720356 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:17:59 crc kubenswrapper[4982]: I0122 06:17:59.723217 4982 
scope.go:117] "RemoveContainer" containerID="bd26b2fce69a0bbe494a0b167df0474522baf189df9031f3cbf4529bc59d8ad2" Jan 22 06:17:59 crc kubenswrapper[4982]: E0122 06:17:59.724024 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:18:13 crc kubenswrapper[4982]: I0122 06:18:13.719564 4982 scope.go:117] "RemoveContainer" containerID="bd26b2fce69a0bbe494a0b167df0474522baf189df9031f3cbf4529bc59d8ad2" Jan 22 06:18:13 crc kubenswrapper[4982]: E0122 06:18:13.720310 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:18:28 crc kubenswrapper[4982]: I0122 06:18:28.719416 4982 scope.go:117] "RemoveContainer" containerID="bd26b2fce69a0bbe494a0b167df0474522baf189df9031f3cbf4529bc59d8ad2" Jan 22 06:18:28 crc kubenswrapper[4982]: E0122 06:18:28.720605 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:18:42 crc kubenswrapper[4982]: I0122 06:18:42.720095 4982 scope.go:117] "RemoveContainer" containerID="bd26b2fce69a0bbe494a0b167df0474522baf189df9031f3cbf4529bc59d8ad2" Jan 22 06:18:42 crc kubenswrapper[4982]: E0122 06:18:42.720989 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:18:53 crc kubenswrapper[4982]: I0122 06:18:53.719939 4982 scope.go:117] "RemoveContainer" containerID="bd26b2fce69a0bbe494a0b167df0474522baf189df9031f3cbf4529bc59d8ad2" Jan 22 06:18:54 crc kubenswrapper[4982]: I0122 06:18:54.842476 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"8331e5fc2740e88c3621d8b716829aafc346943b6dd36e5c79bdcce5649079ce"} Jan 22 06:19:39 crc kubenswrapper[4982]: I0122 06:19:39.389101 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-n88xt"] Jan 22 06:19:39 crc kubenswrapper[4982]: E0122 06:19:39.389904 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b867dfc-0973-4a91-b2f7-6171931673d5" containerName="collect-profiles" Jan 22 06:19:39 crc kubenswrapper[4982]: 
I0122 06:19:39.389919 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b867dfc-0973-4a91-b2f7-6171931673d5" containerName="collect-profiles" Jan 22 06:19:39 crc kubenswrapper[4982]: I0122 06:19:39.390070 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b867dfc-0973-4a91-b2f7-6171931673d5" containerName="collect-profiles" Jan 22 06:19:39 crc kubenswrapper[4982]: I0122 06:19:39.391266 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n88xt" Jan 22 06:19:39 crc kubenswrapper[4982]: I0122 06:19:39.401703 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-n88xt"] Jan 22 06:19:39 crc kubenswrapper[4982]: I0122 06:19:39.477964 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hhdws\" (UniqueName: \"kubernetes.io/projected/4bf09413-e530-4cc4-9350-461c49190aea-kube-api-access-hhdws\") pod \"community-operators-n88xt\" (UID: \"4bf09413-e530-4cc4-9350-461c49190aea\") " pod="openshift-marketplace/community-operators-n88xt" Jan 22 06:19:39 crc kubenswrapper[4982]: I0122 06:19:39.478064 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bf09413-e530-4cc4-9350-461c49190aea-catalog-content\") pod \"community-operators-n88xt\" (UID: \"4bf09413-e530-4cc4-9350-461c49190aea\") " pod="openshift-marketplace/community-operators-n88xt" Jan 22 06:19:39 crc kubenswrapper[4982]: I0122 06:19:39.478085 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bf09413-e530-4cc4-9350-461c49190aea-utilities\") pod \"community-operators-n88xt\" (UID: \"4bf09413-e530-4cc4-9350-461c49190aea\") " pod="openshift-marketplace/community-operators-n88xt" Jan 22 06:19:39 crc kubenswrapper[4982]: I0122 06:19:39.579236 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bf09413-e530-4cc4-9350-461c49190aea-catalog-content\") pod \"community-operators-n88xt\" (UID: \"4bf09413-e530-4cc4-9350-461c49190aea\") " pod="openshift-marketplace/community-operators-n88xt" Jan 22 06:19:39 crc kubenswrapper[4982]: I0122 06:19:39.579289 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bf09413-e530-4cc4-9350-461c49190aea-utilities\") pod \"community-operators-n88xt\" (UID: \"4bf09413-e530-4cc4-9350-461c49190aea\") " pod="openshift-marketplace/community-operators-n88xt" Jan 22 06:19:39 crc kubenswrapper[4982]: I0122 06:19:39.579396 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hhdws\" (UniqueName: \"kubernetes.io/projected/4bf09413-e530-4cc4-9350-461c49190aea-kube-api-access-hhdws\") pod \"community-operators-n88xt\" (UID: \"4bf09413-e530-4cc4-9350-461c49190aea\") " pod="openshift-marketplace/community-operators-n88xt" Jan 22 06:19:39 crc kubenswrapper[4982]: I0122 06:19:39.579737 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bf09413-e530-4cc4-9350-461c49190aea-catalog-content\") pod \"community-operators-n88xt\" (UID: \"4bf09413-e530-4cc4-9350-461c49190aea\") " pod="openshift-marketplace/community-operators-n88xt" Jan 
22 06:19:39 crc kubenswrapper[4982]: I0122 06:19:39.579833 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bf09413-e530-4cc4-9350-461c49190aea-utilities\") pod \"community-operators-n88xt\" (UID: \"4bf09413-e530-4cc4-9350-461c49190aea\") " pod="openshift-marketplace/community-operators-n88xt" Jan 22 06:19:39 crc kubenswrapper[4982]: I0122 06:19:39.600075 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hhdws\" (UniqueName: \"kubernetes.io/projected/4bf09413-e530-4cc4-9350-461c49190aea-kube-api-access-hhdws\") pod \"community-operators-n88xt\" (UID: \"4bf09413-e530-4cc4-9350-461c49190aea\") " pod="openshift-marketplace/community-operators-n88xt" Jan 22 06:19:39 crc kubenswrapper[4982]: I0122 06:19:39.747774 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n88xt" Jan 22 06:19:40 crc kubenswrapper[4982]: I0122 06:19:40.350570 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-n88xt"] Jan 22 06:19:41 crc kubenswrapper[4982]: I0122 06:19:41.246492 4982 generic.go:334] "Generic (PLEG): container finished" podID="4bf09413-e530-4cc4-9350-461c49190aea" containerID="9bcb3918badd4cd0e5da4d6419e55d544162f8bd996b4b809540ef00fe29f1c7" exitCode=0 Jan 22 06:19:41 crc kubenswrapper[4982]: I0122 06:19:41.246566 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n88xt" event={"ID":"4bf09413-e530-4cc4-9350-461c49190aea","Type":"ContainerDied","Data":"9bcb3918badd4cd0e5da4d6419e55d544162f8bd996b4b809540ef00fe29f1c7"} Jan 22 06:19:41 crc kubenswrapper[4982]: I0122 06:19:41.246814 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n88xt" event={"ID":"4bf09413-e530-4cc4-9350-461c49190aea","Type":"ContainerStarted","Data":"060b3fd93aa0b2fc10326190e1279f1ad240c23e13855477cc4b60f04f8874a0"} Jan 22 06:19:41 crc kubenswrapper[4982]: I0122 06:19:41.250815 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 06:19:43 crc kubenswrapper[4982]: I0122 06:19:43.266326 4982 generic.go:334] "Generic (PLEG): container finished" podID="4bf09413-e530-4cc4-9350-461c49190aea" containerID="04d8d67002bd504446c1caa5ed0177c0feb8a770c6981fcc1bd17363f2790982" exitCode=0 Jan 22 06:19:43 crc kubenswrapper[4982]: I0122 06:19:43.266437 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n88xt" event={"ID":"4bf09413-e530-4cc4-9350-461c49190aea","Type":"ContainerDied","Data":"04d8d67002bd504446c1caa5ed0177c0feb8a770c6981fcc1bd17363f2790982"} Jan 22 06:19:44 crc kubenswrapper[4982]: I0122 06:19:44.278254 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n88xt" event={"ID":"4bf09413-e530-4cc4-9350-461c49190aea","Type":"ContainerStarted","Data":"1cae47babfdcc0c835ebf7b9dfae25a9ed458d2342fc177f1a0a931e53885ab1"} Jan 22 06:19:44 crc kubenswrapper[4982]: I0122 06:19:44.298758 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-n88xt" podStartSLOduration=2.761358918 podStartE2EDuration="5.29873354s" podCreationTimestamp="2026-01-22 06:19:39 +0000 UTC" firstStartedPulling="2026-01-22 06:19:41.250373674 +0000 UTC m=+2042.089011717" lastFinishedPulling="2026-01-22 06:19:43.787748286 +0000 
UTC m=+2044.626386339" observedRunningTime="2026-01-22 06:19:44.295178505 +0000 UTC m=+2045.133816518" watchObservedRunningTime="2026-01-22 06:19:44.29873354 +0000 UTC m=+2045.137371553" Jan 22 06:19:49 crc kubenswrapper[4982]: I0122 06:19:49.748590 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-n88xt" Jan 22 06:19:49 crc kubenswrapper[4982]: I0122 06:19:49.749168 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-n88xt" Jan 22 06:19:49 crc kubenswrapper[4982]: I0122 06:19:49.818754 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-n88xt" Jan 22 06:19:50 crc kubenswrapper[4982]: I0122 06:19:50.360626 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-n88xt" Jan 22 06:19:50 crc kubenswrapper[4982]: I0122 06:19:50.409062 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-n88xt"] Jan 22 06:19:52 crc kubenswrapper[4982]: I0122 06:19:52.337522 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-n88xt" podUID="4bf09413-e530-4cc4-9350-461c49190aea" containerName="registry-server" containerID="cri-o://1cae47babfdcc0c835ebf7b9dfae25a9ed458d2342fc177f1a0a931e53885ab1" gracePeriod=2 Jan 22 06:19:52 crc kubenswrapper[4982]: I0122 06:19:52.475398 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-jb6q9"] Jan 22 06:19:52 crc kubenswrapper[4982]: I0122 06:19:52.477621 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jb6q9" Jan 22 06:19:52 crc kubenswrapper[4982]: I0122 06:19:52.493600 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jb6q9"] Jan 22 06:19:52 crc kubenswrapper[4982]: I0122 06:19:52.570764 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f09418fd-cea0-4427-ba1d-7475ec00e9c2-catalog-content\") pod \"redhat-operators-jb6q9\" (UID: \"f09418fd-cea0-4427-ba1d-7475ec00e9c2\") " pod="openshift-marketplace/redhat-operators-jb6q9" Jan 22 06:19:52 crc kubenswrapper[4982]: I0122 06:19:52.570919 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4xft\" (UniqueName: \"kubernetes.io/projected/f09418fd-cea0-4427-ba1d-7475ec00e9c2-kube-api-access-s4xft\") pod \"redhat-operators-jb6q9\" (UID: \"f09418fd-cea0-4427-ba1d-7475ec00e9c2\") " pod="openshift-marketplace/redhat-operators-jb6q9" Jan 22 06:19:52 crc kubenswrapper[4982]: I0122 06:19:52.571022 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f09418fd-cea0-4427-ba1d-7475ec00e9c2-utilities\") pod \"redhat-operators-jb6q9\" (UID: \"f09418fd-cea0-4427-ba1d-7475ec00e9c2\") " pod="openshift-marketplace/redhat-operators-jb6q9" Jan 22 06:19:52 crc kubenswrapper[4982]: I0122 06:19:52.671865 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f09418fd-cea0-4427-ba1d-7475ec00e9c2-catalog-content\") pod \"redhat-operators-jb6q9\" (UID: 
\"f09418fd-cea0-4427-ba1d-7475ec00e9c2\") " pod="openshift-marketplace/redhat-operators-jb6q9" Jan 22 06:19:52 crc kubenswrapper[4982]: I0122 06:19:52.671917 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4xft\" (UniqueName: \"kubernetes.io/projected/f09418fd-cea0-4427-ba1d-7475ec00e9c2-kube-api-access-s4xft\") pod \"redhat-operators-jb6q9\" (UID: \"f09418fd-cea0-4427-ba1d-7475ec00e9c2\") " pod="openshift-marketplace/redhat-operators-jb6q9" Jan 22 06:19:52 crc kubenswrapper[4982]: I0122 06:19:52.671969 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f09418fd-cea0-4427-ba1d-7475ec00e9c2-utilities\") pod \"redhat-operators-jb6q9\" (UID: \"f09418fd-cea0-4427-ba1d-7475ec00e9c2\") " pod="openshift-marketplace/redhat-operators-jb6q9" Jan 22 06:19:52 crc kubenswrapper[4982]: I0122 06:19:52.672520 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f09418fd-cea0-4427-ba1d-7475ec00e9c2-catalog-content\") pod \"redhat-operators-jb6q9\" (UID: \"f09418fd-cea0-4427-ba1d-7475ec00e9c2\") " pod="openshift-marketplace/redhat-operators-jb6q9" Jan 22 06:19:52 crc kubenswrapper[4982]: I0122 06:19:52.672571 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f09418fd-cea0-4427-ba1d-7475ec00e9c2-utilities\") pod \"redhat-operators-jb6q9\" (UID: \"f09418fd-cea0-4427-ba1d-7475ec00e9c2\") " pod="openshift-marketplace/redhat-operators-jb6q9" Jan 22 06:19:52 crc kubenswrapper[4982]: I0122 06:19:52.696646 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4xft\" (UniqueName: \"kubernetes.io/projected/f09418fd-cea0-4427-ba1d-7475ec00e9c2-kube-api-access-s4xft\") pod \"redhat-operators-jb6q9\" (UID: \"f09418fd-cea0-4427-ba1d-7475ec00e9c2\") " pod="openshift-marketplace/redhat-operators-jb6q9" Jan 22 06:19:52 crc kubenswrapper[4982]: I0122 06:19:52.826905 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jb6q9" Jan 22 06:19:53 crc kubenswrapper[4982]: I0122 06:19:53.330523 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jb6q9"] Jan 22 06:19:53 crc kubenswrapper[4982]: W0122 06:19:53.341982 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf09418fd_cea0_4427_ba1d_7475ec00e9c2.slice/crio-573770ac1e2156917fab4ddfb6e1c0d898be049b4e88467de2db965d13d379b7 WatchSource:0}: Error finding container 573770ac1e2156917fab4ddfb6e1c0d898be049b4e88467de2db965d13d379b7: Status 404 returned error can't find the container with id 573770ac1e2156917fab4ddfb6e1c0d898be049b4e88467de2db965d13d379b7 Jan 22 06:19:53 crc kubenswrapper[4982]: I0122 06:19:53.347686 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jb6q9" event={"ID":"f09418fd-cea0-4427-ba1d-7475ec00e9c2","Type":"ContainerStarted","Data":"573770ac1e2156917fab4ddfb6e1c0d898be049b4e88467de2db965d13d379b7"} Jan 22 06:19:54 crc kubenswrapper[4982]: I0122 06:19:54.362760 4982 generic.go:334] "Generic (PLEG): container finished" podID="4bf09413-e530-4cc4-9350-461c49190aea" containerID="1cae47babfdcc0c835ebf7b9dfae25a9ed458d2342fc177f1a0a931e53885ab1" exitCode=0 Jan 22 06:19:54 crc kubenswrapper[4982]: I0122 06:19:54.362894 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n88xt" event={"ID":"4bf09413-e530-4cc4-9350-461c49190aea","Type":"ContainerDied","Data":"1cae47babfdcc0c835ebf7b9dfae25a9ed458d2342fc177f1a0a931e53885ab1"} Jan 22 06:19:54 crc kubenswrapper[4982]: I0122 06:19:54.364958 4982 generic.go:334] "Generic (PLEG): container finished" podID="f09418fd-cea0-4427-ba1d-7475ec00e9c2" containerID="f3d7c29ef2d445d18dde44367fda12d8bea70ecbcd0f653961e7cd812a7121aa" exitCode=0 Jan 22 06:19:54 crc kubenswrapper[4982]: I0122 06:19:54.364990 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jb6q9" event={"ID":"f09418fd-cea0-4427-ba1d-7475ec00e9c2","Type":"ContainerDied","Data":"f3d7c29ef2d445d18dde44367fda12d8bea70ecbcd0f653961e7cd812a7121aa"} Jan 22 06:19:54 crc kubenswrapper[4982]: I0122 06:19:54.653942 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-n88xt" Jan 22 06:19:54 crc kubenswrapper[4982]: I0122 06:19:54.806159 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bf09413-e530-4cc4-9350-461c49190aea-utilities\") pod \"4bf09413-e530-4cc4-9350-461c49190aea\" (UID: \"4bf09413-e530-4cc4-9350-461c49190aea\") " Jan 22 06:19:54 crc kubenswrapper[4982]: I0122 06:19:54.806253 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hhdws\" (UniqueName: \"kubernetes.io/projected/4bf09413-e530-4cc4-9350-461c49190aea-kube-api-access-hhdws\") pod \"4bf09413-e530-4cc4-9350-461c49190aea\" (UID: \"4bf09413-e530-4cc4-9350-461c49190aea\") " Jan 22 06:19:54 crc kubenswrapper[4982]: I0122 06:19:54.806292 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bf09413-e530-4cc4-9350-461c49190aea-catalog-content\") pod \"4bf09413-e530-4cc4-9350-461c49190aea\" (UID: \"4bf09413-e530-4cc4-9350-461c49190aea\") " Jan 22 06:19:54 crc kubenswrapper[4982]: I0122 06:19:54.807167 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4bf09413-e530-4cc4-9350-461c49190aea-utilities" (OuterVolumeSpecName: "utilities") pod "4bf09413-e530-4cc4-9350-461c49190aea" (UID: "4bf09413-e530-4cc4-9350-461c49190aea"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:19:54 crc kubenswrapper[4982]: I0122 06:19:54.819104 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bf09413-e530-4cc4-9350-461c49190aea-kube-api-access-hhdws" (OuterVolumeSpecName: "kube-api-access-hhdws") pod "4bf09413-e530-4cc4-9350-461c49190aea" (UID: "4bf09413-e530-4cc4-9350-461c49190aea"). InnerVolumeSpecName "kube-api-access-hhdws". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:19:54 crc kubenswrapper[4982]: I0122 06:19:54.819652 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bf09413-e530-4cc4-9350-461c49190aea-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 06:19:54 crc kubenswrapper[4982]: I0122 06:19:54.819704 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hhdws\" (UniqueName: \"kubernetes.io/projected/4bf09413-e530-4cc4-9350-461c49190aea-kube-api-access-hhdws\") on node \"crc\" DevicePath \"\"" Jan 22 06:19:54 crc kubenswrapper[4982]: I0122 06:19:54.901414 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4bf09413-e530-4cc4-9350-461c49190aea-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4bf09413-e530-4cc4-9350-461c49190aea" (UID: "4bf09413-e530-4cc4-9350-461c49190aea"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:19:54 crc kubenswrapper[4982]: I0122 06:19:54.920570 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bf09413-e530-4cc4-9350-461c49190aea-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 06:19:55 crc kubenswrapper[4982]: I0122 06:19:55.374145 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n88xt" event={"ID":"4bf09413-e530-4cc4-9350-461c49190aea","Type":"ContainerDied","Data":"060b3fd93aa0b2fc10326190e1279f1ad240c23e13855477cc4b60f04f8874a0"} Jan 22 06:19:55 crc kubenswrapper[4982]: I0122 06:19:55.374479 4982 scope.go:117] "RemoveContainer" containerID="1cae47babfdcc0c835ebf7b9dfae25a9ed458d2342fc177f1a0a931e53885ab1" Jan 22 06:19:55 crc kubenswrapper[4982]: I0122 06:19:55.374165 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n88xt" Jan 22 06:19:55 crc kubenswrapper[4982]: I0122 06:19:55.386381 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jb6q9" event={"ID":"f09418fd-cea0-4427-ba1d-7475ec00e9c2","Type":"ContainerStarted","Data":"05ede68b123d1f14d41939fd50a7bdea1d64d298bf89153d086862280d560d85"} Jan 22 06:19:55 crc kubenswrapper[4982]: I0122 06:19:55.387715 4982 scope.go:117] "RemoveContainer" containerID="04d8d67002bd504446c1caa5ed0177c0feb8a770c6981fcc1bd17363f2790982" Jan 22 06:19:55 crc kubenswrapper[4982]: I0122 06:19:55.434693 4982 scope.go:117] "RemoveContainer" containerID="9bcb3918badd4cd0e5da4d6419e55d544162f8bd996b4b809540ef00fe29f1c7" Jan 22 06:19:55 crc kubenswrapper[4982]: I0122 06:19:55.478902 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-n88xt"] Jan 22 06:19:55 crc kubenswrapper[4982]: I0122 06:19:55.494340 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-n88xt"] Jan 22 06:19:55 crc kubenswrapper[4982]: I0122 06:19:55.726768 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bf09413-e530-4cc4-9350-461c49190aea" path="/var/lib/kubelet/pods/4bf09413-e530-4cc4-9350-461c49190aea/volumes" Jan 22 06:19:56 crc kubenswrapper[4982]: I0122 06:19:56.403491 4982 generic.go:334] "Generic (PLEG): container finished" podID="f09418fd-cea0-4427-ba1d-7475ec00e9c2" containerID="05ede68b123d1f14d41939fd50a7bdea1d64d298bf89153d086862280d560d85" exitCode=0 Jan 22 06:19:56 crc kubenswrapper[4982]: I0122 06:19:56.403543 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jb6q9" event={"ID":"f09418fd-cea0-4427-ba1d-7475ec00e9c2","Type":"ContainerDied","Data":"05ede68b123d1f14d41939fd50a7bdea1d64d298bf89153d086862280d560d85"} Jan 22 06:19:57 crc kubenswrapper[4982]: I0122 06:19:57.414484 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jb6q9" event={"ID":"f09418fd-cea0-4427-ba1d-7475ec00e9c2","Type":"ContainerStarted","Data":"6d68f966b3a24e3401edc0ed22659159f997e90910616f98b8fd767db428f92b"} Jan 22 06:19:57 crc kubenswrapper[4982]: I0122 06:19:57.444070 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-jb6q9" podStartSLOduration=3.020726398 podStartE2EDuration="5.444051873s" podCreationTimestamp="2026-01-22 06:19:52 +0000 UTC" firstStartedPulling="2026-01-22 06:19:54.366747791 
+0000 UTC m=+2055.205385794" lastFinishedPulling="2026-01-22 06:19:56.790073256 +0000 UTC m=+2057.628711269" observedRunningTime="2026-01-22 06:19:57.439736887 +0000 UTC m=+2058.278374930" watchObservedRunningTime="2026-01-22 06:19:57.444051873 +0000 UTC m=+2058.282689876" Jan 22 06:20:00 crc kubenswrapper[4982]: I0122 06:20:00.470866 4982 scope.go:117] "RemoveContainer" containerID="16ccd364971b84f9e1aefde70ffe870dfce4ec745425bb661ac50bbf45b722c2" Jan 22 06:20:00 crc kubenswrapper[4982]: I0122 06:20:00.501378 4982 scope.go:117] "RemoveContainer" containerID="b6075a60d0588ab6384967f796d19e90797092838f29dca692e35dcc7a076930" Jan 22 06:20:00 crc kubenswrapper[4982]: I0122 06:20:00.523446 4982 scope.go:117] "RemoveContainer" containerID="808747365a4408bdd8f14959b9a92dbda405f118de1a655ee5611dad11349bc8" Jan 22 06:20:02 crc kubenswrapper[4982]: I0122 06:20:02.827272 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-jb6q9" Jan 22 06:20:02 crc kubenswrapper[4982]: I0122 06:20:02.827678 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-jb6q9" Jan 22 06:20:03 crc kubenswrapper[4982]: I0122 06:20:03.880199 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-jb6q9" podUID="f09418fd-cea0-4427-ba1d-7475ec00e9c2" containerName="registry-server" probeResult="failure" output=< Jan 22 06:20:03 crc kubenswrapper[4982]: timeout: failed to connect service ":50051" within 1s Jan 22 06:20:03 crc kubenswrapper[4982]: > Jan 22 06:20:12 crc kubenswrapper[4982]: I0122 06:20:12.907119 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-jb6q9" Jan 22 06:20:13 crc kubenswrapper[4982]: I0122 06:20:13.024234 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-jb6q9" Jan 22 06:20:13 crc kubenswrapper[4982]: I0122 06:20:13.161656 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jb6q9"] Jan 22 06:20:14 crc kubenswrapper[4982]: I0122 06:20:14.563169 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-jb6q9" podUID="f09418fd-cea0-4427-ba1d-7475ec00e9c2" containerName="registry-server" containerID="cri-o://6d68f966b3a24e3401edc0ed22659159f997e90910616f98b8fd767db428f92b" gracePeriod=2 Jan 22 06:20:15 crc kubenswrapper[4982]: I0122 06:20:15.571693 4982 generic.go:334] "Generic (PLEG): container finished" podID="f09418fd-cea0-4427-ba1d-7475ec00e9c2" containerID="6d68f966b3a24e3401edc0ed22659159f997e90910616f98b8fd767db428f92b" exitCode=0 Jan 22 06:20:15 crc kubenswrapper[4982]: I0122 06:20:15.571736 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jb6q9" event={"ID":"f09418fd-cea0-4427-ba1d-7475ec00e9c2","Type":"ContainerDied","Data":"6d68f966b3a24e3401edc0ed22659159f997e90910616f98b8fd767db428f92b"} Jan 22 06:20:16 crc kubenswrapper[4982]: I0122 06:20:16.178936 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jb6q9" Jan 22 06:20:16 crc kubenswrapper[4982]: I0122 06:20:16.357600 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f09418fd-cea0-4427-ba1d-7475ec00e9c2-utilities\") pod \"f09418fd-cea0-4427-ba1d-7475ec00e9c2\" (UID: \"f09418fd-cea0-4427-ba1d-7475ec00e9c2\") " Jan 22 06:20:16 crc kubenswrapper[4982]: I0122 06:20:16.357723 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4xft\" (UniqueName: \"kubernetes.io/projected/f09418fd-cea0-4427-ba1d-7475ec00e9c2-kube-api-access-s4xft\") pod \"f09418fd-cea0-4427-ba1d-7475ec00e9c2\" (UID: \"f09418fd-cea0-4427-ba1d-7475ec00e9c2\") " Jan 22 06:20:16 crc kubenswrapper[4982]: I0122 06:20:16.358050 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f09418fd-cea0-4427-ba1d-7475ec00e9c2-catalog-content\") pod \"f09418fd-cea0-4427-ba1d-7475ec00e9c2\" (UID: \"f09418fd-cea0-4427-ba1d-7475ec00e9c2\") " Jan 22 06:20:16 crc kubenswrapper[4982]: I0122 06:20:16.359601 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f09418fd-cea0-4427-ba1d-7475ec00e9c2-utilities" (OuterVolumeSpecName: "utilities") pod "f09418fd-cea0-4427-ba1d-7475ec00e9c2" (UID: "f09418fd-cea0-4427-ba1d-7475ec00e9c2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:20:16 crc kubenswrapper[4982]: I0122 06:20:16.369228 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f09418fd-cea0-4427-ba1d-7475ec00e9c2-kube-api-access-s4xft" (OuterVolumeSpecName: "kube-api-access-s4xft") pod "f09418fd-cea0-4427-ba1d-7475ec00e9c2" (UID: "f09418fd-cea0-4427-ba1d-7475ec00e9c2"). InnerVolumeSpecName "kube-api-access-s4xft". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:20:16 crc kubenswrapper[4982]: I0122 06:20:16.459677 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f09418fd-cea0-4427-ba1d-7475ec00e9c2-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 06:20:16 crc kubenswrapper[4982]: I0122 06:20:16.459721 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4xft\" (UniqueName: \"kubernetes.io/projected/f09418fd-cea0-4427-ba1d-7475ec00e9c2-kube-api-access-s4xft\") on node \"crc\" DevicePath \"\"" Jan 22 06:20:16 crc kubenswrapper[4982]: I0122 06:20:16.502775 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f09418fd-cea0-4427-ba1d-7475ec00e9c2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f09418fd-cea0-4427-ba1d-7475ec00e9c2" (UID: "f09418fd-cea0-4427-ba1d-7475ec00e9c2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:20:16 crc kubenswrapper[4982]: I0122 06:20:16.561118 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f09418fd-cea0-4427-ba1d-7475ec00e9c2-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 06:20:16 crc kubenswrapper[4982]: I0122 06:20:16.581838 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jb6q9" event={"ID":"f09418fd-cea0-4427-ba1d-7475ec00e9c2","Type":"ContainerDied","Data":"573770ac1e2156917fab4ddfb6e1c0d898be049b4e88467de2db965d13d379b7"} Jan 22 06:20:16 crc kubenswrapper[4982]: I0122 06:20:16.581912 4982 scope.go:117] "RemoveContainer" containerID="6d68f966b3a24e3401edc0ed22659159f997e90910616f98b8fd767db428f92b" Jan 22 06:20:16 crc kubenswrapper[4982]: I0122 06:20:16.581961 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jb6q9" Jan 22 06:20:16 crc kubenswrapper[4982]: I0122 06:20:16.621589 4982 scope.go:117] "RemoveContainer" containerID="05ede68b123d1f14d41939fd50a7bdea1d64d298bf89153d086862280d560d85" Jan 22 06:20:16 crc kubenswrapper[4982]: I0122 06:20:16.636318 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jb6q9"] Jan 22 06:20:16 crc kubenswrapper[4982]: I0122 06:20:16.652692 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-jb6q9"] Jan 22 06:20:16 crc kubenswrapper[4982]: I0122 06:20:16.669644 4982 scope.go:117] "RemoveContainer" containerID="f3d7c29ef2d445d18dde44367fda12d8bea70ecbcd0f653961e7cd812a7121aa" Jan 22 06:20:17 crc kubenswrapper[4982]: I0122 06:20:17.730466 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f09418fd-cea0-4427-ba1d-7475ec00e9c2" path="/var/lib/kubelet/pods/f09418fd-cea0-4427-ba1d-7475ec00e9c2/volumes" Jan 22 06:21:18 crc kubenswrapper[4982]: I0122 06:21:18.974145 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 06:21:18 crc kubenswrapper[4982]: I0122 06:21:18.974774 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 06:21:48 crc kubenswrapper[4982]: I0122 06:21:48.974430 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 06:21:48 crc kubenswrapper[4982]: I0122 06:21:48.975102 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 06:22:18 crc kubenswrapper[4982]: I0122 06:22:18.974336 4982 patch_prober.go:28] 
interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 06:22:18 crc kubenswrapper[4982]: I0122 06:22:18.975420 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 06:22:18 crc kubenswrapper[4982]: I0122 06:22:18.975500 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 06:22:18 crc kubenswrapper[4982]: I0122 06:22:18.976653 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8331e5fc2740e88c3621d8b716829aafc346943b6dd36e5c79bdcce5649079ce"} pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 06:22:18 crc kubenswrapper[4982]: I0122 06:22:18.976744 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" containerID="cri-o://8331e5fc2740e88c3621d8b716829aafc346943b6dd36e5c79bdcce5649079ce" gracePeriod=600 Jan 22 06:22:19 crc kubenswrapper[4982]: I0122 06:22:19.899917 4982 generic.go:334] "Generic (PLEG): container finished" podID="2829369e-72ba-4637-853b-88f5cf242a0e" containerID="8331e5fc2740e88c3621d8b716829aafc346943b6dd36e5c79bdcce5649079ce" exitCode=0 Jan 22 06:22:19 crc kubenswrapper[4982]: I0122 06:22:19.900262 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerDied","Data":"8331e5fc2740e88c3621d8b716829aafc346943b6dd36e5c79bdcce5649079ce"} Jan 22 06:22:19 crc kubenswrapper[4982]: I0122 06:22:19.900302 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"32e84d617816cde5bb9b78f61b87b5d0ea31e9b82c1ad5afbb9ff31d965f7faf"} Jan 22 06:22:19 crc kubenswrapper[4982]: I0122 06:22:19.900331 4982 scope.go:117] "RemoveContainer" containerID="bd26b2fce69a0bbe494a0b167df0474522baf189df9031f3cbf4529bc59d8ad2" Jan 22 06:22:39 crc kubenswrapper[4982]: I0122 06:22:39.324289 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9l4zm"] Jan 22 06:22:39 crc kubenswrapper[4982]: E0122 06:22:39.325142 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bf09413-e530-4cc4-9350-461c49190aea" containerName="registry-server" Jan 22 06:22:39 crc kubenswrapper[4982]: I0122 06:22:39.325155 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bf09413-e530-4cc4-9350-461c49190aea" containerName="registry-server" Jan 22 06:22:39 crc kubenswrapper[4982]: E0122 06:22:39.325168 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bf09413-e530-4cc4-9350-461c49190aea" 
containerName="extract-utilities" Jan 22 06:22:39 crc kubenswrapper[4982]: I0122 06:22:39.325174 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bf09413-e530-4cc4-9350-461c49190aea" containerName="extract-utilities" Jan 22 06:22:39 crc kubenswrapper[4982]: E0122 06:22:39.325182 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bf09413-e530-4cc4-9350-461c49190aea" containerName="extract-content" Jan 22 06:22:39 crc kubenswrapper[4982]: I0122 06:22:39.325188 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bf09413-e530-4cc4-9350-461c49190aea" containerName="extract-content" Jan 22 06:22:39 crc kubenswrapper[4982]: E0122 06:22:39.325201 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f09418fd-cea0-4427-ba1d-7475ec00e9c2" containerName="extract-content" Jan 22 06:22:39 crc kubenswrapper[4982]: I0122 06:22:39.325207 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f09418fd-cea0-4427-ba1d-7475ec00e9c2" containerName="extract-content" Jan 22 06:22:39 crc kubenswrapper[4982]: E0122 06:22:39.325216 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f09418fd-cea0-4427-ba1d-7475ec00e9c2" containerName="extract-utilities" Jan 22 06:22:39 crc kubenswrapper[4982]: I0122 06:22:39.325222 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f09418fd-cea0-4427-ba1d-7475ec00e9c2" containerName="extract-utilities" Jan 22 06:22:39 crc kubenswrapper[4982]: E0122 06:22:39.325238 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f09418fd-cea0-4427-ba1d-7475ec00e9c2" containerName="registry-server" Jan 22 06:22:39 crc kubenswrapper[4982]: I0122 06:22:39.325244 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f09418fd-cea0-4427-ba1d-7475ec00e9c2" containerName="registry-server" Jan 22 06:22:39 crc kubenswrapper[4982]: I0122 06:22:39.325376 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f09418fd-cea0-4427-ba1d-7475ec00e9c2" containerName="registry-server" Jan 22 06:22:39 crc kubenswrapper[4982]: I0122 06:22:39.325391 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="4bf09413-e530-4cc4-9350-461c49190aea" containerName="registry-server" Jan 22 06:22:39 crc kubenswrapper[4982]: I0122 06:22:39.326359 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9l4zm" Jan 22 06:22:39 crc kubenswrapper[4982]: I0122 06:22:39.339691 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9l4zm"] Jan 22 06:22:39 crc kubenswrapper[4982]: I0122 06:22:39.342735 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/225199c7-17b2-4ee0-a5a4-a33b8c96c46a-utilities\") pod \"certified-operators-9l4zm\" (UID: \"225199c7-17b2-4ee0-a5a4-a33b8c96c46a\") " pod="openshift-marketplace/certified-operators-9l4zm" Jan 22 06:22:39 crc kubenswrapper[4982]: I0122 06:22:39.342792 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tw8jt\" (UniqueName: \"kubernetes.io/projected/225199c7-17b2-4ee0-a5a4-a33b8c96c46a-kube-api-access-tw8jt\") pod \"certified-operators-9l4zm\" (UID: \"225199c7-17b2-4ee0-a5a4-a33b8c96c46a\") " pod="openshift-marketplace/certified-operators-9l4zm" Jan 22 06:22:39 crc kubenswrapper[4982]: I0122 06:22:39.342944 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/225199c7-17b2-4ee0-a5a4-a33b8c96c46a-catalog-content\") pod \"certified-operators-9l4zm\" (UID: \"225199c7-17b2-4ee0-a5a4-a33b8c96c46a\") " pod="openshift-marketplace/certified-operators-9l4zm" Jan 22 06:22:39 crc kubenswrapper[4982]: I0122 06:22:39.443834 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/225199c7-17b2-4ee0-a5a4-a33b8c96c46a-catalog-content\") pod \"certified-operators-9l4zm\" (UID: \"225199c7-17b2-4ee0-a5a4-a33b8c96c46a\") " pod="openshift-marketplace/certified-operators-9l4zm" Jan 22 06:22:39 crc kubenswrapper[4982]: I0122 06:22:39.443965 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/225199c7-17b2-4ee0-a5a4-a33b8c96c46a-utilities\") pod \"certified-operators-9l4zm\" (UID: \"225199c7-17b2-4ee0-a5a4-a33b8c96c46a\") " pod="openshift-marketplace/certified-operators-9l4zm" Jan 22 06:22:39 crc kubenswrapper[4982]: I0122 06:22:39.443998 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tw8jt\" (UniqueName: \"kubernetes.io/projected/225199c7-17b2-4ee0-a5a4-a33b8c96c46a-kube-api-access-tw8jt\") pod \"certified-operators-9l4zm\" (UID: \"225199c7-17b2-4ee0-a5a4-a33b8c96c46a\") " pod="openshift-marketplace/certified-operators-9l4zm" Jan 22 06:22:39 crc kubenswrapper[4982]: I0122 06:22:39.444351 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/225199c7-17b2-4ee0-a5a4-a33b8c96c46a-catalog-content\") pod \"certified-operators-9l4zm\" (UID: \"225199c7-17b2-4ee0-a5a4-a33b8c96c46a\") " pod="openshift-marketplace/certified-operators-9l4zm" Jan 22 06:22:39 crc kubenswrapper[4982]: I0122 06:22:39.444406 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/225199c7-17b2-4ee0-a5a4-a33b8c96c46a-utilities\") pod \"certified-operators-9l4zm\" (UID: \"225199c7-17b2-4ee0-a5a4-a33b8c96c46a\") " pod="openshift-marketplace/certified-operators-9l4zm" Jan 22 06:22:39 crc kubenswrapper[4982]: I0122 06:22:39.465271 4982 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-tw8jt\" (UniqueName: \"kubernetes.io/projected/225199c7-17b2-4ee0-a5a4-a33b8c96c46a-kube-api-access-tw8jt\") pod \"certified-operators-9l4zm\" (UID: \"225199c7-17b2-4ee0-a5a4-a33b8c96c46a\") " pod="openshift-marketplace/certified-operators-9l4zm" Jan 22 06:22:39 crc kubenswrapper[4982]: I0122 06:22:39.646567 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9l4zm" Jan 22 06:22:40 crc kubenswrapper[4982]: I0122 06:22:40.108416 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9l4zm"] Jan 22 06:22:41 crc kubenswrapper[4982]: I0122 06:22:41.094074 4982 generic.go:334] "Generic (PLEG): container finished" podID="225199c7-17b2-4ee0-a5a4-a33b8c96c46a" containerID="a2c17ff6f60cedbec3aed1500378c19e160be79225ae9bffe16dab331c3c66b0" exitCode=0 Jan 22 06:22:41 crc kubenswrapper[4982]: I0122 06:22:41.094140 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9l4zm" event={"ID":"225199c7-17b2-4ee0-a5a4-a33b8c96c46a","Type":"ContainerDied","Data":"a2c17ff6f60cedbec3aed1500378c19e160be79225ae9bffe16dab331c3c66b0"} Jan 22 06:22:41 crc kubenswrapper[4982]: I0122 06:22:41.096418 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9l4zm" event={"ID":"225199c7-17b2-4ee0-a5a4-a33b8c96c46a","Type":"ContainerStarted","Data":"713c97cac224e986d827cd536f1de1ac5e89a2bc2ce061bcf98c433f949b3c25"} Jan 22 06:22:42 crc kubenswrapper[4982]: I0122 06:22:42.114887 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9l4zm" event={"ID":"225199c7-17b2-4ee0-a5a4-a33b8c96c46a","Type":"ContainerStarted","Data":"8d60462a5423efc5cf88b1c6a5af20f1f9ee78f49fb8171cfce4ae3b09e51d05"} Jan 22 06:22:43 crc kubenswrapper[4982]: I0122 06:22:43.127164 4982 generic.go:334] "Generic (PLEG): container finished" podID="225199c7-17b2-4ee0-a5a4-a33b8c96c46a" containerID="8d60462a5423efc5cf88b1c6a5af20f1f9ee78f49fb8171cfce4ae3b09e51d05" exitCode=0 Jan 22 06:22:43 crc kubenswrapper[4982]: I0122 06:22:43.127280 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9l4zm" event={"ID":"225199c7-17b2-4ee0-a5a4-a33b8c96c46a","Type":"ContainerDied","Data":"8d60462a5423efc5cf88b1c6a5af20f1f9ee78f49fb8171cfce4ae3b09e51d05"} Jan 22 06:22:43 crc kubenswrapper[4982]: I0122 06:22:43.127644 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9l4zm" event={"ID":"225199c7-17b2-4ee0-a5a4-a33b8c96c46a","Type":"ContainerStarted","Data":"0168ef28220e30d9d1380ab4df81bc3a622020fda5d483fbce94cf18faf9408b"} Jan 22 06:22:43 crc kubenswrapper[4982]: I0122 06:22:43.157520 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9l4zm" podStartSLOduration=2.72149405 podStartE2EDuration="4.157494596s" podCreationTimestamp="2026-01-22 06:22:39 +0000 UTC" firstStartedPulling="2026-01-22 06:22:41.09647369 +0000 UTC m=+2221.935111693" lastFinishedPulling="2026-01-22 06:22:42.532474206 +0000 UTC m=+2223.371112239" observedRunningTime="2026-01-22 06:22:43.150617092 +0000 UTC m=+2223.989255095" watchObservedRunningTime="2026-01-22 06:22:43.157494596 +0000 UTC m=+2223.996132609" Jan 22 06:22:49 crc kubenswrapper[4982]: I0122 06:22:49.647017 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/certified-operators-9l4zm" Jan 22 06:22:49 crc kubenswrapper[4982]: I0122 06:22:49.647576 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9l4zm" Jan 22 06:22:49 crc kubenswrapper[4982]: I0122 06:22:49.713905 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9l4zm" Jan 22 06:22:50 crc kubenswrapper[4982]: I0122 06:22:50.238177 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9l4zm" Jan 22 06:22:50 crc kubenswrapper[4982]: I0122 06:22:50.290356 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9l4zm"] Jan 22 06:22:52 crc kubenswrapper[4982]: I0122 06:22:52.230969 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-9l4zm" podUID="225199c7-17b2-4ee0-a5a4-a33b8c96c46a" containerName="registry-server" containerID="cri-o://0168ef28220e30d9d1380ab4df81bc3a622020fda5d483fbce94cf18faf9408b" gracePeriod=2 Jan 22 06:22:53 crc kubenswrapper[4982]: I0122 06:22:53.156645 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9l4zm" Jan 22 06:22:53 crc kubenswrapper[4982]: I0122 06:22:53.242440 4982 generic.go:334] "Generic (PLEG): container finished" podID="225199c7-17b2-4ee0-a5a4-a33b8c96c46a" containerID="0168ef28220e30d9d1380ab4df81bc3a622020fda5d483fbce94cf18faf9408b" exitCode=0 Jan 22 06:22:53 crc kubenswrapper[4982]: I0122 06:22:53.242492 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9l4zm" event={"ID":"225199c7-17b2-4ee0-a5a4-a33b8c96c46a","Type":"ContainerDied","Data":"0168ef28220e30d9d1380ab4df81bc3a622020fda5d483fbce94cf18faf9408b"} Jan 22 06:22:53 crc kubenswrapper[4982]: I0122 06:22:53.242523 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9l4zm" event={"ID":"225199c7-17b2-4ee0-a5a4-a33b8c96c46a","Type":"ContainerDied","Data":"713c97cac224e986d827cd536f1de1ac5e89a2bc2ce061bcf98c433f949b3c25"} Jan 22 06:22:53 crc kubenswrapper[4982]: I0122 06:22:53.242543 4982 scope.go:117] "RemoveContainer" containerID="0168ef28220e30d9d1380ab4df81bc3a622020fda5d483fbce94cf18faf9408b" Jan 22 06:22:53 crc kubenswrapper[4982]: I0122 06:22:53.242591 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9l4zm" Jan 22 06:22:53 crc kubenswrapper[4982]: I0122 06:22:53.264407 4982 scope.go:117] "RemoveContainer" containerID="8d60462a5423efc5cf88b1c6a5af20f1f9ee78f49fb8171cfce4ae3b09e51d05" Jan 22 06:22:53 crc kubenswrapper[4982]: I0122 06:22:53.269283 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/225199c7-17b2-4ee0-a5a4-a33b8c96c46a-catalog-content\") pod \"225199c7-17b2-4ee0-a5a4-a33b8c96c46a\" (UID: \"225199c7-17b2-4ee0-a5a4-a33b8c96c46a\") " Jan 22 06:22:53 crc kubenswrapper[4982]: I0122 06:22:53.269378 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/225199c7-17b2-4ee0-a5a4-a33b8c96c46a-utilities\") pod \"225199c7-17b2-4ee0-a5a4-a33b8c96c46a\" (UID: \"225199c7-17b2-4ee0-a5a4-a33b8c96c46a\") " Jan 22 06:22:53 crc kubenswrapper[4982]: I0122 06:22:53.269425 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tw8jt\" (UniqueName: \"kubernetes.io/projected/225199c7-17b2-4ee0-a5a4-a33b8c96c46a-kube-api-access-tw8jt\") pod \"225199c7-17b2-4ee0-a5a4-a33b8c96c46a\" (UID: \"225199c7-17b2-4ee0-a5a4-a33b8c96c46a\") " Jan 22 06:22:53 crc kubenswrapper[4982]: I0122 06:22:53.270535 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/225199c7-17b2-4ee0-a5a4-a33b8c96c46a-utilities" (OuterVolumeSpecName: "utilities") pod "225199c7-17b2-4ee0-a5a4-a33b8c96c46a" (UID: "225199c7-17b2-4ee0-a5a4-a33b8c96c46a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:22:53 crc kubenswrapper[4982]: I0122 06:22:53.279107 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/225199c7-17b2-4ee0-a5a4-a33b8c96c46a-kube-api-access-tw8jt" (OuterVolumeSpecName: "kube-api-access-tw8jt") pod "225199c7-17b2-4ee0-a5a4-a33b8c96c46a" (UID: "225199c7-17b2-4ee0-a5a4-a33b8c96c46a"). InnerVolumeSpecName "kube-api-access-tw8jt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:22:53 crc kubenswrapper[4982]: I0122 06:22:53.284435 4982 scope.go:117] "RemoveContainer" containerID="a2c17ff6f60cedbec3aed1500378c19e160be79225ae9bffe16dab331c3c66b0" Jan 22 06:22:53 crc kubenswrapper[4982]: I0122 06:22:53.323273 4982 scope.go:117] "RemoveContainer" containerID="0168ef28220e30d9d1380ab4df81bc3a622020fda5d483fbce94cf18faf9408b" Jan 22 06:22:53 crc kubenswrapper[4982]: E0122 06:22:53.323760 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0168ef28220e30d9d1380ab4df81bc3a622020fda5d483fbce94cf18faf9408b\": container with ID starting with 0168ef28220e30d9d1380ab4df81bc3a622020fda5d483fbce94cf18faf9408b not found: ID does not exist" containerID="0168ef28220e30d9d1380ab4df81bc3a622020fda5d483fbce94cf18faf9408b" Jan 22 06:22:53 crc kubenswrapper[4982]: I0122 06:22:53.323793 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0168ef28220e30d9d1380ab4df81bc3a622020fda5d483fbce94cf18faf9408b"} err="failed to get container status \"0168ef28220e30d9d1380ab4df81bc3a622020fda5d483fbce94cf18faf9408b\": rpc error: code = NotFound desc = could not find container \"0168ef28220e30d9d1380ab4df81bc3a622020fda5d483fbce94cf18faf9408b\": container with ID starting with 0168ef28220e30d9d1380ab4df81bc3a622020fda5d483fbce94cf18faf9408b not found: ID does not exist" Jan 22 06:22:53 crc kubenswrapper[4982]: I0122 06:22:53.323814 4982 scope.go:117] "RemoveContainer" containerID="8d60462a5423efc5cf88b1c6a5af20f1f9ee78f49fb8171cfce4ae3b09e51d05" Jan 22 06:22:53 crc kubenswrapper[4982]: E0122 06:22:53.324118 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d60462a5423efc5cf88b1c6a5af20f1f9ee78f49fb8171cfce4ae3b09e51d05\": container with ID starting with 8d60462a5423efc5cf88b1c6a5af20f1f9ee78f49fb8171cfce4ae3b09e51d05 not found: ID does not exist" containerID="8d60462a5423efc5cf88b1c6a5af20f1f9ee78f49fb8171cfce4ae3b09e51d05" Jan 22 06:22:53 crc kubenswrapper[4982]: I0122 06:22:53.324138 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d60462a5423efc5cf88b1c6a5af20f1f9ee78f49fb8171cfce4ae3b09e51d05"} err="failed to get container status \"8d60462a5423efc5cf88b1c6a5af20f1f9ee78f49fb8171cfce4ae3b09e51d05\": rpc error: code = NotFound desc = could not find container \"8d60462a5423efc5cf88b1c6a5af20f1f9ee78f49fb8171cfce4ae3b09e51d05\": container with ID starting with 8d60462a5423efc5cf88b1c6a5af20f1f9ee78f49fb8171cfce4ae3b09e51d05 not found: ID does not exist" Jan 22 06:22:53 crc kubenswrapper[4982]: I0122 06:22:53.324151 4982 scope.go:117] "RemoveContainer" containerID="a2c17ff6f60cedbec3aed1500378c19e160be79225ae9bffe16dab331c3c66b0" Jan 22 06:22:53 crc kubenswrapper[4982]: E0122 06:22:53.324481 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2c17ff6f60cedbec3aed1500378c19e160be79225ae9bffe16dab331c3c66b0\": container with ID starting with a2c17ff6f60cedbec3aed1500378c19e160be79225ae9bffe16dab331c3c66b0 not found: ID does not exist" containerID="a2c17ff6f60cedbec3aed1500378c19e160be79225ae9bffe16dab331c3c66b0" Jan 22 06:22:53 crc kubenswrapper[4982]: I0122 06:22:53.324551 4982 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"a2c17ff6f60cedbec3aed1500378c19e160be79225ae9bffe16dab331c3c66b0"} err="failed to get container status \"a2c17ff6f60cedbec3aed1500378c19e160be79225ae9bffe16dab331c3c66b0\": rpc error: code = NotFound desc = could not find container \"a2c17ff6f60cedbec3aed1500378c19e160be79225ae9bffe16dab331c3c66b0\": container with ID starting with a2c17ff6f60cedbec3aed1500378c19e160be79225ae9bffe16dab331c3c66b0 not found: ID does not exist" Jan 22 06:22:53 crc kubenswrapper[4982]: I0122 06:22:53.351361 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/225199c7-17b2-4ee0-a5a4-a33b8c96c46a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "225199c7-17b2-4ee0-a5a4-a33b8c96c46a" (UID: "225199c7-17b2-4ee0-a5a4-a33b8c96c46a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:22:53 crc kubenswrapper[4982]: I0122 06:22:53.372302 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/225199c7-17b2-4ee0-a5a4-a33b8c96c46a-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 06:22:53 crc kubenswrapper[4982]: I0122 06:22:53.374113 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/225199c7-17b2-4ee0-a5a4-a33b8c96c46a-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 06:22:53 crc kubenswrapper[4982]: I0122 06:22:53.374161 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tw8jt\" (UniqueName: \"kubernetes.io/projected/225199c7-17b2-4ee0-a5a4-a33b8c96c46a-kube-api-access-tw8jt\") on node \"crc\" DevicePath \"\"" Jan 22 06:22:53 crc kubenswrapper[4982]: I0122 06:22:53.597059 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9l4zm"] Jan 22 06:22:53 crc kubenswrapper[4982]: I0122 06:22:53.606005 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-9l4zm"] Jan 22 06:22:53 crc kubenswrapper[4982]: I0122 06:22:53.735320 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="225199c7-17b2-4ee0-a5a4-a33b8c96c46a" path="/var/lib/kubelet/pods/225199c7-17b2-4ee0-a5a4-a33b8c96c46a/volumes" Jan 22 06:23:57 crc kubenswrapper[4982]: I0122 06:23:57.965105 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mr572"] Jan 22 06:23:57 crc kubenswrapper[4982]: E0122 06:23:57.966304 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="225199c7-17b2-4ee0-a5a4-a33b8c96c46a" containerName="extract-content" Jan 22 06:23:57 crc kubenswrapper[4982]: I0122 06:23:57.966333 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="225199c7-17b2-4ee0-a5a4-a33b8c96c46a" containerName="extract-content" Jan 22 06:23:57 crc kubenswrapper[4982]: E0122 06:23:57.966388 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="225199c7-17b2-4ee0-a5a4-a33b8c96c46a" containerName="extract-utilities" Jan 22 06:23:57 crc kubenswrapper[4982]: I0122 06:23:57.966406 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="225199c7-17b2-4ee0-a5a4-a33b8c96c46a" containerName="extract-utilities" Jan 22 06:23:57 crc kubenswrapper[4982]: E0122 06:23:57.966439 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="225199c7-17b2-4ee0-a5a4-a33b8c96c46a" containerName="registry-server" Jan 22 06:23:57 crc kubenswrapper[4982]: I0122 06:23:57.966455 4982 
state_mem.go:107] "Deleted CPUSet assignment" podUID="225199c7-17b2-4ee0-a5a4-a33b8c96c46a" containerName="registry-server" Jan 22 06:23:57 crc kubenswrapper[4982]: I0122 06:23:57.966736 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="225199c7-17b2-4ee0-a5a4-a33b8c96c46a" containerName="registry-server" Jan 22 06:23:57 crc kubenswrapper[4982]: I0122 06:23:57.968737 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mr572" Jan 22 06:23:57 crc kubenswrapper[4982]: I0122 06:23:57.976493 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mr572"] Jan 22 06:23:58 crc kubenswrapper[4982]: I0122 06:23:58.010672 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7rrnx\" (UniqueName: \"kubernetes.io/projected/360dccc1-e043-4435-be75-496cf095c47a-kube-api-access-7rrnx\") pod \"redhat-marketplace-mr572\" (UID: \"360dccc1-e043-4435-be75-496cf095c47a\") " pod="openshift-marketplace/redhat-marketplace-mr572" Jan 22 06:23:58 crc kubenswrapper[4982]: I0122 06:23:58.010728 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/360dccc1-e043-4435-be75-496cf095c47a-catalog-content\") pod \"redhat-marketplace-mr572\" (UID: \"360dccc1-e043-4435-be75-496cf095c47a\") " pod="openshift-marketplace/redhat-marketplace-mr572" Jan 22 06:23:58 crc kubenswrapper[4982]: I0122 06:23:58.011061 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/360dccc1-e043-4435-be75-496cf095c47a-utilities\") pod \"redhat-marketplace-mr572\" (UID: \"360dccc1-e043-4435-be75-496cf095c47a\") " pod="openshift-marketplace/redhat-marketplace-mr572" Jan 22 06:23:58 crc kubenswrapper[4982]: I0122 06:23:58.112353 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/360dccc1-e043-4435-be75-496cf095c47a-utilities\") pod \"redhat-marketplace-mr572\" (UID: \"360dccc1-e043-4435-be75-496cf095c47a\") " pod="openshift-marketplace/redhat-marketplace-mr572" Jan 22 06:23:58 crc kubenswrapper[4982]: I0122 06:23:58.112535 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7rrnx\" (UniqueName: \"kubernetes.io/projected/360dccc1-e043-4435-be75-496cf095c47a-kube-api-access-7rrnx\") pod \"redhat-marketplace-mr572\" (UID: \"360dccc1-e043-4435-be75-496cf095c47a\") " pod="openshift-marketplace/redhat-marketplace-mr572" Jan 22 06:23:58 crc kubenswrapper[4982]: I0122 06:23:58.112580 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/360dccc1-e043-4435-be75-496cf095c47a-catalog-content\") pod \"redhat-marketplace-mr572\" (UID: \"360dccc1-e043-4435-be75-496cf095c47a\") " pod="openshift-marketplace/redhat-marketplace-mr572" Jan 22 06:23:58 crc kubenswrapper[4982]: I0122 06:23:58.112957 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/360dccc1-e043-4435-be75-496cf095c47a-utilities\") pod \"redhat-marketplace-mr572\" (UID: \"360dccc1-e043-4435-be75-496cf095c47a\") " pod="openshift-marketplace/redhat-marketplace-mr572" Jan 22 06:23:58 crc kubenswrapper[4982]: I0122 
06:23:58.113245 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/360dccc1-e043-4435-be75-496cf095c47a-catalog-content\") pod \"redhat-marketplace-mr572\" (UID: \"360dccc1-e043-4435-be75-496cf095c47a\") " pod="openshift-marketplace/redhat-marketplace-mr572" Jan 22 06:23:58 crc kubenswrapper[4982]: I0122 06:23:58.142376 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7rrnx\" (UniqueName: \"kubernetes.io/projected/360dccc1-e043-4435-be75-496cf095c47a-kube-api-access-7rrnx\") pod \"redhat-marketplace-mr572\" (UID: \"360dccc1-e043-4435-be75-496cf095c47a\") " pod="openshift-marketplace/redhat-marketplace-mr572" Jan 22 06:23:58 crc kubenswrapper[4982]: I0122 06:23:58.306742 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mr572" Jan 22 06:23:58 crc kubenswrapper[4982]: I0122 06:23:58.782229 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mr572"] Jan 22 06:23:58 crc kubenswrapper[4982]: I0122 06:23:58.836918 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mr572" event={"ID":"360dccc1-e043-4435-be75-496cf095c47a","Type":"ContainerStarted","Data":"841bd49fd1a89bc928071fb7c9fdba5782102417db3e767d6cac5b2788cf3457"} Jan 22 06:23:59 crc kubenswrapper[4982]: I0122 06:23:59.847895 4982 generic.go:334] "Generic (PLEG): container finished" podID="360dccc1-e043-4435-be75-496cf095c47a" containerID="6e861e1c68c5659204d7e2df3aec6dda9964695308d4d8b31b3c5a34eb531f79" exitCode=0 Jan 22 06:23:59 crc kubenswrapper[4982]: I0122 06:23:59.847967 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mr572" event={"ID":"360dccc1-e043-4435-be75-496cf095c47a","Type":"ContainerDied","Data":"6e861e1c68c5659204d7e2df3aec6dda9964695308d4d8b31b3c5a34eb531f79"} Jan 22 06:24:00 crc kubenswrapper[4982]: I0122 06:24:00.860055 4982 generic.go:334] "Generic (PLEG): container finished" podID="360dccc1-e043-4435-be75-496cf095c47a" containerID="1e607422b51db950689094f3d726cfc9f9018224b2ee33c8c9472e393c0f05e4" exitCode=0 Jan 22 06:24:00 crc kubenswrapper[4982]: I0122 06:24:00.860293 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mr572" event={"ID":"360dccc1-e043-4435-be75-496cf095c47a","Type":"ContainerDied","Data":"1e607422b51db950689094f3d726cfc9f9018224b2ee33c8c9472e393c0f05e4"} Jan 22 06:24:02 crc kubenswrapper[4982]: I0122 06:24:02.878384 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mr572" event={"ID":"360dccc1-e043-4435-be75-496cf095c47a","Type":"ContainerStarted","Data":"f46a0aad93f2621777b55085a71693e9c064cb0b3ff78db8c7def54924914686"} Jan 22 06:24:02 crc kubenswrapper[4982]: I0122 06:24:02.904401 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mr572" podStartSLOduration=3.180410328 podStartE2EDuration="5.904375432s" podCreationTimestamp="2026-01-22 06:23:57 +0000 UTC" firstStartedPulling="2026-01-22 06:23:59.849968627 +0000 UTC m=+2300.688606630" lastFinishedPulling="2026-01-22 06:24:02.573933721 +0000 UTC m=+2303.412571734" observedRunningTime="2026-01-22 06:24:02.899823689 +0000 UTC m=+2303.738461752" watchObservedRunningTime="2026-01-22 06:24:02.904375432 +0000 UTC m=+2303.743013475" Jan 22 06:24:08 crc 
kubenswrapper[4982]: I0122 06:24:08.307444 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mr572" Jan 22 06:24:08 crc kubenswrapper[4982]: I0122 06:24:08.308116 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mr572" Jan 22 06:24:08 crc kubenswrapper[4982]: I0122 06:24:08.366154 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mr572" Jan 22 06:24:08 crc kubenswrapper[4982]: I0122 06:24:08.989136 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mr572" Jan 22 06:24:09 crc kubenswrapper[4982]: I0122 06:24:09.051848 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mr572"] Jan 22 06:24:10 crc kubenswrapper[4982]: I0122 06:24:10.940690 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mr572" podUID="360dccc1-e043-4435-be75-496cf095c47a" containerName="registry-server" containerID="cri-o://f46a0aad93f2621777b55085a71693e9c064cb0b3ff78db8c7def54924914686" gracePeriod=2 Jan 22 06:24:11 crc kubenswrapper[4982]: I0122 06:24:11.927663 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mr572" Jan 22 06:24:11 crc kubenswrapper[4982]: I0122 06:24:11.958653 4982 generic.go:334] "Generic (PLEG): container finished" podID="360dccc1-e043-4435-be75-496cf095c47a" containerID="f46a0aad93f2621777b55085a71693e9c064cb0b3ff78db8c7def54924914686" exitCode=0 Jan 22 06:24:11 crc kubenswrapper[4982]: I0122 06:24:11.958705 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mr572" event={"ID":"360dccc1-e043-4435-be75-496cf095c47a","Type":"ContainerDied","Data":"f46a0aad93f2621777b55085a71693e9c064cb0b3ff78db8c7def54924914686"} Jan 22 06:24:11 crc kubenswrapper[4982]: I0122 06:24:11.958727 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mr572" Jan 22 06:24:11 crc kubenswrapper[4982]: I0122 06:24:11.958751 4982 scope.go:117] "RemoveContainer" containerID="f46a0aad93f2621777b55085a71693e9c064cb0b3ff78db8c7def54924914686" Jan 22 06:24:11 crc kubenswrapper[4982]: I0122 06:24:11.958738 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mr572" event={"ID":"360dccc1-e043-4435-be75-496cf095c47a","Type":"ContainerDied","Data":"841bd49fd1a89bc928071fb7c9fdba5782102417db3e767d6cac5b2788cf3457"} Jan 22 06:24:11 crc kubenswrapper[4982]: I0122 06:24:11.994252 4982 scope.go:117] "RemoveContainer" containerID="1e607422b51db950689094f3d726cfc9f9018224b2ee33c8c9472e393c0f05e4" Jan 22 06:24:12 crc kubenswrapper[4982]: I0122 06:24:12.027192 4982 scope.go:117] "RemoveContainer" containerID="6e861e1c68c5659204d7e2df3aec6dda9964695308d4d8b31b3c5a34eb531f79" Jan 22 06:24:12 crc kubenswrapper[4982]: I0122 06:24:12.027996 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/360dccc1-e043-4435-be75-496cf095c47a-catalog-content\") pod \"360dccc1-e043-4435-be75-496cf095c47a\" (UID: \"360dccc1-e043-4435-be75-496cf095c47a\") " Jan 22 06:24:12 crc kubenswrapper[4982]: I0122 06:24:12.028082 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7rrnx\" (UniqueName: \"kubernetes.io/projected/360dccc1-e043-4435-be75-496cf095c47a-kube-api-access-7rrnx\") pod \"360dccc1-e043-4435-be75-496cf095c47a\" (UID: \"360dccc1-e043-4435-be75-496cf095c47a\") " Jan 22 06:24:12 crc kubenswrapper[4982]: I0122 06:24:12.028146 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/360dccc1-e043-4435-be75-496cf095c47a-utilities\") pod \"360dccc1-e043-4435-be75-496cf095c47a\" (UID: \"360dccc1-e043-4435-be75-496cf095c47a\") " Jan 22 06:24:12 crc kubenswrapper[4982]: I0122 06:24:12.029179 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/360dccc1-e043-4435-be75-496cf095c47a-utilities" (OuterVolumeSpecName: "utilities") pod "360dccc1-e043-4435-be75-496cf095c47a" (UID: "360dccc1-e043-4435-be75-496cf095c47a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:24:12 crc kubenswrapper[4982]: I0122 06:24:12.037038 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/360dccc1-e043-4435-be75-496cf095c47a-kube-api-access-7rrnx" (OuterVolumeSpecName: "kube-api-access-7rrnx") pod "360dccc1-e043-4435-be75-496cf095c47a" (UID: "360dccc1-e043-4435-be75-496cf095c47a"). InnerVolumeSpecName "kube-api-access-7rrnx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:24:12 crc kubenswrapper[4982]: I0122 06:24:12.050901 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/360dccc1-e043-4435-be75-496cf095c47a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "360dccc1-e043-4435-be75-496cf095c47a" (UID: "360dccc1-e043-4435-be75-496cf095c47a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:24:12 crc kubenswrapper[4982]: I0122 06:24:12.080649 4982 scope.go:117] "RemoveContainer" containerID="f46a0aad93f2621777b55085a71693e9c064cb0b3ff78db8c7def54924914686" Jan 22 06:24:12 crc kubenswrapper[4982]: E0122 06:24:12.081296 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f46a0aad93f2621777b55085a71693e9c064cb0b3ff78db8c7def54924914686\": container with ID starting with f46a0aad93f2621777b55085a71693e9c064cb0b3ff78db8c7def54924914686 not found: ID does not exist" containerID="f46a0aad93f2621777b55085a71693e9c064cb0b3ff78db8c7def54924914686" Jan 22 06:24:12 crc kubenswrapper[4982]: I0122 06:24:12.081332 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f46a0aad93f2621777b55085a71693e9c064cb0b3ff78db8c7def54924914686"} err="failed to get container status \"f46a0aad93f2621777b55085a71693e9c064cb0b3ff78db8c7def54924914686\": rpc error: code = NotFound desc = could not find container \"f46a0aad93f2621777b55085a71693e9c064cb0b3ff78db8c7def54924914686\": container with ID starting with f46a0aad93f2621777b55085a71693e9c064cb0b3ff78db8c7def54924914686 not found: ID does not exist" Jan 22 06:24:12 crc kubenswrapper[4982]: I0122 06:24:12.081355 4982 scope.go:117] "RemoveContainer" containerID="1e607422b51db950689094f3d726cfc9f9018224b2ee33c8c9472e393c0f05e4" Jan 22 06:24:12 crc kubenswrapper[4982]: E0122 06:24:12.081638 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e607422b51db950689094f3d726cfc9f9018224b2ee33c8c9472e393c0f05e4\": container with ID starting with 1e607422b51db950689094f3d726cfc9f9018224b2ee33c8c9472e393c0f05e4 not found: ID does not exist" containerID="1e607422b51db950689094f3d726cfc9f9018224b2ee33c8c9472e393c0f05e4" Jan 22 06:24:12 crc kubenswrapper[4982]: I0122 06:24:12.081684 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e607422b51db950689094f3d726cfc9f9018224b2ee33c8c9472e393c0f05e4"} err="failed to get container status \"1e607422b51db950689094f3d726cfc9f9018224b2ee33c8c9472e393c0f05e4\": rpc error: code = NotFound desc = could not find container \"1e607422b51db950689094f3d726cfc9f9018224b2ee33c8c9472e393c0f05e4\": container with ID starting with 1e607422b51db950689094f3d726cfc9f9018224b2ee33c8c9472e393c0f05e4 not found: ID does not exist" Jan 22 06:24:12 crc kubenswrapper[4982]: I0122 06:24:12.081711 4982 scope.go:117] "RemoveContainer" containerID="6e861e1c68c5659204d7e2df3aec6dda9964695308d4d8b31b3c5a34eb531f79" Jan 22 06:24:12 crc kubenswrapper[4982]: E0122 06:24:12.082021 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e861e1c68c5659204d7e2df3aec6dda9964695308d4d8b31b3c5a34eb531f79\": container with ID starting with 6e861e1c68c5659204d7e2df3aec6dda9964695308d4d8b31b3c5a34eb531f79 not found: ID does not exist" containerID="6e861e1c68c5659204d7e2df3aec6dda9964695308d4d8b31b3c5a34eb531f79" Jan 22 06:24:12 crc kubenswrapper[4982]: I0122 06:24:12.082048 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e861e1c68c5659204d7e2df3aec6dda9964695308d4d8b31b3c5a34eb531f79"} err="failed to get container status \"6e861e1c68c5659204d7e2df3aec6dda9964695308d4d8b31b3c5a34eb531f79\": rpc error: code = NotFound desc = could not 
find container \"6e861e1c68c5659204d7e2df3aec6dda9964695308d4d8b31b3c5a34eb531f79\": container with ID starting with 6e861e1c68c5659204d7e2df3aec6dda9964695308d4d8b31b3c5a34eb531f79 not found: ID does not exist" Jan 22 06:24:12 crc kubenswrapper[4982]: I0122 06:24:12.129679 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/360dccc1-e043-4435-be75-496cf095c47a-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 06:24:12 crc kubenswrapper[4982]: I0122 06:24:12.129717 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7rrnx\" (UniqueName: \"kubernetes.io/projected/360dccc1-e043-4435-be75-496cf095c47a-kube-api-access-7rrnx\") on node \"crc\" DevicePath \"\"" Jan 22 06:24:12 crc kubenswrapper[4982]: I0122 06:24:12.129772 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/360dccc1-e043-4435-be75-496cf095c47a-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 06:24:12 crc kubenswrapper[4982]: I0122 06:24:12.327256 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mr572"] Jan 22 06:24:12 crc kubenswrapper[4982]: I0122 06:24:12.339917 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mr572"] Jan 22 06:24:13 crc kubenswrapper[4982]: I0122 06:24:13.729619 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="360dccc1-e043-4435-be75-496cf095c47a" path="/var/lib/kubelet/pods/360dccc1-e043-4435-be75-496cf095c47a/volumes" Jan 22 06:24:48 crc kubenswrapper[4982]: I0122 06:24:48.974362 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 06:24:48 crc kubenswrapper[4982]: I0122 06:24:48.975189 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 06:25:18 crc kubenswrapper[4982]: I0122 06:25:18.974319 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 06:25:18 crc kubenswrapper[4982]: I0122 06:25:18.974960 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 06:25:48 crc kubenswrapper[4982]: I0122 06:25:48.974268 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 06:25:48 crc kubenswrapper[4982]: I0122 06:25:48.974851 4982 prober.go:107] "Probe 
failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 06:25:48 crc kubenswrapper[4982]: I0122 06:25:48.974922 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 06:25:48 crc kubenswrapper[4982]: I0122 06:25:48.975629 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"32e84d617816cde5bb9b78f61b87b5d0ea31e9b82c1ad5afbb9ff31d965f7faf"} pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 06:25:48 crc kubenswrapper[4982]: I0122 06:25:48.975692 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" containerID="cri-o://32e84d617816cde5bb9b78f61b87b5d0ea31e9b82c1ad5afbb9ff31d965f7faf" gracePeriod=600 Jan 22 06:25:49 crc kubenswrapper[4982]: E0122 06:25:49.109684 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:25:49 crc kubenswrapper[4982]: I0122 06:25:49.812266 4982 generic.go:334] "Generic (PLEG): container finished" podID="2829369e-72ba-4637-853b-88f5cf242a0e" containerID="32e84d617816cde5bb9b78f61b87b5d0ea31e9b82c1ad5afbb9ff31d965f7faf" exitCode=0 Jan 22 06:25:49 crc kubenswrapper[4982]: I0122 06:25:49.812318 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerDied","Data":"32e84d617816cde5bb9b78f61b87b5d0ea31e9b82c1ad5afbb9ff31d965f7faf"} Jan 22 06:25:49 crc kubenswrapper[4982]: I0122 06:25:49.812423 4982 scope.go:117] "RemoveContainer" containerID="8331e5fc2740e88c3621d8b716829aafc346943b6dd36e5c79bdcce5649079ce" Jan 22 06:25:49 crc kubenswrapper[4982]: I0122 06:25:49.813214 4982 scope.go:117] "RemoveContainer" containerID="32e84d617816cde5bb9b78f61b87b5d0ea31e9b82c1ad5afbb9ff31d965f7faf" Jan 22 06:25:49 crc kubenswrapper[4982]: E0122 06:25:49.813631 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:26:04 crc kubenswrapper[4982]: I0122 06:26:04.719196 4982 scope.go:117] "RemoveContainer" containerID="32e84d617816cde5bb9b78f61b87b5d0ea31e9b82c1ad5afbb9ff31d965f7faf" Jan 22 06:26:04 crc kubenswrapper[4982]: E0122 06:26:04.720337 4982 pod_workers.go:1301] "Error syncing 
pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:26:17 crc kubenswrapper[4982]: I0122 06:26:17.719208 4982 scope.go:117] "RemoveContainer" containerID="32e84d617816cde5bb9b78f61b87b5d0ea31e9b82c1ad5afbb9ff31d965f7faf" Jan 22 06:26:17 crc kubenswrapper[4982]: E0122 06:26:17.719823 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:26:30 crc kubenswrapper[4982]: I0122 06:26:30.719980 4982 scope.go:117] "RemoveContainer" containerID="32e84d617816cde5bb9b78f61b87b5d0ea31e9b82c1ad5afbb9ff31d965f7faf" Jan 22 06:26:30 crc kubenswrapper[4982]: E0122 06:26:30.720976 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:26:43 crc kubenswrapper[4982]: I0122 06:26:43.719314 4982 scope.go:117] "RemoveContainer" containerID="32e84d617816cde5bb9b78f61b87b5d0ea31e9b82c1ad5afbb9ff31d965f7faf" Jan 22 06:26:43 crc kubenswrapper[4982]: E0122 06:26:43.722360 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:26:56 crc kubenswrapper[4982]: I0122 06:26:56.719921 4982 scope.go:117] "RemoveContainer" containerID="32e84d617816cde5bb9b78f61b87b5d0ea31e9b82c1ad5afbb9ff31d965f7faf" Jan 22 06:26:56 crc kubenswrapper[4982]: E0122 06:26:56.720444 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:27:08 crc kubenswrapper[4982]: I0122 06:27:08.719433 4982 scope.go:117] "RemoveContainer" containerID="32e84d617816cde5bb9b78f61b87b5d0ea31e9b82c1ad5afbb9ff31d965f7faf" Jan 22 06:27:08 crc kubenswrapper[4982]: E0122 06:27:08.720694 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:27:23 crc kubenswrapper[4982]: I0122 06:27:23.719343 4982 scope.go:117] "RemoveContainer" containerID="32e84d617816cde5bb9b78f61b87b5d0ea31e9b82c1ad5afbb9ff31d965f7faf" Jan 22 06:27:23 crc kubenswrapper[4982]: E0122 06:27:23.720076 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:27:35 crc kubenswrapper[4982]: I0122 06:27:35.721033 4982 scope.go:117] "RemoveContainer" containerID="32e84d617816cde5bb9b78f61b87b5d0ea31e9b82c1ad5afbb9ff31d965f7faf" Jan 22 06:27:35 crc kubenswrapper[4982]: E0122 06:27:35.721985 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:27:47 crc kubenswrapper[4982]: I0122 06:27:47.719979 4982 scope.go:117] "RemoveContainer" containerID="32e84d617816cde5bb9b78f61b87b5d0ea31e9b82c1ad5afbb9ff31d965f7faf" Jan 22 06:27:47 crc kubenswrapper[4982]: E0122 06:27:47.721188 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:28:00 crc kubenswrapper[4982]: I0122 06:28:00.719962 4982 scope.go:117] "RemoveContainer" containerID="32e84d617816cde5bb9b78f61b87b5d0ea31e9b82c1ad5afbb9ff31d965f7faf" Jan 22 06:28:00 crc kubenswrapper[4982]: E0122 06:28:00.721172 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:28:13 crc kubenswrapper[4982]: I0122 06:28:13.720066 4982 scope.go:117] "RemoveContainer" containerID="32e84d617816cde5bb9b78f61b87b5d0ea31e9b82c1ad5afbb9ff31d965f7faf" Jan 22 06:28:13 crc kubenswrapper[4982]: E0122 06:28:13.720939 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" 
podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:28:24 crc kubenswrapper[4982]: I0122 06:28:24.719414 4982 scope.go:117] "RemoveContainer" containerID="32e84d617816cde5bb9b78f61b87b5d0ea31e9b82c1ad5afbb9ff31d965f7faf" Jan 22 06:28:24 crc kubenswrapper[4982]: E0122 06:28:24.720663 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:28:35 crc kubenswrapper[4982]: I0122 06:28:35.720064 4982 scope.go:117] "RemoveContainer" containerID="32e84d617816cde5bb9b78f61b87b5d0ea31e9b82c1ad5afbb9ff31d965f7faf" Jan 22 06:28:35 crc kubenswrapper[4982]: E0122 06:28:35.725709 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:28:49 crc kubenswrapper[4982]: I0122 06:28:49.730130 4982 scope.go:117] "RemoveContainer" containerID="32e84d617816cde5bb9b78f61b87b5d0ea31e9b82c1ad5afbb9ff31d965f7faf" Jan 22 06:28:49 crc kubenswrapper[4982]: E0122 06:28:49.733692 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:29:02 crc kubenswrapper[4982]: I0122 06:29:02.721306 4982 scope.go:117] "RemoveContainer" containerID="32e84d617816cde5bb9b78f61b87b5d0ea31e9b82c1ad5afbb9ff31d965f7faf" Jan 22 06:29:02 crc kubenswrapper[4982]: E0122 06:29:02.722571 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:29:15 crc kubenswrapper[4982]: I0122 06:29:15.720064 4982 scope.go:117] "RemoveContainer" containerID="32e84d617816cde5bb9b78f61b87b5d0ea31e9b82c1ad5afbb9ff31d965f7faf" Jan 22 06:29:15 crc kubenswrapper[4982]: E0122 06:29:15.721458 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:29:27 crc kubenswrapper[4982]: I0122 06:29:27.719927 4982 scope.go:117] "RemoveContainer" 
containerID="32e84d617816cde5bb9b78f61b87b5d0ea31e9b82c1ad5afbb9ff31d965f7faf" Jan 22 06:29:27 crc kubenswrapper[4982]: E0122 06:29:27.720971 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:29:39 crc kubenswrapper[4982]: I0122 06:29:39.725033 4982 scope.go:117] "RemoveContainer" containerID="32e84d617816cde5bb9b78f61b87b5d0ea31e9b82c1ad5afbb9ff31d965f7faf" Jan 22 06:29:39 crc kubenswrapper[4982]: E0122 06:29:39.726253 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:29:54 crc kubenswrapper[4982]: I0122 06:29:54.719562 4982 scope.go:117] "RemoveContainer" containerID="32e84d617816cde5bb9b78f61b87b5d0ea31e9b82c1ad5afbb9ff31d965f7faf" Jan 22 06:29:54 crc kubenswrapper[4982]: E0122 06:29:54.720599 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:30:00 crc kubenswrapper[4982]: I0122 06:30:00.176142 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484390-svtdh"] Jan 22 06:30:00 crc kubenswrapper[4982]: E0122 06:30:00.179499 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="360dccc1-e043-4435-be75-496cf095c47a" containerName="registry-server" Jan 22 06:30:00 crc kubenswrapper[4982]: I0122 06:30:00.179531 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="360dccc1-e043-4435-be75-496cf095c47a" containerName="registry-server" Jan 22 06:30:00 crc kubenswrapper[4982]: E0122 06:30:00.179596 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="360dccc1-e043-4435-be75-496cf095c47a" containerName="extract-utilities" Jan 22 06:30:00 crc kubenswrapper[4982]: I0122 06:30:00.179615 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="360dccc1-e043-4435-be75-496cf095c47a" containerName="extract-utilities" Jan 22 06:30:00 crc kubenswrapper[4982]: E0122 06:30:00.179635 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="360dccc1-e043-4435-be75-496cf095c47a" containerName="extract-content" Jan 22 06:30:00 crc kubenswrapper[4982]: I0122 06:30:00.179654 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="360dccc1-e043-4435-be75-496cf095c47a" containerName="extract-content" Jan 22 06:30:00 crc kubenswrapper[4982]: I0122 06:30:00.180692 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="360dccc1-e043-4435-be75-496cf095c47a" containerName="registry-server" Jan 22 06:30:00 crc kubenswrapper[4982]: I0122 
06:30:00.181495 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484390-svtdh" Jan 22 06:30:00 crc kubenswrapper[4982]: I0122 06:30:00.194517 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 22 06:30:00 crc kubenswrapper[4982]: I0122 06:30:00.194604 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 22 06:30:00 crc kubenswrapper[4982]: I0122 06:30:00.227165 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484390-svtdh"] Jan 22 06:30:00 crc kubenswrapper[4982]: I0122 06:30:00.345631 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d534a1e1-d6f5-4ceb-9129-51b4761a3745-config-volume\") pod \"collect-profiles-29484390-svtdh\" (UID: \"d534a1e1-d6f5-4ceb-9129-51b4761a3745\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484390-svtdh" Jan 22 06:30:00 crc kubenswrapper[4982]: I0122 06:30:00.345734 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lwn8\" (UniqueName: \"kubernetes.io/projected/d534a1e1-d6f5-4ceb-9129-51b4761a3745-kube-api-access-5lwn8\") pod \"collect-profiles-29484390-svtdh\" (UID: \"d534a1e1-d6f5-4ceb-9129-51b4761a3745\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484390-svtdh" Jan 22 06:30:00 crc kubenswrapper[4982]: I0122 06:30:00.345824 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d534a1e1-d6f5-4ceb-9129-51b4761a3745-secret-volume\") pod \"collect-profiles-29484390-svtdh\" (UID: \"d534a1e1-d6f5-4ceb-9129-51b4761a3745\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484390-svtdh" Jan 22 06:30:00 crc kubenswrapper[4982]: I0122 06:30:00.447008 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d534a1e1-d6f5-4ceb-9129-51b4761a3745-config-volume\") pod \"collect-profiles-29484390-svtdh\" (UID: \"d534a1e1-d6f5-4ceb-9129-51b4761a3745\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484390-svtdh" Jan 22 06:30:00 crc kubenswrapper[4982]: I0122 06:30:00.447129 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5lwn8\" (UniqueName: \"kubernetes.io/projected/d534a1e1-d6f5-4ceb-9129-51b4761a3745-kube-api-access-5lwn8\") pod \"collect-profiles-29484390-svtdh\" (UID: \"d534a1e1-d6f5-4ceb-9129-51b4761a3745\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484390-svtdh" Jan 22 06:30:00 crc kubenswrapper[4982]: I0122 06:30:00.447187 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d534a1e1-d6f5-4ceb-9129-51b4761a3745-secret-volume\") pod \"collect-profiles-29484390-svtdh\" (UID: \"d534a1e1-d6f5-4ceb-9129-51b4761a3745\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484390-svtdh" Jan 22 06:30:00 crc kubenswrapper[4982]: I0122 06:30:00.448701 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/d534a1e1-d6f5-4ceb-9129-51b4761a3745-config-volume\") pod \"collect-profiles-29484390-svtdh\" (UID: \"d534a1e1-d6f5-4ceb-9129-51b4761a3745\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484390-svtdh" Jan 22 06:30:00 crc kubenswrapper[4982]: I0122 06:30:00.460987 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d534a1e1-d6f5-4ceb-9129-51b4761a3745-secret-volume\") pod \"collect-profiles-29484390-svtdh\" (UID: \"d534a1e1-d6f5-4ceb-9129-51b4761a3745\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484390-svtdh" Jan 22 06:30:00 crc kubenswrapper[4982]: I0122 06:30:00.479088 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5lwn8\" (UniqueName: \"kubernetes.io/projected/d534a1e1-d6f5-4ceb-9129-51b4761a3745-kube-api-access-5lwn8\") pod \"collect-profiles-29484390-svtdh\" (UID: \"d534a1e1-d6f5-4ceb-9129-51b4761a3745\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484390-svtdh" Jan 22 06:30:00 crc kubenswrapper[4982]: I0122 06:30:00.512689 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484390-svtdh" Jan 22 06:30:00 crc kubenswrapper[4982]: I0122 06:30:00.765496 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484390-svtdh"] Jan 22 06:30:01 crc kubenswrapper[4982]: I0122 06:30:01.007490 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484390-svtdh" event={"ID":"d534a1e1-d6f5-4ceb-9129-51b4761a3745","Type":"ContainerStarted","Data":"b82e6c43895cebfb4bb14e31e44685579305a7be4e90848cdf322215a5d1f29b"} Jan 22 06:30:01 crc kubenswrapper[4982]: I0122 06:30:01.007779 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484390-svtdh" event={"ID":"d534a1e1-d6f5-4ceb-9129-51b4761a3745","Type":"ContainerStarted","Data":"83970b3be1cfb8c8ae89ffa17c079cd2fdd9f1a4fbef4b44a4ee48002f5106fd"} Jan 22 06:30:01 crc kubenswrapper[4982]: I0122 06:30:01.045483 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29484390-svtdh" podStartSLOduration=1.045454236 podStartE2EDuration="1.045454236s" podCreationTimestamp="2026-01-22 06:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 06:30:01.03065557 +0000 UTC m=+2661.869293613" watchObservedRunningTime="2026-01-22 06:30:01.045454236 +0000 UTC m=+2661.884092279" Jan 22 06:30:02 crc kubenswrapper[4982]: I0122 06:30:02.018239 4982 generic.go:334] "Generic (PLEG): container finished" podID="d534a1e1-d6f5-4ceb-9129-51b4761a3745" containerID="b82e6c43895cebfb4bb14e31e44685579305a7be4e90848cdf322215a5d1f29b" exitCode=0 Jan 22 06:30:02 crc kubenswrapper[4982]: I0122 06:30:02.018313 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484390-svtdh" event={"ID":"d534a1e1-d6f5-4ceb-9129-51b4761a3745","Type":"ContainerDied","Data":"b82e6c43895cebfb4bb14e31e44685579305a7be4e90848cdf322215a5d1f29b"} Jan 22 06:30:03 crc kubenswrapper[4982]: I0122 06:30:03.418145 4982 util.go:48] "No ready sandbox for pod can be found. 
Jan 22 06:30:03 crc kubenswrapper[4982]: I0122 06:30:03.592951 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5lwn8\" (UniqueName: \"kubernetes.io/projected/d534a1e1-d6f5-4ceb-9129-51b4761a3745-kube-api-access-5lwn8\") pod \"d534a1e1-d6f5-4ceb-9129-51b4761a3745\" (UID: \"d534a1e1-d6f5-4ceb-9129-51b4761a3745\") "
Jan 22 06:30:03 crc kubenswrapper[4982]: I0122 06:30:03.593195 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d534a1e1-d6f5-4ceb-9129-51b4761a3745-secret-volume\") pod \"d534a1e1-d6f5-4ceb-9129-51b4761a3745\" (UID: \"d534a1e1-d6f5-4ceb-9129-51b4761a3745\") "
Jan 22 06:30:03 crc kubenswrapper[4982]: I0122 06:30:03.593238 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d534a1e1-d6f5-4ceb-9129-51b4761a3745-config-volume\") pod \"d534a1e1-d6f5-4ceb-9129-51b4761a3745\" (UID: \"d534a1e1-d6f5-4ceb-9129-51b4761a3745\") "
Jan 22 06:30:03 crc kubenswrapper[4982]: I0122 06:30:03.594397 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d534a1e1-d6f5-4ceb-9129-51b4761a3745-config-volume" (OuterVolumeSpecName: "config-volume") pod "d534a1e1-d6f5-4ceb-9129-51b4761a3745" (UID: "d534a1e1-d6f5-4ceb-9129-51b4761a3745"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 06:30:03 crc kubenswrapper[4982]: I0122 06:30:03.601552 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d534a1e1-d6f5-4ceb-9129-51b4761a3745-kube-api-access-5lwn8" (OuterVolumeSpecName: "kube-api-access-5lwn8") pod "d534a1e1-d6f5-4ceb-9129-51b4761a3745" (UID: "d534a1e1-d6f5-4ceb-9129-51b4761a3745"). InnerVolumeSpecName "kube-api-access-5lwn8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 06:30:03 crc kubenswrapper[4982]: I0122 06:30:03.602238 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d534a1e1-d6f5-4ceb-9129-51b4761a3745-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d534a1e1-d6f5-4ceb-9129-51b4761a3745" (UID: "d534a1e1-d6f5-4ceb-9129-51b4761a3745"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 06:30:03 crc kubenswrapper[4982]: I0122 06:30:03.695368 4982 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d534a1e1-d6f5-4ceb-9129-51b4761a3745-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 22 06:30:03 crc kubenswrapper[4982]: I0122 06:30:03.695457 4982 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d534a1e1-d6f5-4ceb-9129-51b4761a3745-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 06:30:03 crc kubenswrapper[4982]: I0122 06:30:03.695479 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5lwn8\" (UniqueName: \"kubernetes.io/projected/d534a1e1-d6f5-4ceb-9129-51b4761a3745-kube-api-access-5lwn8\") on node \"crc\" DevicePath \"\"" Jan 22 06:30:04 crc kubenswrapper[4982]: I0122 06:30:04.042161 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484390-svtdh" event={"ID":"d534a1e1-d6f5-4ceb-9129-51b4761a3745","Type":"ContainerDied","Data":"83970b3be1cfb8c8ae89ffa17c079cd2fdd9f1a4fbef4b44a4ee48002f5106fd"} Jan 22 06:30:04 crc kubenswrapper[4982]: I0122 06:30:04.042225 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="83970b3be1cfb8c8ae89ffa17c079cd2fdd9f1a4fbef4b44a4ee48002f5106fd" Jan 22 06:30:04 crc kubenswrapper[4982]: I0122 06:30:04.042256 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484390-svtdh" Jan 22 06:30:04 crc kubenswrapper[4982]: I0122 06:30:04.522507 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484345-fvh4j"] Jan 22 06:30:04 crc kubenswrapper[4982]: I0122 06:30:04.532823 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484345-fvh4j"] Jan 22 06:30:05 crc kubenswrapper[4982]: I0122 06:30:05.719920 4982 scope.go:117] "RemoveContainer" containerID="32e84d617816cde5bb9b78f61b87b5d0ea31e9b82c1ad5afbb9ff31d965f7faf" Jan 22 06:30:05 crc kubenswrapper[4982]: E0122 06:30:05.720703 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:30:05 crc kubenswrapper[4982]: I0122 06:30:05.734756 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf" path="/var/lib/kubelet/pods/84e72309-bc5e-4a7d-8dc9-4c5a3f4909cf/volumes" Jan 22 06:30:18 crc kubenswrapper[4982]: I0122 06:30:18.323054 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-chlml"] Jan 22 06:30:18 crc kubenswrapper[4982]: E0122 06:30:18.325286 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d534a1e1-d6f5-4ceb-9129-51b4761a3745" containerName="collect-profiles" Jan 22 06:30:18 crc kubenswrapper[4982]: I0122 06:30:18.325410 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d534a1e1-d6f5-4ceb-9129-51b4761a3745" containerName="collect-profiles" Jan 22 06:30:18 crc 
Jan 22 06:30:18 crc kubenswrapper[4982]: I0122 06:30:18.327084 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-chlml"
Jan 22 06:30:18 crc kubenswrapper[4982]: I0122 06:30:18.355598 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-chlml"]
Jan 22 06:30:18 crc kubenswrapper[4982]: I0122 06:30:18.443565 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqls2\" (UniqueName: \"kubernetes.io/projected/f0de91d7-e82c-40a6-8ef3-d057c2b762d3-kube-api-access-wqls2\") pod \"redhat-operators-chlml\" (UID: \"f0de91d7-e82c-40a6-8ef3-d057c2b762d3\") " pod="openshift-marketplace/redhat-operators-chlml"
Jan 22 06:30:18 crc kubenswrapper[4982]: I0122 06:30:18.443860 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0de91d7-e82c-40a6-8ef3-d057c2b762d3-catalog-content\") pod \"redhat-operators-chlml\" (UID: \"f0de91d7-e82c-40a6-8ef3-d057c2b762d3\") " pod="openshift-marketplace/redhat-operators-chlml"
Jan 22 06:30:18 crc kubenswrapper[4982]: I0122 06:30:18.443954 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0de91d7-e82c-40a6-8ef3-d057c2b762d3-utilities\") pod \"redhat-operators-chlml\" (UID: \"f0de91d7-e82c-40a6-8ef3-d057c2b762d3\") " pod="openshift-marketplace/redhat-operators-chlml"
Jan 22 06:30:18 crc kubenswrapper[4982]: I0122 06:30:18.545049 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqls2\" (UniqueName: \"kubernetes.io/projected/f0de91d7-e82c-40a6-8ef3-d057c2b762d3-kube-api-access-wqls2\") pod \"redhat-operators-chlml\" (UID: \"f0de91d7-e82c-40a6-8ef3-d057c2b762d3\") " pod="openshift-marketplace/redhat-operators-chlml"
Jan 22 06:30:18 crc kubenswrapper[4982]: I0122 06:30:18.545124 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0de91d7-e82c-40a6-8ef3-d057c2b762d3-catalog-content\") pod \"redhat-operators-chlml\" (UID: \"f0de91d7-e82c-40a6-8ef3-d057c2b762d3\") " pod="openshift-marketplace/redhat-operators-chlml"
Jan 22 06:30:18 crc kubenswrapper[4982]: I0122 06:30:18.545199 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0de91d7-e82c-40a6-8ef3-d057c2b762d3-utilities\") pod \"redhat-operators-chlml\" (UID: \"f0de91d7-e82c-40a6-8ef3-d057c2b762d3\") " pod="openshift-marketplace/redhat-operators-chlml"
Jan 22 06:30:18 crc kubenswrapper[4982]: I0122 06:30:18.545821 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0de91d7-e82c-40a6-8ef3-d057c2b762d3-utilities\") pod \"redhat-operators-chlml\" (UID: \"f0de91d7-e82c-40a6-8ef3-d057c2b762d3\") " pod="openshift-marketplace/redhat-operators-chlml"
Jan 22 06:30:18 crc kubenswrapper[4982]: I0122 06:30:18.546040 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0de91d7-e82c-40a6-8ef3-d057c2b762d3-catalog-content\") pod \"redhat-operators-chlml\" (UID: \"f0de91d7-e82c-40a6-8ef3-d057c2b762d3\") " pod="openshift-marketplace/redhat-operators-chlml"
\"kubernetes.io/empty-dir/f0de91d7-e82c-40a6-8ef3-d057c2b762d3-catalog-content\") pod \"redhat-operators-chlml\" (UID: \"f0de91d7-e82c-40a6-8ef3-d057c2b762d3\") " pod="openshift-marketplace/redhat-operators-chlml" Jan 22 06:30:18 crc kubenswrapper[4982]: I0122 06:30:18.568353 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqls2\" (UniqueName: \"kubernetes.io/projected/f0de91d7-e82c-40a6-8ef3-d057c2b762d3-kube-api-access-wqls2\") pod \"redhat-operators-chlml\" (UID: \"f0de91d7-e82c-40a6-8ef3-d057c2b762d3\") " pod="openshift-marketplace/redhat-operators-chlml" Jan 22 06:30:18 crc kubenswrapper[4982]: I0122 06:30:18.652672 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-chlml" Jan 22 06:30:19 crc kubenswrapper[4982]: I0122 06:30:19.118299 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-chlml"] Jan 22 06:30:19 crc kubenswrapper[4982]: W0122 06:30:19.126665 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf0de91d7_e82c_40a6_8ef3_d057c2b762d3.slice/crio-a73ff1b6eb1c420f1cbf5a3dcd92fd99e831f6088462b2a91a0c4bb7817ccee2 WatchSource:0}: Error finding container a73ff1b6eb1c420f1cbf5a3dcd92fd99e831f6088462b2a91a0c4bb7817ccee2: Status 404 returned error can't find the container with id a73ff1b6eb1c420f1cbf5a3dcd92fd99e831f6088462b2a91a0c4bb7817ccee2 Jan 22 06:30:19 crc kubenswrapper[4982]: I0122 06:30:19.177104 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-chlml" event={"ID":"f0de91d7-e82c-40a6-8ef3-d057c2b762d3","Type":"ContainerStarted","Data":"a73ff1b6eb1c420f1cbf5a3dcd92fd99e831f6088462b2a91a0c4bb7817ccee2"} Jan 22 06:30:19 crc kubenswrapper[4982]: I0122 06:30:19.733629 4982 scope.go:117] "RemoveContainer" containerID="32e84d617816cde5bb9b78f61b87b5d0ea31e9b82c1ad5afbb9ff31d965f7faf" Jan 22 06:30:19 crc kubenswrapper[4982]: E0122 06:30:19.734655 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:30:20 crc kubenswrapper[4982]: I0122 06:30:20.191762 4982 generic.go:334] "Generic (PLEG): container finished" podID="f0de91d7-e82c-40a6-8ef3-d057c2b762d3" containerID="16ff5ec4ff61132ab8f91af4117d581de5e8bd03da8bd0f88b01cecfb368d2f4" exitCode=0 Jan 22 06:30:20 crc kubenswrapper[4982]: I0122 06:30:20.191828 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-chlml" event={"ID":"f0de91d7-e82c-40a6-8ef3-d057c2b762d3","Type":"ContainerDied","Data":"16ff5ec4ff61132ab8f91af4117d581de5e8bd03da8bd0f88b01cecfb368d2f4"} Jan 22 06:30:20 crc kubenswrapper[4982]: I0122 06:30:20.195071 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 06:30:21 crc kubenswrapper[4982]: I0122 06:30:21.202363 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-chlml" 
event={"ID":"f0de91d7-e82c-40a6-8ef3-d057c2b762d3","Type":"ContainerStarted","Data":"919bfd6eaa3d06b10e88235abe3146a80cf64c200497a8ab6c2711e48f8690e5"} Jan 22 06:30:22 crc kubenswrapper[4982]: I0122 06:30:22.211369 4982 generic.go:334] "Generic (PLEG): container finished" podID="f0de91d7-e82c-40a6-8ef3-d057c2b762d3" containerID="919bfd6eaa3d06b10e88235abe3146a80cf64c200497a8ab6c2711e48f8690e5" exitCode=0 Jan 22 06:30:22 crc kubenswrapper[4982]: I0122 06:30:22.211414 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-chlml" event={"ID":"f0de91d7-e82c-40a6-8ef3-d057c2b762d3","Type":"ContainerDied","Data":"919bfd6eaa3d06b10e88235abe3146a80cf64c200497a8ab6c2711e48f8690e5"} Jan 22 06:30:23 crc kubenswrapper[4982]: I0122 06:30:23.222725 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-chlml" event={"ID":"f0de91d7-e82c-40a6-8ef3-d057c2b762d3","Type":"ContainerStarted","Data":"f4f61882f95de5baf0bbc1f7fd6d05c52258376845a593079accc4379b00df6f"} Jan 22 06:30:23 crc kubenswrapper[4982]: I0122 06:30:23.251075 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-chlml" podStartSLOduration=2.848058257 podStartE2EDuration="5.251051041s" podCreationTimestamp="2026-01-22 06:30:18 +0000 UTC" firstStartedPulling="2026-01-22 06:30:20.194756948 +0000 UTC m=+2681.033394961" lastFinishedPulling="2026-01-22 06:30:22.597749732 +0000 UTC m=+2683.436387745" observedRunningTime="2026-01-22 06:30:23.250627299 +0000 UTC m=+2684.089265332" watchObservedRunningTime="2026-01-22 06:30:23.251051041 +0000 UTC m=+2684.089689064" Jan 22 06:30:28 crc kubenswrapper[4982]: I0122 06:30:28.653707 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-chlml" Jan 22 06:30:28 crc kubenswrapper[4982]: I0122 06:30:28.654480 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-chlml" Jan 22 06:30:29 crc kubenswrapper[4982]: I0122 06:30:29.721070 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-chlml" podUID="f0de91d7-e82c-40a6-8ef3-d057c2b762d3" containerName="registry-server" probeResult="failure" output=< Jan 22 06:30:29 crc kubenswrapper[4982]: timeout: failed to connect service ":50051" within 1s Jan 22 06:30:29 crc kubenswrapper[4982]: > Jan 22 06:30:33 crc kubenswrapper[4982]: I0122 06:30:33.719543 4982 scope.go:117] "RemoveContainer" containerID="32e84d617816cde5bb9b78f61b87b5d0ea31e9b82c1ad5afbb9ff31d965f7faf" Jan 22 06:30:33 crc kubenswrapper[4982]: E0122 06:30:33.720531 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:30:38 crc kubenswrapper[4982]: I0122 06:30:38.735442 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-chlml" Jan 22 06:30:38 crc kubenswrapper[4982]: I0122 06:30:38.807776 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-chlml" Jan 22 06:30:38 crc 
Jan 22 06:30:40 crc kubenswrapper[4982]: I0122 06:30:40.402983 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-chlml" podUID="f0de91d7-e82c-40a6-8ef3-d057c2b762d3" containerName="registry-server" containerID="cri-o://f4f61882f95de5baf0bbc1f7fd6d05c52258376845a593079accc4379b00df6f" gracePeriod=2
Jan 22 06:30:41 crc kubenswrapper[4982]: I0122 06:30:41.413221 4982 generic.go:334] "Generic (PLEG): container finished" podID="f0de91d7-e82c-40a6-8ef3-d057c2b762d3" containerID="f4f61882f95de5baf0bbc1f7fd6d05c52258376845a593079accc4379b00df6f" exitCode=0
Jan 22 06:30:41 crc kubenswrapper[4982]: I0122 06:30:41.413466 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-chlml" event={"ID":"f0de91d7-e82c-40a6-8ef3-d057c2b762d3","Type":"ContainerDied","Data":"f4f61882f95de5baf0bbc1f7fd6d05c52258376845a593079accc4379b00df6f"}
Jan 22 06:30:41 crc kubenswrapper[4982]: I0122 06:30:41.954062 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-chlml"
Jan 22 06:30:42 crc kubenswrapper[4982]: I0122 06:30:42.149445 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0de91d7-e82c-40a6-8ef3-d057c2b762d3-catalog-content\") pod \"f0de91d7-e82c-40a6-8ef3-d057c2b762d3\" (UID: \"f0de91d7-e82c-40a6-8ef3-d057c2b762d3\") "
Jan 22 06:30:42 crc kubenswrapper[4982]: I0122 06:30:42.149618 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wqls2\" (UniqueName: \"kubernetes.io/projected/f0de91d7-e82c-40a6-8ef3-d057c2b762d3-kube-api-access-wqls2\") pod \"f0de91d7-e82c-40a6-8ef3-d057c2b762d3\" (UID: \"f0de91d7-e82c-40a6-8ef3-d057c2b762d3\") "
Jan 22 06:30:42 crc kubenswrapper[4982]: I0122 06:30:42.149699 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0de91d7-e82c-40a6-8ef3-d057c2b762d3-utilities\") pod \"f0de91d7-e82c-40a6-8ef3-d057c2b762d3\" (UID: \"f0de91d7-e82c-40a6-8ef3-d057c2b762d3\") "
Jan 22 06:30:42 crc kubenswrapper[4982]: I0122 06:30:42.150901 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0de91d7-e82c-40a6-8ef3-d057c2b762d3-utilities" (OuterVolumeSpecName: "utilities") pod "f0de91d7-e82c-40a6-8ef3-d057c2b762d3" (UID: "f0de91d7-e82c-40a6-8ef3-d057c2b762d3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 06:30:42 crc kubenswrapper[4982]: I0122 06:30:42.159972 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0de91d7-e82c-40a6-8ef3-d057c2b762d3-kube-api-access-wqls2" (OuterVolumeSpecName: "kube-api-access-wqls2") pod "f0de91d7-e82c-40a6-8ef3-d057c2b762d3" (UID: "f0de91d7-e82c-40a6-8ef3-d057c2b762d3"). InnerVolumeSpecName "kube-api-access-wqls2". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:30:42 crc kubenswrapper[4982]: I0122 06:30:42.251891 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wqls2\" (UniqueName: \"kubernetes.io/projected/f0de91d7-e82c-40a6-8ef3-d057c2b762d3-kube-api-access-wqls2\") on node \"crc\" DevicePath \"\"" Jan 22 06:30:42 crc kubenswrapper[4982]: I0122 06:30:42.251942 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0de91d7-e82c-40a6-8ef3-d057c2b762d3-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 06:30:42 crc kubenswrapper[4982]: I0122 06:30:42.336494 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0de91d7-e82c-40a6-8ef3-d057c2b762d3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f0de91d7-e82c-40a6-8ef3-d057c2b762d3" (UID: "f0de91d7-e82c-40a6-8ef3-d057c2b762d3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:30:42 crc kubenswrapper[4982]: I0122 06:30:42.352930 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0de91d7-e82c-40a6-8ef3-d057c2b762d3-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 06:30:42 crc kubenswrapper[4982]: I0122 06:30:42.425270 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-chlml" event={"ID":"f0de91d7-e82c-40a6-8ef3-d057c2b762d3","Type":"ContainerDied","Data":"a73ff1b6eb1c420f1cbf5a3dcd92fd99e831f6088462b2a91a0c4bb7817ccee2"} Jan 22 06:30:42 crc kubenswrapper[4982]: I0122 06:30:42.425315 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-chlml" Jan 22 06:30:42 crc kubenswrapper[4982]: I0122 06:30:42.425348 4982 scope.go:117] "RemoveContainer" containerID="f4f61882f95de5baf0bbc1f7fd6d05c52258376845a593079accc4379b00df6f" Jan 22 06:30:42 crc kubenswrapper[4982]: I0122 06:30:42.479722 4982 scope.go:117] "RemoveContainer" containerID="919bfd6eaa3d06b10e88235abe3146a80cf64c200497a8ab6c2711e48f8690e5" Jan 22 06:30:42 crc kubenswrapper[4982]: I0122 06:30:42.497206 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-chlml"] Jan 22 06:30:42 crc kubenswrapper[4982]: I0122 06:30:42.510816 4982 scope.go:117] "RemoveContainer" containerID="16ff5ec4ff61132ab8f91af4117d581de5e8bd03da8bd0f88b01cecfb368d2f4" Jan 22 06:30:42 crc kubenswrapper[4982]: I0122 06:30:42.515390 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-chlml"] Jan 22 06:30:43 crc kubenswrapper[4982]: I0122 06:30:43.739469 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0de91d7-e82c-40a6-8ef3-d057c2b762d3" path="/var/lib/kubelet/pods/f0de91d7-e82c-40a6-8ef3-d057c2b762d3/volumes" Jan 22 06:30:48 crc kubenswrapper[4982]: I0122 06:30:48.719932 4982 scope.go:117] "RemoveContainer" containerID="32e84d617816cde5bb9b78f61b87b5d0ea31e9b82c1ad5afbb9ff31d965f7faf" Jan 22 06:30:48 crc kubenswrapper[4982]: E0122 06:30:48.720566 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:31:00 crc kubenswrapper[4982]: I0122 06:31:00.720052 4982 scope.go:117] "RemoveContainer" containerID="32e84d617816cde5bb9b78f61b87b5d0ea31e9b82c1ad5afbb9ff31d965f7faf" Jan 22 06:31:00 crc kubenswrapper[4982]: I0122 06:31:00.792076 4982 scope.go:117] "RemoveContainer" containerID="d6457f592a30939a148971496a64879ea48cdf141d58e9a64d5571c0020e1282" Jan 22 06:31:01 crc kubenswrapper[4982]: I0122 06:31:01.597754 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"d1258c54d273754ecc60774a68682b35819117c8b9ae3e2709be7b39d568f0ae"} Jan 22 06:33:18 crc kubenswrapper[4982]: I0122 06:33:18.974556 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 06:33:18 crc kubenswrapper[4982]: I0122 06:33:18.975232 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 06:33:48 crc kubenswrapper[4982]: I0122 06:33:48.974391 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 06:33:48 crc kubenswrapper[4982]: I0122 06:33:48.975158 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 06:34:18 crc kubenswrapper[4982]: I0122 06:34:18.974377 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 06:34:18 crc kubenswrapper[4982]: I0122 06:34:18.974924 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 06:34:18 crc kubenswrapper[4982]: I0122 06:34:18.974970 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 06:34:18 crc kubenswrapper[4982]: I0122 06:34:18.975651 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d1258c54d273754ecc60774a68682b35819117c8b9ae3e2709be7b39d568f0ae"} 
pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 06:34:18 crc kubenswrapper[4982]: I0122 06:34:18.975715 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" containerID="cri-o://d1258c54d273754ecc60774a68682b35819117c8b9ae3e2709be7b39d568f0ae" gracePeriod=600 Jan 22 06:34:19 crc kubenswrapper[4982]: I0122 06:34:19.283261 4982 generic.go:334] "Generic (PLEG): container finished" podID="2829369e-72ba-4637-853b-88f5cf242a0e" containerID="d1258c54d273754ecc60774a68682b35819117c8b9ae3e2709be7b39d568f0ae" exitCode=0 Jan 22 06:34:19 crc kubenswrapper[4982]: I0122 06:34:19.283360 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerDied","Data":"d1258c54d273754ecc60774a68682b35819117c8b9ae3e2709be7b39d568f0ae"} Jan 22 06:34:19 crc kubenswrapper[4982]: I0122 06:34:19.283619 4982 scope.go:117] "RemoveContainer" containerID="32e84d617816cde5bb9b78f61b87b5d0ea31e9b82c1ad5afbb9ff31d965f7faf" Jan 22 06:34:20 crc kubenswrapper[4982]: I0122 06:34:20.293162 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"797e5a3617d23d998f549cb0507aec9958ef71c6452e0672a9498ec5ddad7b58"} Jan 22 06:34:30 crc kubenswrapper[4982]: I0122 06:34:30.889203 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4jm5f"] Jan 22 06:34:30 crc kubenswrapper[4982]: E0122 06:34:30.891509 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0de91d7-e82c-40a6-8ef3-d057c2b762d3" containerName="extract-utilities" Jan 22 06:34:30 crc kubenswrapper[4982]: I0122 06:34:30.891542 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0de91d7-e82c-40a6-8ef3-d057c2b762d3" containerName="extract-utilities" Jan 22 06:34:30 crc kubenswrapper[4982]: E0122 06:34:30.891570 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0de91d7-e82c-40a6-8ef3-d057c2b762d3" containerName="registry-server" Jan 22 06:34:30 crc kubenswrapper[4982]: I0122 06:34:30.891584 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0de91d7-e82c-40a6-8ef3-d057c2b762d3" containerName="registry-server" Jan 22 06:34:30 crc kubenswrapper[4982]: E0122 06:34:30.891611 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0de91d7-e82c-40a6-8ef3-d057c2b762d3" containerName="extract-content" Jan 22 06:34:30 crc kubenswrapper[4982]: I0122 06:34:30.891627 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0de91d7-e82c-40a6-8ef3-d057c2b762d3" containerName="extract-content" Jan 22 06:34:30 crc kubenswrapper[4982]: I0122 06:34:30.891941 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0de91d7-e82c-40a6-8ef3-d057c2b762d3" containerName="registry-server" Jan 22 06:34:30 crc kubenswrapper[4982]: I0122 06:34:30.893675 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4jm5f" Jan 22 06:34:30 crc kubenswrapper[4982]: I0122 06:34:30.906065 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4jm5f"] Jan 22 06:34:31 crc kubenswrapper[4982]: I0122 06:34:31.006202 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8prj\" (UniqueName: \"kubernetes.io/projected/11793134-2acb-4fd9-b081-c104381cf696-kube-api-access-k8prj\") pod \"redhat-marketplace-4jm5f\" (UID: \"11793134-2acb-4fd9-b081-c104381cf696\") " pod="openshift-marketplace/redhat-marketplace-4jm5f" Jan 22 06:34:31 crc kubenswrapper[4982]: I0122 06:34:31.006364 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11793134-2acb-4fd9-b081-c104381cf696-utilities\") pod \"redhat-marketplace-4jm5f\" (UID: \"11793134-2acb-4fd9-b081-c104381cf696\") " pod="openshift-marketplace/redhat-marketplace-4jm5f" Jan 22 06:34:31 crc kubenswrapper[4982]: I0122 06:34:31.006429 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11793134-2acb-4fd9-b081-c104381cf696-catalog-content\") pod \"redhat-marketplace-4jm5f\" (UID: \"11793134-2acb-4fd9-b081-c104381cf696\") " pod="openshift-marketplace/redhat-marketplace-4jm5f" Jan 22 06:34:31 crc kubenswrapper[4982]: I0122 06:34:31.107537 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11793134-2acb-4fd9-b081-c104381cf696-utilities\") pod \"redhat-marketplace-4jm5f\" (UID: \"11793134-2acb-4fd9-b081-c104381cf696\") " pod="openshift-marketplace/redhat-marketplace-4jm5f" Jan 22 06:34:31 crc kubenswrapper[4982]: I0122 06:34:31.107596 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11793134-2acb-4fd9-b081-c104381cf696-catalog-content\") pod \"redhat-marketplace-4jm5f\" (UID: \"11793134-2acb-4fd9-b081-c104381cf696\") " pod="openshift-marketplace/redhat-marketplace-4jm5f" Jan 22 06:34:31 crc kubenswrapper[4982]: I0122 06:34:31.107651 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8prj\" (UniqueName: \"kubernetes.io/projected/11793134-2acb-4fd9-b081-c104381cf696-kube-api-access-k8prj\") pod \"redhat-marketplace-4jm5f\" (UID: \"11793134-2acb-4fd9-b081-c104381cf696\") " pod="openshift-marketplace/redhat-marketplace-4jm5f" Jan 22 06:34:31 crc kubenswrapper[4982]: I0122 06:34:31.108108 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11793134-2acb-4fd9-b081-c104381cf696-utilities\") pod \"redhat-marketplace-4jm5f\" (UID: \"11793134-2acb-4fd9-b081-c104381cf696\") " pod="openshift-marketplace/redhat-marketplace-4jm5f" Jan 22 06:34:31 crc kubenswrapper[4982]: I0122 06:34:31.108141 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11793134-2acb-4fd9-b081-c104381cf696-catalog-content\") pod \"redhat-marketplace-4jm5f\" (UID: \"11793134-2acb-4fd9-b081-c104381cf696\") " pod="openshift-marketplace/redhat-marketplace-4jm5f" Jan 22 06:34:31 crc kubenswrapper[4982]: I0122 06:34:31.134955 4982 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-k8prj\" (UniqueName: \"kubernetes.io/projected/11793134-2acb-4fd9-b081-c104381cf696-kube-api-access-k8prj\") pod \"redhat-marketplace-4jm5f\" (UID: \"11793134-2acb-4fd9-b081-c104381cf696\") " pod="openshift-marketplace/redhat-marketplace-4jm5f" Jan 22 06:34:31 crc kubenswrapper[4982]: I0122 06:34:31.260066 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4jm5f" Jan 22 06:34:31 crc kubenswrapper[4982]: I0122 06:34:31.706648 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4jm5f"] Jan 22 06:34:32 crc kubenswrapper[4982]: I0122 06:34:32.394197 4982 generic.go:334] "Generic (PLEG): container finished" podID="11793134-2acb-4fd9-b081-c104381cf696" containerID="b975b2a6e985d091f67dc1bc8b6277aa6024712849391e85781ee30bfe64ba25" exitCode=0 Jan 22 06:34:32 crc kubenswrapper[4982]: I0122 06:34:32.394292 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4jm5f" event={"ID":"11793134-2acb-4fd9-b081-c104381cf696","Type":"ContainerDied","Data":"b975b2a6e985d091f67dc1bc8b6277aa6024712849391e85781ee30bfe64ba25"} Jan 22 06:34:32 crc kubenswrapper[4982]: I0122 06:34:32.394377 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4jm5f" event={"ID":"11793134-2acb-4fd9-b081-c104381cf696","Type":"ContainerStarted","Data":"6de5b939964a652f547f0ce569383b1d57d9d90481c19dba54cc1aa56d8cc45c"} Jan 22 06:34:33 crc kubenswrapper[4982]: I0122 06:34:33.407784 4982 generic.go:334] "Generic (PLEG): container finished" podID="11793134-2acb-4fd9-b081-c104381cf696" containerID="6219158a7d8afce426f455efbc48c1068b7a282a553b0046b9cbded36f8e0e44" exitCode=0 Jan 22 06:34:33 crc kubenswrapper[4982]: I0122 06:34:33.407922 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4jm5f" event={"ID":"11793134-2acb-4fd9-b081-c104381cf696","Type":"ContainerDied","Data":"6219158a7d8afce426f455efbc48c1068b7a282a553b0046b9cbded36f8e0e44"} Jan 22 06:34:34 crc kubenswrapper[4982]: I0122 06:34:34.423514 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4jm5f" event={"ID":"11793134-2acb-4fd9-b081-c104381cf696","Type":"ContainerStarted","Data":"dafe2b4cd1459560d8f54d4d4877024c5be825b119efc7d9b586e6b15672223b"} Jan 22 06:34:34 crc kubenswrapper[4982]: I0122 06:34:34.464723 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4jm5f" podStartSLOduration=3.036134331 podStartE2EDuration="4.464699842s" podCreationTimestamp="2026-01-22 06:34:30 +0000 UTC" firstStartedPulling="2026-01-22 06:34:32.397794684 +0000 UTC m=+2933.236432697" lastFinishedPulling="2026-01-22 06:34:33.826360175 +0000 UTC m=+2934.664998208" observedRunningTime="2026-01-22 06:34:34.456047931 +0000 UTC m=+2935.294685964" watchObservedRunningTime="2026-01-22 06:34:34.464699842 +0000 UTC m=+2935.303337885" Jan 22 06:34:41 crc kubenswrapper[4982]: I0122 06:34:41.260641 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4jm5f" Jan 22 06:34:41 crc kubenswrapper[4982]: I0122 06:34:41.261312 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4jm5f" Jan 22 06:34:41 crc kubenswrapper[4982]: I0122 06:34:41.329501 4982 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4jm5f" Jan 22 06:34:41 crc kubenswrapper[4982]: I0122 06:34:41.575467 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4jm5f" Jan 22 06:34:41 crc kubenswrapper[4982]: I0122 06:34:41.631729 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4jm5f"] Jan 22 06:34:43 crc kubenswrapper[4982]: I0122 06:34:43.502911 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4jm5f" podUID="11793134-2acb-4fd9-b081-c104381cf696" containerName="registry-server" containerID="cri-o://dafe2b4cd1459560d8f54d4d4877024c5be825b119efc7d9b586e6b15672223b" gracePeriod=2 Jan 22 06:34:44 crc kubenswrapper[4982]: I0122 06:34:44.448573 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4jm5f" Jan 22 06:34:44 crc kubenswrapper[4982]: I0122 06:34:44.515002 4982 generic.go:334] "Generic (PLEG): container finished" podID="11793134-2acb-4fd9-b081-c104381cf696" containerID="dafe2b4cd1459560d8f54d4d4877024c5be825b119efc7d9b586e6b15672223b" exitCode=0 Jan 22 06:34:44 crc kubenswrapper[4982]: I0122 06:34:44.515066 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4jm5f" event={"ID":"11793134-2acb-4fd9-b081-c104381cf696","Type":"ContainerDied","Data":"dafe2b4cd1459560d8f54d4d4877024c5be825b119efc7d9b586e6b15672223b"} Jan 22 06:34:44 crc kubenswrapper[4982]: I0122 06:34:44.515109 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4jm5f" event={"ID":"11793134-2acb-4fd9-b081-c104381cf696","Type":"ContainerDied","Data":"6de5b939964a652f547f0ce569383b1d57d9d90481c19dba54cc1aa56d8cc45c"} Jan 22 06:34:44 crc kubenswrapper[4982]: I0122 06:34:44.515139 4982 scope.go:117] "RemoveContainer" containerID="dafe2b4cd1459560d8f54d4d4877024c5be825b119efc7d9b586e6b15672223b" Jan 22 06:34:44 crc kubenswrapper[4982]: I0122 06:34:44.515386 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4jm5f" Jan 22 06:34:44 crc kubenswrapper[4982]: I0122 06:34:44.528512 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k8prj\" (UniqueName: \"kubernetes.io/projected/11793134-2acb-4fd9-b081-c104381cf696-kube-api-access-k8prj\") pod \"11793134-2acb-4fd9-b081-c104381cf696\" (UID: \"11793134-2acb-4fd9-b081-c104381cf696\") " Jan 22 06:34:44 crc kubenswrapper[4982]: I0122 06:34:44.528614 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11793134-2acb-4fd9-b081-c104381cf696-catalog-content\") pod \"11793134-2acb-4fd9-b081-c104381cf696\" (UID: \"11793134-2acb-4fd9-b081-c104381cf696\") " Jan 22 06:34:44 crc kubenswrapper[4982]: I0122 06:34:44.528647 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11793134-2acb-4fd9-b081-c104381cf696-utilities\") pod \"11793134-2acb-4fd9-b081-c104381cf696\" (UID: \"11793134-2acb-4fd9-b081-c104381cf696\") " Jan 22 06:34:44 crc kubenswrapper[4982]: I0122 06:34:44.530439 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/11793134-2acb-4fd9-b081-c104381cf696-utilities" (OuterVolumeSpecName: "utilities") pod "11793134-2acb-4fd9-b081-c104381cf696" (UID: "11793134-2acb-4fd9-b081-c104381cf696"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:34:44 crc kubenswrapper[4982]: I0122 06:34:44.535585 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11793134-2acb-4fd9-b081-c104381cf696-kube-api-access-k8prj" (OuterVolumeSpecName: "kube-api-access-k8prj") pod "11793134-2acb-4fd9-b081-c104381cf696" (UID: "11793134-2acb-4fd9-b081-c104381cf696"). InnerVolumeSpecName "kube-api-access-k8prj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:34:44 crc kubenswrapper[4982]: I0122 06:34:44.549417 4982 scope.go:117] "RemoveContainer" containerID="6219158a7d8afce426f455efbc48c1068b7a282a553b0046b9cbded36f8e0e44" Jan 22 06:34:44 crc kubenswrapper[4982]: I0122 06:34:44.569795 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/11793134-2acb-4fd9-b081-c104381cf696-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "11793134-2acb-4fd9-b081-c104381cf696" (UID: "11793134-2acb-4fd9-b081-c104381cf696"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:34:44 crc kubenswrapper[4982]: I0122 06:34:44.583232 4982 scope.go:117] "RemoveContainer" containerID="b975b2a6e985d091f67dc1bc8b6277aa6024712849391e85781ee30bfe64ba25" Jan 22 06:34:44 crc kubenswrapper[4982]: I0122 06:34:44.605645 4982 scope.go:117] "RemoveContainer" containerID="dafe2b4cd1459560d8f54d4d4877024c5be825b119efc7d9b586e6b15672223b" Jan 22 06:34:44 crc kubenswrapper[4982]: E0122 06:34:44.606278 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dafe2b4cd1459560d8f54d4d4877024c5be825b119efc7d9b586e6b15672223b\": container with ID starting with dafe2b4cd1459560d8f54d4d4877024c5be825b119efc7d9b586e6b15672223b not found: ID does not exist" containerID="dafe2b4cd1459560d8f54d4d4877024c5be825b119efc7d9b586e6b15672223b" Jan 22 06:34:44 crc kubenswrapper[4982]: I0122 06:34:44.606354 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dafe2b4cd1459560d8f54d4d4877024c5be825b119efc7d9b586e6b15672223b"} err="failed to get container status \"dafe2b4cd1459560d8f54d4d4877024c5be825b119efc7d9b586e6b15672223b\": rpc error: code = NotFound desc = could not find container \"dafe2b4cd1459560d8f54d4d4877024c5be825b119efc7d9b586e6b15672223b\": container with ID starting with dafe2b4cd1459560d8f54d4d4877024c5be825b119efc7d9b586e6b15672223b not found: ID does not exist" Jan 22 06:34:44 crc kubenswrapper[4982]: I0122 06:34:44.606398 4982 scope.go:117] "RemoveContainer" containerID="6219158a7d8afce426f455efbc48c1068b7a282a553b0046b9cbded36f8e0e44" Jan 22 06:34:44 crc kubenswrapper[4982]: E0122 06:34:44.607142 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6219158a7d8afce426f455efbc48c1068b7a282a553b0046b9cbded36f8e0e44\": container with ID starting with 6219158a7d8afce426f455efbc48c1068b7a282a553b0046b9cbded36f8e0e44 not found: ID does not exist" containerID="6219158a7d8afce426f455efbc48c1068b7a282a553b0046b9cbded36f8e0e44" Jan 22 06:34:44 crc kubenswrapper[4982]: I0122 06:34:44.607209 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6219158a7d8afce426f455efbc48c1068b7a282a553b0046b9cbded36f8e0e44"} err="failed to get container status \"6219158a7d8afce426f455efbc48c1068b7a282a553b0046b9cbded36f8e0e44\": rpc error: code = NotFound desc = could not find container \"6219158a7d8afce426f455efbc48c1068b7a282a553b0046b9cbded36f8e0e44\": container with ID starting with 6219158a7d8afce426f455efbc48c1068b7a282a553b0046b9cbded36f8e0e44 not found: ID does not exist" Jan 22 06:34:44 crc kubenswrapper[4982]: I0122 06:34:44.607297 4982 scope.go:117] "RemoveContainer" containerID="b975b2a6e985d091f67dc1bc8b6277aa6024712849391e85781ee30bfe64ba25" Jan 22 06:34:44 crc kubenswrapper[4982]: E0122 06:34:44.607748 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b975b2a6e985d091f67dc1bc8b6277aa6024712849391e85781ee30bfe64ba25\": container with ID starting with b975b2a6e985d091f67dc1bc8b6277aa6024712849391e85781ee30bfe64ba25 not found: ID does not exist" containerID="b975b2a6e985d091f67dc1bc8b6277aa6024712849391e85781ee30bfe64ba25" Jan 22 06:34:44 crc kubenswrapper[4982]: I0122 06:34:44.607790 4982 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"b975b2a6e985d091f67dc1bc8b6277aa6024712849391e85781ee30bfe64ba25"} err="failed to get container status \"b975b2a6e985d091f67dc1bc8b6277aa6024712849391e85781ee30bfe64ba25\": rpc error: code = NotFound desc = could not find container \"b975b2a6e985d091f67dc1bc8b6277aa6024712849391e85781ee30bfe64ba25\": container with ID starting with b975b2a6e985d091f67dc1bc8b6277aa6024712849391e85781ee30bfe64ba25 not found: ID does not exist" Jan 22 06:34:44 crc kubenswrapper[4982]: I0122 06:34:44.631147 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k8prj\" (UniqueName: \"kubernetes.io/projected/11793134-2acb-4fd9-b081-c104381cf696-kube-api-access-k8prj\") on node \"crc\" DevicePath \"\"" Jan 22 06:34:44 crc kubenswrapper[4982]: I0122 06:34:44.631201 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11793134-2acb-4fd9-b081-c104381cf696-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 06:34:44 crc kubenswrapper[4982]: I0122 06:34:44.631221 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11793134-2acb-4fd9-b081-c104381cf696-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 06:34:44 crc kubenswrapper[4982]: I0122 06:34:44.871224 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4jm5f"] Jan 22 06:34:44 crc kubenswrapper[4982]: I0122 06:34:44.880587 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4jm5f"] Jan 22 06:34:45 crc kubenswrapper[4982]: I0122 06:34:45.733219 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11793134-2acb-4fd9-b081-c104381cf696" path="/var/lib/kubelet/pods/11793134-2acb-4fd9-b081-c104381cf696/volumes" Jan 22 06:36:48 crc kubenswrapper[4982]: I0122 06:36:48.974375 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 06:36:48 crc kubenswrapper[4982]: I0122 06:36:48.974943 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 06:37:18 crc kubenswrapper[4982]: I0122 06:37:18.974533 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 06:37:18 crc kubenswrapper[4982]: I0122 06:37:18.975313 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 06:37:48 crc kubenswrapper[4982]: I0122 06:37:48.974458 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon 
Jan 22 06:37:48 crc kubenswrapper[4982]: I0122 06:37:48.975988 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 06:37:48 crc kubenswrapper[4982]: I0122 06:37:48.976088 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx"
Jan 22 06:37:48 crc kubenswrapper[4982]: I0122 06:37:48.977013 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"797e5a3617d23d998f549cb0507aec9958ef71c6452e0672a9498ec5ddad7b58"} pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 22 06:37:48 crc kubenswrapper[4982]: I0122 06:37:48.977071 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" containerID="cri-o://797e5a3617d23d998f549cb0507aec9958ef71c6452e0672a9498ec5ddad7b58" gracePeriod=600
Jan 22 06:37:49 crc kubenswrapper[4982]: E0122 06:37:49.103772 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 06:37:49 crc kubenswrapper[4982]: I0122 06:37:49.715454 4982 generic.go:334] "Generic (PLEG): container finished" podID="2829369e-72ba-4637-853b-88f5cf242a0e" containerID="797e5a3617d23d998f549cb0507aec9958ef71c6452e0672a9498ec5ddad7b58" exitCode=0
Jan 22 06:37:49 crc kubenswrapper[4982]: I0122 06:37:49.715514 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerDied","Data":"797e5a3617d23d998f549cb0507aec9958ef71c6452e0672a9498ec5ddad7b58"}
Jan 22 06:37:49 crc kubenswrapper[4982]: I0122 06:37:49.715973 4982 scope.go:117] "RemoveContainer" containerID="d1258c54d273754ecc60774a68682b35819117c8b9ae3e2709be7b39d568f0ae"
Jan 22 06:37:49 crc kubenswrapper[4982]: I0122 06:37:49.717012 4982 scope.go:117] "RemoveContainer" containerID="797e5a3617d23d998f549cb0507aec9958ef71c6452e0672a9498ec5ddad7b58"
Jan 22 06:37:49 crc kubenswrapper[4982]: E0122 06:37:49.717785 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
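The recurring "back-off 5m0s restarting failed container" messages mean machine-config-daemon has hit the kubelet's restart back-off ceiling: by standard kubelet behavior (not read from this log) the delay starts at 10s and doubles per restart up to a 5m cap, resetting only after the container stays up long enough. A small sketch of that growth.

package main

import (
    "fmt"
    "time"
)

func main() {
    backoff := 10 * time.Second
    const max = 5 * time.Minute
    for restart := 1; restart <= 7; restart++ {
        fmt.Printf("restart %d: wait %s\n", restart, backoff)
        backoff *= 2
        if backoff > max {
            backoff = max // machine-config-daemon-gdpxx above is pinned at this cap
        }
    }
}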
Jan 22 06:38:02 crc kubenswrapper[4982]: I0122 06:38:02.719612 4982 scope.go:117] "RemoveContainer" containerID="797e5a3617d23d998f549cb0507aec9958ef71c6452e0672a9498ec5ddad7b58"
Jan 22 06:38:02 crc kubenswrapper[4982]: E0122 06:38:02.720762 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 06:38:13 crc kubenswrapper[4982]: I0122 06:38:13.038300 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-gnfkg"]
Jan 22 06:38:13 crc kubenswrapper[4982]: E0122 06:38:13.039101 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11793134-2acb-4fd9-b081-c104381cf696" containerName="extract-content"
Jan 22 06:38:13 crc kubenswrapper[4982]: I0122 06:38:13.039117 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="11793134-2acb-4fd9-b081-c104381cf696" containerName="extract-content"
Jan 22 06:38:13 crc kubenswrapper[4982]: E0122 06:38:13.039135 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11793134-2acb-4fd9-b081-c104381cf696" containerName="extract-utilities"
Jan 22 06:38:13 crc kubenswrapper[4982]: I0122 06:38:13.039142 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="11793134-2acb-4fd9-b081-c104381cf696" containerName="extract-utilities"
Jan 22 06:38:13 crc kubenswrapper[4982]: E0122 06:38:13.039162 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11793134-2acb-4fd9-b081-c104381cf696" containerName="registry-server"
Jan 22 06:38:13 crc kubenswrapper[4982]: I0122 06:38:13.039170 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="11793134-2acb-4fd9-b081-c104381cf696" containerName="registry-server"
Jan 22 06:38:13 crc kubenswrapper[4982]: I0122 06:38:13.039333 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="11793134-2acb-4fd9-b081-c104381cf696" containerName="registry-server"
Jan 22 06:38:13 crc kubenswrapper[4982]: I0122 06:38:13.040462 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gnfkg"
Need to start a new one" pod="openshift-marketplace/certified-operators-gnfkg" Jan 22 06:38:13 crc kubenswrapper[4982]: I0122 06:38:13.058698 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gnfkg"] Jan 22 06:38:13 crc kubenswrapper[4982]: I0122 06:38:13.126512 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf0e40fa-66bc-4c7d-aabe-c05c876640bd-catalog-content\") pod \"certified-operators-gnfkg\" (UID: \"cf0e40fa-66bc-4c7d-aabe-c05c876640bd\") " pod="openshift-marketplace/certified-operators-gnfkg" Jan 22 06:38:13 crc kubenswrapper[4982]: I0122 06:38:13.126809 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgkfw\" (UniqueName: \"kubernetes.io/projected/cf0e40fa-66bc-4c7d-aabe-c05c876640bd-kube-api-access-xgkfw\") pod \"certified-operators-gnfkg\" (UID: \"cf0e40fa-66bc-4c7d-aabe-c05c876640bd\") " pod="openshift-marketplace/certified-operators-gnfkg" Jan 22 06:38:13 crc kubenswrapper[4982]: I0122 06:38:13.126995 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf0e40fa-66bc-4c7d-aabe-c05c876640bd-utilities\") pod \"certified-operators-gnfkg\" (UID: \"cf0e40fa-66bc-4c7d-aabe-c05c876640bd\") " pod="openshift-marketplace/certified-operators-gnfkg" Jan 22 06:38:13 crc kubenswrapper[4982]: I0122 06:38:13.228348 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf0e40fa-66bc-4c7d-aabe-c05c876640bd-catalog-content\") pod \"certified-operators-gnfkg\" (UID: \"cf0e40fa-66bc-4c7d-aabe-c05c876640bd\") " pod="openshift-marketplace/certified-operators-gnfkg" Jan 22 06:38:13 crc kubenswrapper[4982]: I0122 06:38:13.228425 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgkfw\" (UniqueName: \"kubernetes.io/projected/cf0e40fa-66bc-4c7d-aabe-c05c876640bd-kube-api-access-xgkfw\") pod \"certified-operators-gnfkg\" (UID: \"cf0e40fa-66bc-4c7d-aabe-c05c876640bd\") " pod="openshift-marketplace/certified-operators-gnfkg" Jan 22 06:38:13 crc kubenswrapper[4982]: I0122 06:38:13.228464 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf0e40fa-66bc-4c7d-aabe-c05c876640bd-utilities\") pod \"certified-operators-gnfkg\" (UID: \"cf0e40fa-66bc-4c7d-aabe-c05c876640bd\") " pod="openshift-marketplace/certified-operators-gnfkg" Jan 22 06:38:13 crc kubenswrapper[4982]: I0122 06:38:13.229041 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf0e40fa-66bc-4c7d-aabe-c05c876640bd-utilities\") pod \"certified-operators-gnfkg\" (UID: \"cf0e40fa-66bc-4c7d-aabe-c05c876640bd\") " pod="openshift-marketplace/certified-operators-gnfkg" Jan 22 06:38:13 crc kubenswrapper[4982]: I0122 06:38:13.229101 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf0e40fa-66bc-4c7d-aabe-c05c876640bd-catalog-content\") pod \"certified-operators-gnfkg\" (UID: \"cf0e40fa-66bc-4c7d-aabe-c05c876640bd\") " pod="openshift-marketplace/certified-operators-gnfkg" Jan 22 06:38:13 crc kubenswrapper[4982]: I0122 06:38:13.250386 4982 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-xgkfw\" (UniqueName: \"kubernetes.io/projected/cf0e40fa-66bc-4c7d-aabe-c05c876640bd-kube-api-access-xgkfw\") pod \"certified-operators-gnfkg\" (UID: \"cf0e40fa-66bc-4c7d-aabe-c05c876640bd\") " pod="openshift-marketplace/certified-operators-gnfkg" Jan 22 06:38:13 crc kubenswrapper[4982]: I0122 06:38:13.357654 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gnfkg" Jan 22 06:38:13 crc kubenswrapper[4982]: I0122 06:38:13.619650 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gnfkg"] Jan 22 06:38:13 crc kubenswrapper[4982]: I0122 06:38:13.651700 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2mf2g"] Jan 22 06:38:13 crc kubenswrapper[4982]: I0122 06:38:13.654240 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2mf2g" Jan 22 06:38:13 crc kubenswrapper[4982]: I0122 06:38:13.664143 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2mf2g"] Jan 22 06:38:13 crc kubenswrapper[4982]: I0122 06:38:13.719735 4982 scope.go:117] "RemoveContainer" containerID="797e5a3617d23d998f549cb0507aec9958ef71c6452e0672a9498ec5ddad7b58" Jan 22 06:38:13 crc kubenswrapper[4982]: E0122 06:38:13.719959 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:38:13 crc kubenswrapper[4982]: I0122 06:38:13.735355 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cda33053-6bab-42a1-9c7b-af8d8fd0ba81-utilities\") pod \"community-operators-2mf2g\" (UID: \"cda33053-6bab-42a1-9c7b-af8d8fd0ba81\") " pod="openshift-marketplace/community-operators-2mf2g" Jan 22 06:38:13 crc kubenswrapper[4982]: I0122 06:38:13.735426 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cl42q\" (UniqueName: \"kubernetes.io/projected/cda33053-6bab-42a1-9c7b-af8d8fd0ba81-kube-api-access-cl42q\") pod \"community-operators-2mf2g\" (UID: \"cda33053-6bab-42a1-9c7b-af8d8fd0ba81\") " pod="openshift-marketplace/community-operators-2mf2g" Jan 22 06:38:13 crc kubenswrapper[4982]: I0122 06:38:13.735450 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cda33053-6bab-42a1-9c7b-af8d8fd0ba81-catalog-content\") pod \"community-operators-2mf2g\" (UID: \"cda33053-6bab-42a1-9c7b-af8d8fd0ba81\") " pod="openshift-marketplace/community-operators-2mf2g" Jan 22 06:38:13 crc kubenswrapper[4982]: I0122 06:38:13.836907 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cl42q\" (UniqueName: \"kubernetes.io/projected/cda33053-6bab-42a1-9c7b-af8d8fd0ba81-kube-api-access-cl42q\") pod \"community-operators-2mf2g\" (UID: \"cda33053-6bab-42a1-9c7b-af8d8fd0ba81\") " pod="openshift-marketplace/community-operators-2mf2g" Jan 22 06:38:13 
crc kubenswrapper[4982]: I0122 06:38:13.836994 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cda33053-6bab-42a1-9c7b-af8d8fd0ba81-catalog-content\") pod \"community-operators-2mf2g\" (UID: \"cda33053-6bab-42a1-9c7b-af8d8fd0ba81\") " pod="openshift-marketplace/community-operators-2mf2g" Jan 22 06:38:13 crc kubenswrapper[4982]: I0122 06:38:13.837127 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cda33053-6bab-42a1-9c7b-af8d8fd0ba81-utilities\") pod \"community-operators-2mf2g\" (UID: \"cda33053-6bab-42a1-9c7b-af8d8fd0ba81\") " pod="openshift-marketplace/community-operators-2mf2g" Jan 22 06:38:13 crc kubenswrapper[4982]: I0122 06:38:13.837681 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cda33053-6bab-42a1-9c7b-af8d8fd0ba81-catalog-content\") pod \"community-operators-2mf2g\" (UID: \"cda33053-6bab-42a1-9c7b-af8d8fd0ba81\") " pod="openshift-marketplace/community-operators-2mf2g" Jan 22 06:38:13 crc kubenswrapper[4982]: I0122 06:38:13.837705 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cda33053-6bab-42a1-9c7b-af8d8fd0ba81-utilities\") pod \"community-operators-2mf2g\" (UID: \"cda33053-6bab-42a1-9c7b-af8d8fd0ba81\") " pod="openshift-marketplace/community-operators-2mf2g" Jan 22 06:38:13 crc kubenswrapper[4982]: I0122 06:38:13.861085 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cl42q\" (UniqueName: \"kubernetes.io/projected/cda33053-6bab-42a1-9c7b-af8d8fd0ba81-kube-api-access-cl42q\") pod \"community-operators-2mf2g\" (UID: \"cda33053-6bab-42a1-9c7b-af8d8fd0ba81\") " pod="openshift-marketplace/community-operators-2mf2g" Jan 22 06:38:13 crc kubenswrapper[4982]: I0122 06:38:13.928972 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gnfkg" event={"ID":"cf0e40fa-66bc-4c7d-aabe-c05c876640bd","Type":"ContainerStarted","Data":"f776f6cfa42c4aec44b2bceca456b16a495299ede7e69b125673855b02708f20"} Jan 22 06:38:13 crc kubenswrapper[4982]: I0122 06:38:13.972737 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2mf2g" Jan 22 06:38:14 crc kubenswrapper[4982]: I0122 06:38:14.318779 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2mf2g"] Jan 22 06:38:14 crc kubenswrapper[4982]: W0122 06:38:14.325418 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcda33053_6bab_42a1_9c7b_af8d8fd0ba81.slice/crio-95cf5ef6b728e2cf856d4e8ff6ac33406abf08780206d42b43f622a2f00f4ac5 WatchSource:0}: Error finding container 95cf5ef6b728e2cf856d4e8ff6ac33406abf08780206d42b43f622a2f00f4ac5: Status 404 returned error can't find the container with id 95cf5ef6b728e2cf856d4e8ff6ac33406abf08780206d42b43f622a2f00f4ac5 Jan 22 06:38:14 crc kubenswrapper[4982]: I0122 06:38:14.936715 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2mf2g" event={"ID":"cda33053-6bab-42a1-9c7b-af8d8fd0ba81","Type":"ContainerStarted","Data":"95cf5ef6b728e2cf856d4e8ff6ac33406abf08780206d42b43f622a2f00f4ac5"} Jan 22 06:38:16 crc kubenswrapper[4982]: I0122 06:38:16.952914 4982 generic.go:334] "Generic (PLEG): container finished" podID="cda33053-6bab-42a1-9c7b-af8d8fd0ba81" containerID="1b14eccbf06813e095f61f3be3ed25fb827ab54ace3743f1eecbc3c399801693" exitCode=0 Jan 22 06:38:16 crc kubenswrapper[4982]: I0122 06:38:16.953027 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2mf2g" event={"ID":"cda33053-6bab-42a1-9c7b-af8d8fd0ba81","Type":"ContainerDied","Data":"1b14eccbf06813e095f61f3be3ed25fb827ab54ace3743f1eecbc3c399801693"} Jan 22 06:38:16 crc kubenswrapper[4982]: I0122 06:38:16.955545 4982 generic.go:334] "Generic (PLEG): container finished" podID="cf0e40fa-66bc-4c7d-aabe-c05c876640bd" containerID="bc3574635c840cfd5c1d8c6135dc219f75d737f128e8fd67b037183d6b155021" exitCode=0 Jan 22 06:38:16 crc kubenswrapper[4982]: I0122 06:38:16.955592 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gnfkg" event={"ID":"cf0e40fa-66bc-4c7d-aabe-c05c876640bd","Type":"ContainerDied","Data":"bc3574635c840cfd5c1d8c6135dc219f75d737f128e8fd67b037183d6b155021"} Jan 22 06:38:16 crc kubenswrapper[4982]: I0122 06:38:16.955647 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 06:38:17 crc kubenswrapper[4982]: I0122 06:38:17.966111 4982 generic.go:334] "Generic (PLEG): container finished" podID="cf0e40fa-66bc-4c7d-aabe-c05c876640bd" containerID="5c4f96df9bf118b1c9f8fc892aa2719e07df800099c797563300d228115a5810" exitCode=0 Jan 22 06:38:17 crc kubenswrapper[4982]: I0122 06:38:17.966155 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gnfkg" event={"ID":"cf0e40fa-66bc-4c7d-aabe-c05c876640bd","Type":"ContainerDied","Data":"5c4f96df9bf118b1c9f8fc892aa2719e07df800099c797563300d228115a5810"} Jan 22 06:38:17 crc kubenswrapper[4982]: I0122 06:38:17.969991 4982 generic.go:334] "Generic (PLEG): container finished" podID="cda33053-6bab-42a1-9c7b-af8d8fd0ba81" containerID="ac2f4d95b64837603e913cf8c645ce14409d182528297a7ee2b5d3c243fd717c" exitCode=0 Jan 22 06:38:17 crc kubenswrapper[4982]: I0122 06:38:17.970023 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2mf2g" 
event={"ID":"cda33053-6bab-42a1-9c7b-af8d8fd0ba81","Type":"ContainerDied","Data":"ac2f4d95b64837603e913cf8c645ce14409d182528297a7ee2b5d3c243fd717c"} Jan 22 06:38:18 crc kubenswrapper[4982]: I0122 06:38:18.978526 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2mf2g" event={"ID":"cda33053-6bab-42a1-9c7b-af8d8fd0ba81","Type":"ContainerStarted","Data":"2ca4492be34342d2066e8914c36b2af8c44d3fe0256cdf33f8aee4ce67d09408"} Jan 22 06:38:18 crc kubenswrapper[4982]: I0122 06:38:18.980637 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gnfkg" event={"ID":"cf0e40fa-66bc-4c7d-aabe-c05c876640bd","Type":"ContainerStarted","Data":"a529750f5df386937c47aad524289a4a8ebb250954b596690cf49754ac9c6da0"} Jan 22 06:38:18 crc kubenswrapper[4982]: I0122 06:38:18.999660 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2mf2g" podStartSLOduration=4.485140152 podStartE2EDuration="5.999639754s" podCreationTimestamp="2026-01-22 06:38:13 +0000 UTC" firstStartedPulling="2026-01-22 06:38:16.955173759 +0000 UTC m=+3157.793811762" lastFinishedPulling="2026-01-22 06:38:18.469673361 +0000 UTC m=+3159.308311364" observedRunningTime="2026-01-22 06:38:18.993265996 +0000 UTC m=+3159.831904009" watchObservedRunningTime="2026-01-22 06:38:18.999639754 +0000 UTC m=+3159.838277767" Jan 22 06:38:19 crc kubenswrapper[4982]: I0122 06:38:19.022200 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-gnfkg" podStartSLOduration=4.642178569 podStartE2EDuration="6.022171077s" podCreationTimestamp="2026-01-22 06:38:13 +0000 UTC" firstStartedPulling="2026-01-22 06:38:16.958072125 +0000 UTC m=+3157.796710128" lastFinishedPulling="2026-01-22 06:38:18.338064633 +0000 UTC m=+3159.176702636" observedRunningTime="2026-01-22 06:38:19.017471743 +0000 UTC m=+3159.856109746" watchObservedRunningTime="2026-01-22 06:38:19.022171077 +0000 UTC m=+3159.860809120" Jan 22 06:38:23 crc kubenswrapper[4982]: I0122 06:38:23.358062 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-gnfkg" Jan 22 06:38:23 crc kubenswrapper[4982]: I0122 06:38:23.358687 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-gnfkg" Jan 22 06:38:23 crc kubenswrapper[4982]: I0122 06:38:23.421052 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-gnfkg" Jan 22 06:38:23 crc kubenswrapper[4982]: I0122 06:38:23.973072 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2mf2g" Jan 22 06:38:23 crc kubenswrapper[4982]: I0122 06:38:23.973372 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2mf2g" Jan 22 06:38:24 crc kubenswrapper[4982]: I0122 06:38:24.039922 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2mf2g" Jan 22 06:38:24 crc kubenswrapper[4982]: I0122 06:38:24.080954 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-gnfkg" Jan 22 06:38:24 crc kubenswrapper[4982]: I0122 06:38:24.666556 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/certified-operators-gnfkg"] Jan 22 06:38:25 crc kubenswrapper[4982]: I0122 06:38:25.067052 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2mf2g" Jan 22 06:38:26 crc kubenswrapper[4982]: I0122 06:38:26.033403 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-gnfkg" podUID="cf0e40fa-66bc-4c7d-aabe-c05c876640bd" containerName="registry-server" containerID="cri-o://a529750f5df386937c47aad524289a4a8ebb250954b596690cf49754ac9c6da0" gracePeriod=2 Jan 22 06:38:26 crc kubenswrapper[4982]: I0122 06:38:26.719951 4982 scope.go:117] "RemoveContainer" containerID="797e5a3617d23d998f549cb0507aec9958ef71c6452e0672a9498ec5ddad7b58" Jan 22 06:38:26 crc kubenswrapper[4982]: E0122 06:38:26.720319 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:38:27 crc kubenswrapper[4982]: I0122 06:38:27.070935 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2mf2g"] Jan 22 06:38:27 crc kubenswrapper[4982]: I0122 06:38:27.071363 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2mf2g" podUID="cda33053-6bab-42a1-9c7b-af8d8fd0ba81" containerName="registry-server" containerID="cri-o://2ca4492be34342d2066e8914c36b2af8c44d3fe0256cdf33f8aee4ce67d09408" gracePeriod=2 Jan 22 06:38:27 crc kubenswrapper[4982]: I0122 06:38:27.624243 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gnfkg" Jan 22 06:38:27 crc kubenswrapper[4982]: I0122 06:38:27.747326 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xgkfw\" (UniqueName: \"kubernetes.io/projected/cf0e40fa-66bc-4c7d-aabe-c05c876640bd-kube-api-access-xgkfw\") pod \"cf0e40fa-66bc-4c7d-aabe-c05c876640bd\" (UID: \"cf0e40fa-66bc-4c7d-aabe-c05c876640bd\") " Jan 22 06:38:27 crc kubenswrapper[4982]: I0122 06:38:27.747416 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf0e40fa-66bc-4c7d-aabe-c05c876640bd-catalog-content\") pod \"cf0e40fa-66bc-4c7d-aabe-c05c876640bd\" (UID: \"cf0e40fa-66bc-4c7d-aabe-c05c876640bd\") " Jan 22 06:38:27 crc kubenswrapper[4982]: I0122 06:38:27.747461 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf0e40fa-66bc-4c7d-aabe-c05c876640bd-utilities\") pod \"cf0e40fa-66bc-4c7d-aabe-c05c876640bd\" (UID: \"cf0e40fa-66bc-4c7d-aabe-c05c876640bd\") " Jan 22 06:38:27 crc kubenswrapper[4982]: I0122 06:38:27.748573 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf0e40fa-66bc-4c7d-aabe-c05c876640bd-utilities" (OuterVolumeSpecName: "utilities") pod "cf0e40fa-66bc-4c7d-aabe-c05c876640bd" (UID: "cf0e40fa-66bc-4c7d-aabe-c05c876640bd"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:38:27 crc kubenswrapper[4982]: I0122 06:38:27.784314 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf0e40fa-66bc-4c7d-aabe-c05c876640bd-kube-api-access-xgkfw" (OuterVolumeSpecName: "kube-api-access-xgkfw") pod "cf0e40fa-66bc-4c7d-aabe-c05c876640bd" (UID: "cf0e40fa-66bc-4c7d-aabe-c05c876640bd"). InnerVolumeSpecName "kube-api-access-xgkfw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:38:27 crc kubenswrapper[4982]: I0122 06:38:27.820554 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf0e40fa-66bc-4c7d-aabe-c05c876640bd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cf0e40fa-66bc-4c7d-aabe-c05c876640bd" (UID: "cf0e40fa-66bc-4c7d-aabe-c05c876640bd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:38:27 crc kubenswrapper[4982]: I0122 06:38:27.848772 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf0e40fa-66bc-4c7d-aabe-c05c876640bd-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 06:38:27 crc kubenswrapper[4982]: I0122 06:38:27.848803 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf0e40fa-66bc-4c7d-aabe-c05c876640bd-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 06:38:27 crc kubenswrapper[4982]: I0122 06:38:27.848812 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xgkfw\" (UniqueName: \"kubernetes.io/projected/cf0e40fa-66bc-4c7d-aabe-c05c876640bd-kube-api-access-xgkfw\") on node \"crc\" DevicePath \"\"" Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.009196 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2mf2g" Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.048154 4982 generic.go:334] "Generic (PLEG): container finished" podID="cf0e40fa-66bc-4c7d-aabe-c05c876640bd" containerID="a529750f5df386937c47aad524289a4a8ebb250954b596690cf49754ac9c6da0" exitCode=0 Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.048223 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gnfkg" event={"ID":"cf0e40fa-66bc-4c7d-aabe-c05c876640bd","Type":"ContainerDied","Data":"a529750f5df386937c47aad524289a4a8ebb250954b596690cf49754ac9c6da0"} Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.048252 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gnfkg" event={"ID":"cf0e40fa-66bc-4c7d-aabe-c05c876640bd","Type":"ContainerDied","Data":"f776f6cfa42c4aec44b2bceca456b16a495299ede7e69b125673855b02708f20"} Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.048268 4982 scope.go:117] "RemoveContainer" containerID="a529750f5df386937c47aad524289a4a8ebb250954b596690cf49754ac9c6da0" Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.048316 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gnfkg" Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.051548 4982 generic.go:334] "Generic (PLEG): container finished" podID="cda33053-6bab-42a1-9c7b-af8d8fd0ba81" containerID="2ca4492be34342d2066e8914c36b2af8c44d3fe0256cdf33f8aee4ce67d09408" exitCode=0 Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.051579 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2mf2g" event={"ID":"cda33053-6bab-42a1-9c7b-af8d8fd0ba81","Type":"ContainerDied","Data":"2ca4492be34342d2066e8914c36b2af8c44d3fe0256cdf33f8aee4ce67d09408"} Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.051627 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2mf2g" event={"ID":"cda33053-6bab-42a1-9c7b-af8d8fd0ba81","Type":"ContainerDied","Data":"95cf5ef6b728e2cf856d4e8ff6ac33406abf08780206d42b43f622a2f00f4ac5"} Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.051666 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2mf2g" Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.084729 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gnfkg"] Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.086994 4982 scope.go:117] "RemoveContainer" containerID="5c4f96df9bf118b1c9f8fc892aa2719e07df800099c797563300d228115a5810" Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.091487 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-gnfkg"] Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.110303 4982 scope.go:117] "RemoveContainer" containerID="bc3574635c840cfd5c1d8c6135dc219f75d737f128e8fd67b037183d6b155021" Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.128454 4982 scope.go:117] "RemoveContainer" containerID="a529750f5df386937c47aad524289a4a8ebb250954b596690cf49754ac9c6da0" Jan 22 06:38:28 crc kubenswrapper[4982]: E0122 06:38:28.128782 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a529750f5df386937c47aad524289a4a8ebb250954b596690cf49754ac9c6da0\": container with ID starting with a529750f5df386937c47aad524289a4a8ebb250954b596690cf49754ac9c6da0 not found: ID does not exist" containerID="a529750f5df386937c47aad524289a4a8ebb250954b596690cf49754ac9c6da0" Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.128820 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a529750f5df386937c47aad524289a4a8ebb250954b596690cf49754ac9c6da0"} err="failed to get container status \"a529750f5df386937c47aad524289a4a8ebb250954b596690cf49754ac9c6da0\": rpc error: code = NotFound desc = could not find container \"a529750f5df386937c47aad524289a4a8ebb250954b596690cf49754ac9c6da0\": container with ID starting with a529750f5df386937c47aad524289a4a8ebb250954b596690cf49754ac9c6da0 not found: ID does not exist" Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.128844 4982 scope.go:117] "RemoveContainer" containerID="5c4f96df9bf118b1c9f8fc892aa2719e07df800099c797563300d228115a5810" Jan 22 06:38:28 crc kubenswrapper[4982]: E0122 06:38:28.129118 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c4f96df9bf118b1c9f8fc892aa2719e07df800099c797563300d228115a5810\": 
container with ID starting with 5c4f96df9bf118b1c9f8fc892aa2719e07df800099c797563300d228115a5810 not found: ID does not exist" containerID="5c4f96df9bf118b1c9f8fc892aa2719e07df800099c797563300d228115a5810" Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.129138 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c4f96df9bf118b1c9f8fc892aa2719e07df800099c797563300d228115a5810"} err="failed to get container status \"5c4f96df9bf118b1c9f8fc892aa2719e07df800099c797563300d228115a5810\": rpc error: code = NotFound desc = could not find container \"5c4f96df9bf118b1c9f8fc892aa2719e07df800099c797563300d228115a5810\": container with ID starting with 5c4f96df9bf118b1c9f8fc892aa2719e07df800099c797563300d228115a5810 not found: ID does not exist" Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.129155 4982 scope.go:117] "RemoveContainer" containerID="bc3574635c840cfd5c1d8c6135dc219f75d737f128e8fd67b037183d6b155021" Jan 22 06:38:28 crc kubenswrapper[4982]: E0122 06:38:28.129501 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc3574635c840cfd5c1d8c6135dc219f75d737f128e8fd67b037183d6b155021\": container with ID starting with bc3574635c840cfd5c1d8c6135dc219f75d737f128e8fd67b037183d6b155021 not found: ID does not exist" containerID="bc3574635c840cfd5c1d8c6135dc219f75d737f128e8fd67b037183d6b155021" Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.129522 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc3574635c840cfd5c1d8c6135dc219f75d737f128e8fd67b037183d6b155021"} err="failed to get container status \"bc3574635c840cfd5c1d8c6135dc219f75d737f128e8fd67b037183d6b155021\": rpc error: code = NotFound desc = could not find container \"bc3574635c840cfd5c1d8c6135dc219f75d737f128e8fd67b037183d6b155021\": container with ID starting with bc3574635c840cfd5c1d8c6135dc219f75d737f128e8fd67b037183d6b155021 not found: ID does not exist" Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.129534 4982 scope.go:117] "RemoveContainer" containerID="2ca4492be34342d2066e8914c36b2af8c44d3fe0256cdf33f8aee4ce67d09408" Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.144394 4982 scope.go:117] "RemoveContainer" containerID="ac2f4d95b64837603e913cf8c645ce14409d182528297a7ee2b5d3c243fd717c" Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.152113 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cda33053-6bab-42a1-9c7b-af8d8fd0ba81-catalog-content\") pod \"cda33053-6bab-42a1-9c7b-af8d8fd0ba81\" (UID: \"cda33053-6bab-42a1-9c7b-af8d8fd0ba81\") " Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.152250 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cl42q\" (UniqueName: \"kubernetes.io/projected/cda33053-6bab-42a1-9c7b-af8d8fd0ba81-kube-api-access-cl42q\") pod \"cda33053-6bab-42a1-9c7b-af8d8fd0ba81\" (UID: \"cda33053-6bab-42a1-9c7b-af8d8fd0ba81\") " Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.152379 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cda33053-6bab-42a1-9c7b-af8d8fd0ba81-utilities\") pod \"cda33053-6bab-42a1-9c7b-af8d8fd0ba81\" (UID: \"cda33053-6bab-42a1-9c7b-af8d8fd0ba81\") " Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.153605 4982 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cda33053-6bab-42a1-9c7b-af8d8fd0ba81-utilities" (OuterVolumeSpecName: "utilities") pod "cda33053-6bab-42a1-9c7b-af8d8fd0ba81" (UID: "cda33053-6bab-42a1-9c7b-af8d8fd0ba81"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.158618 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cda33053-6bab-42a1-9c7b-af8d8fd0ba81-kube-api-access-cl42q" (OuterVolumeSpecName: "kube-api-access-cl42q") pod "cda33053-6bab-42a1-9c7b-af8d8fd0ba81" (UID: "cda33053-6bab-42a1-9c7b-af8d8fd0ba81"). InnerVolumeSpecName "kube-api-access-cl42q". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.171215 4982 scope.go:117] "RemoveContainer" containerID="1b14eccbf06813e095f61f3be3ed25fb827ab54ace3743f1eecbc3c399801693" Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.201089 4982 scope.go:117] "RemoveContainer" containerID="2ca4492be34342d2066e8914c36b2af8c44d3fe0256cdf33f8aee4ce67d09408" Jan 22 06:38:28 crc kubenswrapper[4982]: E0122 06:38:28.203156 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ca4492be34342d2066e8914c36b2af8c44d3fe0256cdf33f8aee4ce67d09408\": container with ID starting with 2ca4492be34342d2066e8914c36b2af8c44d3fe0256cdf33f8aee4ce67d09408 not found: ID does not exist" containerID="2ca4492be34342d2066e8914c36b2af8c44d3fe0256cdf33f8aee4ce67d09408" Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.203238 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ca4492be34342d2066e8914c36b2af8c44d3fe0256cdf33f8aee4ce67d09408"} err="failed to get container status \"2ca4492be34342d2066e8914c36b2af8c44d3fe0256cdf33f8aee4ce67d09408\": rpc error: code = NotFound desc = could not find container \"2ca4492be34342d2066e8914c36b2af8c44d3fe0256cdf33f8aee4ce67d09408\": container with ID starting with 2ca4492be34342d2066e8914c36b2af8c44d3fe0256cdf33f8aee4ce67d09408 not found: ID does not exist" Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.203284 4982 scope.go:117] "RemoveContainer" containerID="ac2f4d95b64837603e913cf8c645ce14409d182528297a7ee2b5d3c243fd717c" Jan 22 06:38:28 crc kubenswrapper[4982]: E0122 06:38:28.203750 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac2f4d95b64837603e913cf8c645ce14409d182528297a7ee2b5d3c243fd717c\": container with ID starting with ac2f4d95b64837603e913cf8c645ce14409d182528297a7ee2b5d3c243fd717c not found: ID does not exist" containerID="ac2f4d95b64837603e913cf8c645ce14409d182528297a7ee2b5d3c243fd717c" Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.203809 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac2f4d95b64837603e913cf8c645ce14409d182528297a7ee2b5d3c243fd717c"} err="failed to get container status \"ac2f4d95b64837603e913cf8c645ce14409d182528297a7ee2b5d3c243fd717c\": rpc error: code = NotFound desc = could not find container \"ac2f4d95b64837603e913cf8c645ce14409d182528297a7ee2b5d3c243fd717c\": container with ID starting with ac2f4d95b64837603e913cf8c645ce14409d182528297a7ee2b5d3c243fd717c not found: ID does not exist" Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.203873 4982 
scope.go:117] "RemoveContainer" containerID="1b14eccbf06813e095f61f3be3ed25fb827ab54ace3743f1eecbc3c399801693" Jan 22 06:38:28 crc kubenswrapper[4982]: E0122 06:38:28.204276 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b14eccbf06813e095f61f3be3ed25fb827ab54ace3743f1eecbc3c399801693\": container with ID starting with 1b14eccbf06813e095f61f3be3ed25fb827ab54ace3743f1eecbc3c399801693 not found: ID does not exist" containerID="1b14eccbf06813e095f61f3be3ed25fb827ab54ace3743f1eecbc3c399801693" Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.204347 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b14eccbf06813e095f61f3be3ed25fb827ab54ace3743f1eecbc3c399801693"} err="failed to get container status \"1b14eccbf06813e095f61f3be3ed25fb827ab54ace3743f1eecbc3c399801693\": rpc error: code = NotFound desc = could not find container \"1b14eccbf06813e095f61f3be3ed25fb827ab54ace3743f1eecbc3c399801693\": container with ID starting with 1b14eccbf06813e095f61f3be3ed25fb827ab54ace3743f1eecbc3c399801693 not found: ID does not exist" Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.211308 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cda33053-6bab-42a1-9c7b-af8d8fd0ba81-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cda33053-6bab-42a1-9c7b-af8d8fd0ba81" (UID: "cda33053-6bab-42a1-9c7b-af8d8fd0ba81"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.254008 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cda33053-6bab-42a1-9c7b-af8d8fd0ba81-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.254037 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cda33053-6bab-42a1-9c7b-af8d8fd0ba81-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.254048 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cl42q\" (UniqueName: \"kubernetes.io/projected/cda33053-6bab-42a1-9c7b-af8d8fd0ba81-kube-api-access-cl42q\") on node \"crc\" DevicePath \"\"" Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.403748 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2mf2g"] Jan 22 06:38:28 crc kubenswrapper[4982]: I0122 06:38:28.413132 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2mf2g"] Jan 22 06:38:29 crc kubenswrapper[4982]: I0122 06:38:29.737323 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cda33053-6bab-42a1-9c7b-af8d8fd0ba81" path="/var/lib/kubelet/pods/cda33053-6bab-42a1-9c7b-af8d8fd0ba81/volumes" Jan 22 06:38:29 crc kubenswrapper[4982]: I0122 06:38:29.738836 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf0e40fa-66bc-4c7d-aabe-c05c876640bd" path="/var/lib/kubelet/pods/cf0e40fa-66bc-4c7d-aabe-c05c876640bd/volumes" Jan 22 06:38:30 crc kubenswrapper[4982]: E0122 06:38:30.029243 4982 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/system.slice/rpm-ostreed.service\": RecentStats: unable to find data in memory cache]" Jan 22 06:38:37 crc 
Jan 22 06:38:37 crc kubenswrapper[4982]: I0122 06:38:37.719610 4982 scope.go:117] "RemoveContainer" containerID="797e5a3617d23d998f549cb0507aec9958ef71c6452e0672a9498ec5ddad7b58"
Jan 22 06:38:37 crc kubenswrapper[4982]: E0122 06:38:37.720096 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 06:38:52 crc kubenswrapper[4982]: I0122 06:38:52.720354 4982 scope.go:117] "RemoveContainer" containerID="797e5a3617d23d998f549cb0507aec9958ef71c6452e0672a9498ec5ddad7b58"
Jan 22 06:38:52 crc kubenswrapper[4982]: E0122 06:38:52.721224 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 06:39:03 crc kubenswrapper[4982]: I0122 06:39:03.719538 4982 scope.go:117] "RemoveContainer" containerID="797e5a3617d23d998f549cb0507aec9958ef71c6452e0672a9498ec5ddad7b58"
Jan 22 06:39:03 crc kubenswrapper[4982]: E0122 06:39:03.721387 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 06:39:14 crc kubenswrapper[4982]: I0122 06:39:14.718787 4982 scope.go:117] "RemoveContainer" containerID="797e5a3617d23d998f549cb0507aec9958ef71c6452e0672a9498ec5ddad7b58"
Jan 22 06:39:14 crc kubenswrapper[4982]: E0122 06:39:14.719406 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 06:39:28 crc kubenswrapper[4982]: I0122 06:39:28.719190 4982 scope.go:117] "RemoveContainer" containerID="797e5a3617d23d998f549cb0507aec9958ef71c6452e0672a9498ec5ddad7b58"
Jan 22 06:39:28 crc kubenswrapper[4982]: E0122 06:39:28.720063 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 06:39:43 crc kubenswrapper[4982]: I0122 06:39:43.719651 4982 scope.go:117] "RemoveContainer" containerID="797e5a3617d23d998f549cb0507aec9958ef71c6452e0672a9498ec5ddad7b58"
Jan 22 06:39:43 crc kubenswrapper[4982]: E0122 06:39:43.720427 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 06:39:54 crc kubenswrapper[4982]: I0122 06:39:54.719669 4982 scope.go:117] "RemoveContainer" containerID="797e5a3617d23d998f549cb0507aec9958ef71c6452e0672a9498ec5ddad7b58"
Jan 22 06:39:54 crc kubenswrapper[4982]: E0122 06:39:54.720596 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 06:40:07 crc kubenswrapper[4982]: I0122 06:40:07.719962 4982 scope.go:117] "RemoveContainer" containerID="797e5a3617d23d998f549cb0507aec9958ef71c6452e0672a9498ec5ddad7b58"
Jan 22 06:40:07 crc kubenswrapper[4982]: E0122 06:40:07.721377 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 06:40:18 crc kubenswrapper[4982]: I0122 06:40:18.719769 4982 scope.go:117] "RemoveContainer" containerID="797e5a3617d23d998f549cb0507aec9958ef71c6452e0672a9498ec5ddad7b58"
Jan 22 06:40:18 crc kubenswrapper[4982]: E0122 06:40:18.723000 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 06:40:31 crc kubenswrapper[4982]: I0122 06:40:31.719261 4982 scope.go:117] "RemoveContainer" containerID="797e5a3617d23d998f549cb0507aec9958ef71c6452e0672a9498ec5ddad7b58"
Jan 22 06:40:31 crc kubenswrapper[4982]: E0122 06:40:31.720471 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 06:40:43 crc kubenswrapper[4982]: I0122 06:40:43.719580 4982 scope.go:117] "RemoveContainer" containerID="797e5a3617d23d998f549cb0507aec9958ef71c6452e0672a9498ec5ddad7b58"
Jan 22 06:40:43 crc kubenswrapper[4982]: E0122 06:40:43.720578 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 06:40:55 crc kubenswrapper[4982]: I0122 06:40:55.720482 4982 scope.go:117] "RemoveContainer" containerID="797e5a3617d23d998f549cb0507aec9958ef71c6452e0672a9498ec5ddad7b58"
Jan 22 06:40:55 crc kubenswrapper[4982]: E0122 06:40:55.721813 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 06:41:10 crc kubenswrapper[4982]: I0122 06:41:10.719409 4982 scope.go:117] "RemoveContainer" containerID="797e5a3617d23d998f549cb0507aec9958ef71c6452e0672a9498ec5ddad7b58"
Jan 22 06:41:10 crc kubenswrapper[4982]: E0122 06:41:10.720195 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 06:41:24 crc kubenswrapper[4982]: I0122 06:41:24.719922 4982 scope.go:117] "RemoveContainer" containerID="797e5a3617d23d998f549cb0507aec9958ef71c6452e0672a9498ec5ddad7b58"
Jan 22 06:41:24 crc kubenswrapper[4982]: E0122 06:41:24.720902 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 06:41:36 crc kubenswrapper[4982]: I0122 06:41:36.720256 4982 scope.go:117] "RemoveContainer" containerID="797e5a3617d23d998f549cb0507aec9958ef71c6452e0672a9498ec5ddad7b58"
Jan 22 06:41:36 crc kubenswrapper[4982]: E0122 06:41:36.721774 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 06:41:50 crc kubenswrapper[4982]: I0122 06:41:50.719025 4982 scope.go:117] "RemoveContainer" containerID="797e5a3617d23d998f549cb0507aec9958ef71c6452e0672a9498ec5ddad7b58"
Jan 22 06:41:50 crc kubenswrapper[4982]: E0122 06:41:50.721378 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 06:42:01 crc kubenswrapper[4982]: I0122 06:42:01.720470 4982 scope.go:117] "RemoveContainer" containerID="797e5a3617d23d998f549cb0507aec9958ef71c6452e0672a9498ec5ddad7b58"
Jan 22 06:42:01 crc kubenswrapper[4982]: E0122 06:42:01.721819 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 06:42:12 crc kubenswrapper[4982]: I0122 06:42:12.719945 4982 scope.go:117] "RemoveContainer" containerID="797e5a3617d23d998f549cb0507aec9958ef71c6452e0672a9498ec5ddad7b58"
Jan 22 06:42:12 crc kubenswrapper[4982]: E0122 06:42:12.720775 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 06:42:27 crc kubenswrapper[4982]: I0122 06:42:27.721966 4982 scope.go:117] "RemoveContainer" containerID="797e5a3617d23d998f549cb0507aec9958ef71c6452e0672a9498ec5ddad7b58"
Jan 22 06:42:27 crc kubenswrapper[4982]: E0122 06:42:27.722919 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 06:42:38 crc kubenswrapper[4982]: I0122 06:42:38.720167 4982 scope.go:117] "RemoveContainer" containerID="797e5a3617d23d998f549cb0507aec9958ef71c6452e0672a9498ec5ddad7b58"
Jan 22 06:42:38 crc kubenswrapper[4982]: E0122 06:42:38.721397 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 06:42:50 crc kubenswrapper[4982]: I0122 06:42:50.720408 4982 scope.go:117] "RemoveContainer" containerID="797e5a3617d23d998f549cb0507aec9958ef71c6452e0672a9498ec5ddad7b58"
Jan 22 06:42:51 crc kubenswrapper[4982]: I0122 06:42:51.406507 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"19ddd58d8ee9dfa3199bf4ed16700388263bf055f586b14c3039b4d0ff47875e"}
Jan 22 06:43:55 crc kubenswrapper[4982]: I0122 06:43:55.207441 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-g775c"]
Jan 22 06:43:55 crc kubenswrapper[4982]: E0122 06:43:55.208332 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cda33053-6bab-42a1-9c7b-af8d8fd0ba81" containerName="registry-server"
Jan 22 06:43:55 crc kubenswrapper[4982]: I0122 06:43:55.208347 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="cda33053-6bab-42a1-9c7b-af8d8fd0ba81" containerName="registry-server"
Jan 22 06:43:55 crc kubenswrapper[4982]: E0122 06:43:55.208363 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf0e40fa-66bc-4c7d-aabe-c05c876640bd" containerName="registry-server"
Jan 22 06:43:55 crc kubenswrapper[4982]: I0122 06:43:55.208371 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf0e40fa-66bc-4c7d-aabe-c05c876640bd" containerName="registry-server"
Jan 22 06:43:55 crc kubenswrapper[4982]: E0122 06:43:55.208381 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cda33053-6bab-42a1-9c7b-af8d8fd0ba81" containerName="extract-content"
Jan 22 06:43:55 crc kubenswrapper[4982]: I0122 06:43:55.208390 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="cda33053-6bab-42a1-9c7b-af8d8fd0ba81" containerName="extract-content"
Jan 22 06:43:55 crc kubenswrapper[4982]: E0122 06:43:55.208407 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf0e40fa-66bc-4c7d-aabe-c05c876640bd" containerName="extract-utilities"
Jan 22 06:43:55 crc kubenswrapper[4982]: I0122 06:43:55.208416 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf0e40fa-66bc-4c7d-aabe-c05c876640bd" containerName="extract-utilities"
Jan 22 06:43:55 crc kubenswrapper[4982]: E0122 06:43:55.208438 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf0e40fa-66bc-4c7d-aabe-c05c876640bd" containerName="extract-content"
Jan 22 06:43:55 crc kubenswrapper[4982]: I0122 06:43:55.208445 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf0e40fa-66bc-4c7d-aabe-c05c876640bd" containerName="extract-content"
Jan 22 06:43:55 crc kubenswrapper[4982]: E0122 06:43:55.208464 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cda33053-6bab-42a1-9c7b-af8d8fd0ba81" containerName="extract-utilities"
Jan 22 06:43:55 crc kubenswrapper[4982]: I0122 06:43:55.208472 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="cda33053-6bab-42a1-9c7b-af8d8fd0ba81" containerName="extract-utilities"
Jan 22 06:43:55 crc kubenswrapper[4982]: I0122 06:43:55.208633 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="cda33053-6bab-42a1-9c7b-af8d8fd0ba81" containerName="registry-server"
Jan 22 06:43:55 crc kubenswrapper[4982]: I0122 06:43:55.208647 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf0e40fa-66bc-4c7d-aabe-c05c876640bd" containerName="registry-server"
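The RemoveStaleState burst above is kubelet garbage-collecting per-container CPU and memory bookkeeping for the two marketplace pods deleted at 06:38:28, triggered by the new pod ADD. A rough sketch of that pruning with illustrative types (the managers' real state layout is not shown in this log, so the map shape here is an assumption):

// stalestate.go — illustrative pruning of per-(podUID, container) state.
package main

import "fmt"

type key struct{ podUID, container string }

// removeStaleState drops assignments belonging to pods absent from activePods.
func removeStaleState(assignments map[key]string, activePods map[string]bool) {
	for k := range assignments {
		if !activePods[k.podUID] {
			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n", k.podUID, k.container)
			delete(assignments, k)
		}
	}
}

func main() {
	// State left behind by the two marketplace pods deleted at 06:38:28.
	assignments := map[key]string{
		{podUID: "cda33053-6bab-42a1-9c7b-af8d8fd0ba81", container: "registry-server"}: "cpuset",
		{podUID: "cf0e40fa-66bc-4c7d-aabe-c05c876640bd", container: "registry-server"}: "cpuset",
	}
	// Only the newly added redhat-operators-g775c pod is active now.
	removeStaleState(assignments, map[string]bool{"d8ee37a9-148f-4578-8267-cc79cd327b29": true})
}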
Need to start a new one" pod="openshift-marketplace/redhat-operators-g775c" Jan 22 06:43:55 crc kubenswrapper[4982]: I0122 06:43:55.226322 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-g775c"] Jan 22 06:43:55 crc kubenswrapper[4982]: I0122 06:43:55.307522 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8ee37a9-148f-4578-8267-cc79cd327b29-catalog-content\") pod \"redhat-operators-g775c\" (UID: \"d8ee37a9-148f-4578-8267-cc79cd327b29\") " pod="openshift-marketplace/redhat-operators-g775c" Jan 22 06:43:55 crc kubenswrapper[4982]: I0122 06:43:55.307616 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlxlg\" (UniqueName: \"kubernetes.io/projected/d8ee37a9-148f-4578-8267-cc79cd327b29-kube-api-access-wlxlg\") pod \"redhat-operators-g775c\" (UID: \"d8ee37a9-148f-4578-8267-cc79cd327b29\") " pod="openshift-marketplace/redhat-operators-g775c" Jan 22 06:43:55 crc kubenswrapper[4982]: I0122 06:43:55.307681 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8ee37a9-148f-4578-8267-cc79cd327b29-utilities\") pod \"redhat-operators-g775c\" (UID: \"d8ee37a9-148f-4578-8267-cc79cd327b29\") " pod="openshift-marketplace/redhat-operators-g775c" Jan 22 06:43:55 crc kubenswrapper[4982]: I0122 06:43:55.408685 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8ee37a9-148f-4578-8267-cc79cd327b29-utilities\") pod \"redhat-operators-g775c\" (UID: \"d8ee37a9-148f-4578-8267-cc79cd327b29\") " pod="openshift-marketplace/redhat-operators-g775c" Jan 22 06:43:55 crc kubenswrapper[4982]: I0122 06:43:55.408836 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8ee37a9-148f-4578-8267-cc79cd327b29-catalog-content\") pod \"redhat-operators-g775c\" (UID: \"d8ee37a9-148f-4578-8267-cc79cd327b29\") " pod="openshift-marketplace/redhat-operators-g775c" Jan 22 06:43:55 crc kubenswrapper[4982]: I0122 06:43:55.408896 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlxlg\" (UniqueName: \"kubernetes.io/projected/d8ee37a9-148f-4578-8267-cc79cd327b29-kube-api-access-wlxlg\") pod \"redhat-operators-g775c\" (UID: \"d8ee37a9-148f-4578-8267-cc79cd327b29\") " pod="openshift-marketplace/redhat-operators-g775c" Jan 22 06:43:55 crc kubenswrapper[4982]: I0122 06:43:55.409342 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8ee37a9-148f-4578-8267-cc79cd327b29-utilities\") pod \"redhat-operators-g775c\" (UID: \"d8ee37a9-148f-4578-8267-cc79cd327b29\") " pod="openshift-marketplace/redhat-operators-g775c" Jan 22 06:43:55 crc kubenswrapper[4982]: I0122 06:43:55.409397 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8ee37a9-148f-4578-8267-cc79cd327b29-catalog-content\") pod \"redhat-operators-g775c\" (UID: \"d8ee37a9-148f-4578-8267-cc79cd327b29\") " pod="openshift-marketplace/redhat-operators-g775c" Jan 22 06:43:55 crc kubenswrapper[4982]: I0122 06:43:55.428732 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-wlxlg\" (UniqueName: \"kubernetes.io/projected/d8ee37a9-148f-4578-8267-cc79cd327b29-kube-api-access-wlxlg\") pod \"redhat-operators-g775c\" (UID: \"d8ee37a9-148f-4578-8267-cc79cd327b29\") " pod="openshift-marketplace/redhat-operators-g775c" Jan 22 06:43:55 crc kubenswrapper[4982]: I0122 06:43:55.563211 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-g775c" Jan 22 06:43:55 crc kubenswrapper[4982]: I0122 06:43:55.820265 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-g775c"] Jan 22 06:43:55 crc kubenswrapper[4982]: I0122 06:43:55.981120 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g775c" event={"ID":"d8ee37a9-148f-4578-8267-cc79cd327b29","Type":"ContainerStarted","Data":"4c0c844cbef769fa53f0d483f2966b7ac1acb56b2a5aeeaa66c68e2028820f53"} Jan 22 06:43:56 crc kubenswrapper[4982]: I0122 06:43:56.988463 4982 generic.go:334] "Generic (PLEG): container finished" podID="d8ee37a9-148f-4578-8267-cc79cd327b29" containerID="1f0f52b93de9798e521c086f3494be413f08fcfc89359247e42f657bbfd77301" exitCode=0 Jan 22 06:43:56 crc kubenswrapper[4982]: I0122 06:43:56.988943 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g775c" event={"ID":"d8ee37a9-148f-4578-8267-cc79cd327b29","Type":"ContainerDied","Data":"1f0f52b93de9798e521c086f3494be413f08fcfc89359247e42f657bbfd77301"} Jan 22 06:43:56 crc kubenswrapper[4982]: I0122 06:43:56.991261 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 06:43:58 crc kubenswrapper[4982]: I0122 06:43:58.000399 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g775c" event={"ID":"d8ee37a9-148f-4578-8267-cc79cd327b29","Type":"ContainerStarted","Data":"5eead9edc94b6e89e9a50718c5d368e7b671e79b3f8b876b09933cce57648b54"} Jan 22 06:43:59 crc kubenswrapper[4982]: I0122 06:43:59.011375 4982 generic.go:334] "Generic (PLEG): container finished" podID="d8ee37a9-148f-4578-8267-cc79cd327b29" containerID="5eead9edc94b6e89e9a50718c5d368e7b671e79b3f8b876b09933cce57648b54" exitCode=0 Jan 22 06:43:59 crc kubenswrapper[4982]: I0122 06:43:59.011434 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g775c" event={"ID":"d8ee37a9-148f-4578-8267-cc79cd327b29","Type":"ContainerDied","Data":"5eead9edc94b6e89e9a50718c5d368e7b671e79b3f8b876b09933cce57648b54"} Jan 22 06:44:00 crc kubenswrapper[4982]: I0122 06:44:00.023623 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g775c" event={"ID":"d8ee37a9-148f-4578-8267-cc79cd327b29","Type":"ContainerStarted","Data":"7359762be9062abdc72f9ec51530ade1115b609b382f16b7346e7e4d7c8f0438"} Jan 22 06:44:00 crc kubenswrapper[4982]: I0122 06:44:00.053568 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-g775c" podStartSLOduration=2.6222551469999997 podStartE2EDuration="5.053538612s" podCreationTimestamp="2026-01-22 06:43:55 +0000 UTC" firstStartedPulling="2026-01-22 06:43:56.991013046 +0000 UTC m=+3497.829651059" lastFinishedPulling="2026-01-22 06:43:59.422296481 +0000 UTC m=+3500.260934524" observedRunningTime="2026-01-22 06:44:00.047470197 +0000 UTC m=+3500.886108200" watchObservedRunningTime="2026-01-22 06:44:00.053538612 +0000 UTC m=+3500.892176655" Jan 22 06:44:05 crc 
Jan 22 06:44:05 crc kubenswrapper[4982]: I0122 06:44:05.563368 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-g775c"
Jan 22 06:44:05 crc kubenswrapper[4982]: I0122 06:44:05.563774 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-g775c"
Jan 22 06:44:06 crc kubenswrapper[4982]: I0122 06:44:06.636161 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-g775c" podUID="d8ee37a9-148f-4578-8267-cc79cd327b29" containerName="registry-server" probeResult="failure" output=<
Jan 22 06:44:06 crc kubenswrapper[4982]: 	timeout: failed to connect service ":50051" within 1s
Jan 22 06:44:06 crc kubenswrapper[4982]:  >
Jan 22 06:44:15 crc kubenswrapper[4982]: I0122 06:44:15.633595 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-g775c"
Jan 22 06:44:15 crc kubenswrapper[4982]: I0122 06:44:15.710325 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-g775c"
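
The failure above comes from registry-server's startup probe against gRPC port 50051; the output format matches a grpc_health_probe-style check, though the pod spec itself is not part of this log. A rough Go stand-in for what such a probe does (assumed behavior, not the actual probe binary):

    package main

    import (
        "fmt"
        "net"
        "os"
        "time"
    )

    func main() {
        // One attempt, a 1s budget, and a non-zero exit on failure -- the
        // shape of the failure output recorded by prober.go above.
        conn, err := net.DialTimeout("tcp", "127.0.0.1:50051", time.Second)
        if err != nil {
            fmt.Printf("timeout: failed to connect service %q within 1s\n", ":50051")
            os.Exit(1)
        }
        conn.Close()
    }
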
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:44:18 crc kubenswrapper[4982]: I0122 06:44:18.881529 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8ee37a9-148f-4578-8267-cc79cd327b29-kube-api-access-wlxlg" (OuterVolumeSpecName: "kube-api-access-wlxlg") pod "d8ee37a9-148f-4578-8267-cc79cd327b29" (UID: "d8ee37a9-148f-4578-8267-cc79cd327b29"). InnerVolumeSpecName "kube-api-access-wlxlg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:44:18 crc kubenswrapper[4982]: I0122 06:44:18.971645 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wlxlg\" (UniqueName: \"kubernetes.io/projected/d8ee37a9-148f-4578-8267-cc79cd327b29-kube-api-access-wlxlg\") on node \"crc\" DevicePath \"\"" Jan 22 06:44:18 crc kubenswrapper[4982]: I0122 06:44:18.971702 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8ee37a9-148f-4578-8267-cc79cd327b29-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 06:44:18 crc kubenswrapper[4982]: I0122 06:44:18.990036 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8ee37a9-148f-4578-8267-cc79cd327b29-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d8ee37a9-148f-4578-8267-cc79cd327b29" (UID: "d8ee37a9-148f-4578-8267-cc79cd327b29"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:44:19 crc kubenswrapper[4982]: I0122 06:44:19.072977 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8ee37a9-148f-4578-8267-cc79cd327b29-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 06:44:19 crc kubenswrapper[4982]: I0122 06:44:19.194171 4982 generic.go:334] "Generic (PLEG): container finished" podID="d8ee37a9-148f-4578-8267-cc79cd327b29" containerID="7359762be9062abdc72f9ec51530ade1115b609b382f16b7346e7e4d7c8f0438" exitCode=0 Jan 22 06:44:19 crc kubenswrapper[4982]: I0122 06:44:19.194234 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g775c" event={"ID":"d8ee37a9-148f-4578-8267-cc79cd327b29","Type":"ContainerDied","Data":"7359762be9062abdc72f9ec51530ade1115b609b382f16b7346e7e4d7c8f0438"} Jan 22 06:44:19 crc kubenswrapper[4982]: I0122 06:44:19.194261 4982 util.go:48] "No ready sandbox for pod can be found. 
Jan 22 06:44:19 crc kubenswrapper[4982]: I0122 06:44:19.194171 4982 generic.go:334] "Generic (PLEG): container finished" podID="d8ee37a9-148f-4578-8267-cc79cd327b29" containerID="7359762be9062abdc72f9ec51530ade1115b609b382f16b7346e7e4d7c8f0438" exitCode=0
Jan 22 06:44:19 crc kubenswrapper[4982]: I0122 06:44:19.194234 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g775c" event={"ID":"d8ee37a9-148f-4578-8267-cc79cd327b29","Type":"ContainerDied","Data":"7359762be9062abdc72f9ec51530ade1115b609b382f16b7346e7e4d7c8f0438"}
Jan 22 06:44:19 crc kubenswrapper[4982]: I0122 06:44:19.194261 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-g775c"
Jan 22 06:44:19 crc kubenswrapper[4982]: I0122 06:44:19.194283 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g775c" event={"ID":"d8ee37a9-148f-4578-8267-cc79cd327b29","Type":"ContainerDied","Data":"4c0c844cbef769fa53f0d483f2966b7ac1acb56b2a5aeeaa66c68e2028820f53"}
Jan 22 06:44:19 crc kubenswrapper[4982]: I0122 06:44:19.194314 4982 scope.go:117] "RemoveContainer" containerID="7359762be9062abdc72f9ec51530ade1115b609b382f16b7346e7e4d7c8f0438"
Jan 22 06:44:19 crc kubenswrapper[4982]: I0122 06:44:19.225519 4982 scope.go:117] "RemoveContainer" containerID="5eead9edc94b6e89e9a50718c5d368e7b671e79b3f8b876b09933cce57648b54"
Jan 22 06:44:19 crc kubenswrapper[4982]: I0122 06:44:19.263671 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-g775c"]
Jan 22 06:44:19 crc kubenswrapper[4982]: I0122 06:44:19.273323 4982 scope.go:117] "RemoveContainer" containerID="1f0f52b93de9798e521c086f3494be413f08fcfc89359247e42f657bbfd77301"
Jan 22 06:44:19 crc kubenswrapper[4982]: I0122 06:44:19.274049 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-g775c"]
Jan 22 06:44:19 crc kubenswrapper[4982]: I0122 06:44:19.301120 4982 scope.go:117] "RemoveContainer" containerID="7359762be9062abdc72f9ec51530ade1115b609b382f16b7346e7e4d7c8f0438"
Jan 22 06:44:19 crc kubenswrapper[4982]: E0122 06:44:19.301783 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7359762be9062abdc72f9ec51530ade1115b609b382f16b7346e7e4d7c8f0438\": container with ID starting with 7359762be9062abdc72f9ec51530ade1115b609b382f16b7346e7e4d7c8f0438 not found: ID does not exist" containerID="7359762be9062abdc72f9ec51530ade1115b609b382f16b7346e7e4d7c8f0438"
Jan 22 06:44:19 crc kubenswrapper[4982]: I0122 06:44:19.301933 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7359762be9062abdc72f9ec51530ade1115b609b382f16b7346e7e4d7c8f0438"} err="failed to get container status \"7359762be9062abdc72f9ec51530ade1115b609b382f16b7346e7e4d7c8f0438\": rpc error: code = NotFound desc = could not find container \"7359762be9062abdc72f9ec51530ade1115b609b382f16b7346e7e4d7c8f0438\": container with ID starting with 7359762be9062abdc72f9ec51530ade1115b609b382f16b7346e7e4d7c8f0438 not found: ID does not exist"
Jan 22 06:44:19 crc kubenswrapper[4982]: I0122 06:44:19.301997 4982 scope.go:117] "RemoveContainer" containerID="5eead9edc94b6e89e9a50718c5d368e7b671e79b3f8b876b09933cce57648b54"
Jan 22 06:44:19 crc kubenswrapper[4982]: E0122 06:44:19.302483 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5eead9edc94b6e89e9a50718c5d368e7b671e79b3f8b876b09933cce57648b54\": container with ID starting with 5eead9edc94b6e89e9a50718c5d368e7b671e79b3f8b876b09933cce57648b54 not found: ID does not exist" containerID="5eead9edc94b6e89e9a50718c5d368e7b671e79b3f8b876b09933cce57648b54"
Jan 22 06:44:19 crc kubenswrapper[4982]: I0122 06:44:19.302538 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5eead9edc94b6e89e9a50718c5d368e7b671e79b3f8b876b09933cce57648b54"} err="failed to get container status \"5eead9edc94b6e89e9a50718c5d368e7b671e79b3f8b876b09933cce57648b54\": rpc error: code = NotFound desc = could not find container \"5eead9edc94b6e89e9a50718c5d368e7b671e79b3f8b876b09933cce57648b54\": container with ID starting with 5eead9edc94b6e89e9a50718c5d368e7b671e79b3f8b876b09933cce57648b54 not found: ID does not exist"
Jan 22 06:44:19 crc kubenswrapper[4982]: I0122 06:44:19.302584 4982 scope.go:117] "RemoveContainer" containerID="1f0f52b93de9798e521c086f3494be413f08fcfc89359247e42f657bbfd77301"
Jan 22 06:44:19 crc kubenswrapper[4982]: E0122 06:44:19.303065 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f0f52b93de9798e521c086f3494be413f08fcfc89359247e42f657bbfd77301\": container with ID starting with 1f0f52b93de9798e521c086f3494be413f08fcfc89359247e42f657bbfd77301 not found: ID does not exist" containerID="1f0f52b93de9798e521c086f3494be413f08fcfc89359247e42f657bbfd77301"
Jan 22 06:44:19 crc kubenswrapper[4982]: I0122 06:44:19.303105 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f0f52b93de9798e521c086f3494be413f08fcfc89359247e42f657bbfd77301"} err="failed to get container status \"1f0f52b93de9798e521c086f3494be413f08fcfc89359247e42f657bbfd77301\": rpc error: code = NotFound desc = could not find container \"1f0f52b93de9798e521c086f3494be413f08fcfc89359247e42f657bbfd77301\": container with ID starting with 1f0f52b93de9798e521c086f3494be413f08fcfc89359247e42f657bbfd77301 not found: ID does not exist"
Jan 22 06:44:19 crc kubenswrapper[4982]: I0122 06:44:19.731940 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8ee37a9-148f-4578-8267-cc79cd327b29" path="/var/lib/kubelet/pods/d8ee37a9-148f-4578-8267-cc79cd327b29/volumes"
Jan 22 06:44:50 crc kubenswrapper[4982]: I0122 06:44:50.829625 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ztzzw"]
Jan 22 06:44:50 crc kubenswrapper[4982]: E0122 06:44:50.831247 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8ee37a9-148f-4578-8267-cc79cd327b29" containerName="extract-content"
Jan 22 06:44:50 crc kubenswrapper[4982]: I0122 06:44:50.831280 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8ee37a9-148f-4578-8267-cc79cd327b29" containerName="extract-content"
Jan 22 06:44:50 crc kubenswrapper[4982]: E0122 06:44:50.831312 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8ee37a9-148f-4578-8267-cc79cd327b29" containerName="extract-utilities"
Jan 22 06:44:50 crc kubenswrapper[4982]: I0122 06:44:50.831326 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8ee37a9-148f-4578-8267-cc79cd327b29" containerName="extract-utilities"
Jan 22 06:44:50 crc kubenswrapper[4982]: E0122 06:44:50.831345 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8ee37a9-148f-4578-8267-cc79cd327b29" containerName="registry-server"
Jan 22 06:44:50 crc kubenswrapper[4982]: I0122 06:44:50.831361 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8ee37a9-148f-4578-8267-cc79cd327b29" containerName="registry-server"
Jan 22 06:44:50 crc kubenswrapper[4982]: I0122 06:44:50.831624 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8ee37a9-148f-4578-8267-cc79cd327b29" containerName="registry-server"
Jan 22 06:44:50 crc kubenswrapper[4982]: I0122 06:44:50.834262 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ztzzw"
Jan 22 06:44:50 crc kubenswrapper[4982]: I0122 06:44:50.854440 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ztzzw"]
Jan 22 06:44:50 crc kubenswrapper[4982]: I0122 06:44:50.863220 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5823ea57-34e2-47b4-8b7e-647dda7edf38-utilities\") pod \"redhat-marketplace-ztzzw\" (UID: \"5823ea57-34e2-47b4-8b7e-647dda7edf38\") " pod="openshift-marketplace/redhat-marketplace-ztzzw"
Jan 22 06:44:50 crc kubenswrapper[4982]: I0122 06:44:50.863370 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5823ea57-34e2-47b4-8b7e-647dda7edf38-catalog-content\") pod \"redhat-marketplace-ztzzw\" (UID: \"5823ea57-34e2-47b4-8b7e-647dda7edf38\") " pod="openshift-marketplace/redhat-marketplace-ztzzw"
Jan 22 06:44:50 crc kubenswrapper[4982]: I0122 06:44:50.863470 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcbxh\" (UniqueName: \"kubernetes.io/projected/5823ea57-34e2-47b4-8b7e-647dda7edf38-kube-api-access-wcbxh\") pod \"redhat-marketplace-ztzzw\" (UID: \"5823ea57-34e2-47b4-8b7e-647dda7edf38\") " pod="openshift-marketplace/redhat-marketplace-ztzzw"
Jan 22 06:44:50 crc kubenswrapper[4982]: I0122 06:44:50.964573 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5823ea57-34e2-47b4-8b7e-647dda7edf38-utilities\") pod \"redhat-marketplace-ztzzw\" (UID: \"5823ea57-34e2-47b4-8b7e-647dda7edf38\") " pod="openshift-marketplace/redhat-marketplace-ztzzw"
Jan 22 06:44:50 crc kubenswrapper[4982]: I0122 06:44:50.964627 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5823ea57-34e2-47b4-8b7e-647dda7edf38-catalog-content\") pod \"redhat-marketplace-ztzzw\" (UID: \"5823ea57-34e2-47b4-8b7e-647dda7edf38\") " pod="openshift-marketplace/redhat-marketplace-ztzzw"
Jan 22 06:44:50 crc kubenswrapper[4982]: I0122 06:44:50.964654 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcbxh\" (UniqueName: \"kubernetes.io/projected/5823ea57-34e2-47b4-8b7e-647dda7edf38-kube-api-access-wcbxh\") pod \"redhat-marketplace-ztzzw\" (UID: \"5823ea57-34e2-47b4-8b7e-647dda7edf38\") " pod="openshift-marketplace/redhat-marketplace-ztzzw"
Jan 22 06:44:50 crc kubenswrapper[4982]: I0122 06:44:50.965382 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5823ea57-34e2-47b4-8b7e-647dda7edf38-utilities\") pod \"redhat-marketplace-ztzzw\" (UID: \"5823ea57-34e2-47b4-8b7e-647dda7edf38\") " pod="openshift-marketplace/redhat-marketplace-ztzzw"
Jan 22 06:44:50 crc kubenswrapper[4982]: I0122 06:44:50.965594 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5823ea57-34e2-47b4-8b7e-647dda7edf38-catalog-content\") pod \"redhat-marketplace-ztzzw\" (UID: \"5823ea57-34e2-47b4-8b7e-647dda7edf38\") " pod="openshift-marketplace/redhat-marketplace-ztzzw"
Jan 22 06:44:50 crc kubenswrapper[4982]: I0122 06:44:50.982906 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcbxh\" (UniqueName: \"kubernetes.io/projected/5823ea57-34e2-47b4-8b7e-647dda7edf38-kube-api-access-wcbxh\") pod \"redhat-marketplace-ztzzw\" (UID: \"5823ea57-34e2-47b4-8b7e-647dda7edf38\") " pod="openshift-marketplace/redhat-marketplace-ztzzw"
Jan 22 06:44:51 crc kubenswrapper[4982]: I0122 06:44:51.164833 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ztzzw"
Jan 22 06:44:51 crc kubenswrapper[4982]: I0122 06:44:51.581282 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ztzzw"]
Jan 22 06:44:52 crc kubenswrapper[4982]: I0122 06:44:52.512091 4982 generic.go:334] "Generic (PLEG): container finished" podID="5823ea57-34e2-47b4-8b7e-647dda7edf38" containerID="0a42f5fe19c50ea70eab8e7c6ee64ac773ed05b4ebbac9c16ec5076bb630c95a" exitCode=0
Jan 22 06:44:52 crc kubenswrapper[4982]: I0122 06:44:52.512155 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ztzzw" event={"ID":"5823ea57-34e2-47b4-8b7e-647dda7edf38","Type":"ContainerDied","Data":"0a42f5fe19c50ea70eab8e7c6ee64ac773ed05b4ebbac9c16ec5076bb630c95a"}
Jan 22 06:44:52 crc kubenswrapper[4982]: I0122 06:44:52.512360 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ztzzw" event={"ID":"5823ea57-34e2-47b4-8b7e-647dda7edf38","Type":"ContainerStarted","Data":"0aa27a633063877fc4ec8686e84d91b8de7b56681c75190995acdc6ca810f0c9"}
Jan 22 06:44:53 crc kubenswrapper[4982]: I0122 06:44:53.525312 4982 generic.go:334] "Generic (PLEG): container finished" podID="5823ea57-34e2-47b4-8b7e-647dda7edf38" containerID="019f32c6d9f11e96b8f173a7fcbed87223f0347cfbcdc98f944d830b76bc3e27" exitCode=0
Jan 22 06:44:53 crc kubenswrapper[4982]: I0122 06:44:53.525439 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ztzzw" event={"ID":"5823ea57-34e2-47b4-8b7e-647dda7edf38","Type":"ContainerDied","Data":"019f32c6d9f11e96b8f173a7fcbed87223f0347cfbcdc98f944d830b76bc3e27"}
Jan 22 06:44:55 crc kubenswrapper[4982]: I0122 06:44:55.540763 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ztzzw" event={"ID":"5823ea57-34e2-47b4-8b7e-647dda7edf38","Type":"ContainerStarted","Data":"e29f5e1617d22e0dfde9c90640132288da034e85b074ca5801d9beeb4423a443"}
Jan 22 06:44:55 crc kubenswrapper[4982]: I0122 06:44:55.557220 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ztzzw" podStartSLOduration=4.039027086 podStartE2EDuration="5.557201501s" podCreationTimestamp="2026-01-22 06:44:50 +0000 UTC" firstStartedPulling="2026-01-22 06:44:52.514020437 +0000 UTC m=+3553.352658450" lastFinishedPulling="2026-01-22 06:44:54.032194862 +0000 UTC m=+3554.870832865" observedRunningTime="2026-01-22 06:44:55.556404029 +0000 UTC m=+3556.395042032" watchObservedRunningTime="2026-01-22 06:44:55.557201501 +0000 UTC m=+3556.395839494"
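
All the catalog pods in this log share one shape: two init containers (extract-utilities, extract-content) fill a pair of emptyDirs, then the long-running registry-server serves the extracted content. A sketch of that pod shape using the core/v1 Go types; the image names and mount paths are placeholders, not taken from this log:

    package main

    import (
        "fmt"

        corev1 "k8s.io/api/core/v1"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    )

    func main() {
        emptyDir := func(name string) corev1.Volume {
            return corev1.Volume{Name: name,
                VolumeSource: corev1.VolumeSource{EmptyDir: &corev1.EmptyDirVolumeSource{}}}
        }
        mounts := []corev1.VolumeMount{
            {Name: "utilities", MountPath: "/utilities"},
            {Name: "catalog-content", MountPath: "/extracted-catalog"},
        }
        pod := corev1.Pod{
            ObjectMeta: metav1.ObjectMeta{
                GenerateName: "redhat-marketplace-",
                Namespace:    "openshift-marketplace",
            },
            Spec: corev1.PodSpec{
                Volumes: []corev1.Volume{emptyDir("utilities"), emptyDir("catalog-content")},
                InitContainers: []corev1.Container{
                    {Name: "extract-utilities", Image: "example.invalid/utilities:tag", VolumeMounts: mounts},
                    {Name: "extract-content", Image: "example.invalid/catalog:tag", VolumeMounts: mounts},
                },
                Containers: []corev1.Container{
                    {Name: "registry-server", Image: "example.invalid/catalog:tag", VolumeMounts: mounts},
                },
            },
        }
        fmt.Printf("%s*: %d init containers, %d volumes\n",
            pod.GenerateName, len(pod.Spec.InitContainers), len(pod.Spec.Volumes))
    }

The two ContainerDied events with exitCode=0 above are exactly these init containers completing in order before registry-server starts.
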
Jan 22 06:45:00 crc kubenswrapper[4982]: I0122 06:45:00.161060 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484405-xlvhp"]
Jan 22 06:45:00 crc kubenswrapper[4982]: I0122 06:45:00.164633 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484405-xlvhp"
Jan 22 06:45:00 crc kubenswrapper[4982]: I0122 06:45:00.168797 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Jan 22 06:45:00 crc kubenswrapper[4982]: I0122 06:45:00.180920 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484405-xlvhp"]
Jan 22 06:45:00 crc kubenswrapper[4982]: I0122 06:45:00.181125 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Jan 22 06:45:00 crc kubenswrapper[4982]: I0122 06:45:00.262733 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b38cb7c7-9698-4142-896b-01cbb320f58a-config-volume\") pod \"collect-profiles-29484405-xlvhp\" (UID: \"b38cb7c7-9698-4142-896b-01cbb320f58a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484405-xlvhp"
Jan 22 06:45:00 crc kubenswrapper[4982]: I0122 06:45:00.262798 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtjdd\" (UniqueName: \"kubernetes.io/projected/b38cb7c7-9698-4142-896b-01cbb320f58a-kube-api-access-xtjdd\") pod \"collect-profiles-29484405-xlvhp\" (UID: \"b38cb7c7-9698-4142-896b-01cbb320f58a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484405-xlvhp"
Jan 22 06:45:00 crc kubenswrapper[4982]: I0122 06:45:00.262935 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b38cb7c7-9698-4142-896b-01cbb320f58a-secret-volume\") pod \"collect-profiles-29484405-xlvhp\" (UID: \"b38cb7c7-9698-4142-896b-01cbb320f58a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484405-xlvhp"
Jan 22 06:45:00 crc kubenswrapper[4982]: I0122 06:45:00.364316 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b38cb7c7-9698-4142-896b-01cbb320f58a-secret-volume\") pod \"collect-profiles-29484405-xlvhp\" (UID: \"b38cb7c7-9698-4142-896b-01cbb320f58a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484405-xlvhp"
Jan 22 06:45:00 crc kubenswrapper[4982]: I0122 06:45:00.364447 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b38cb7c7-9698-4142-896b-01cbb320f58a-config-volume\") pod \"collect-profiles-29484405-xlvhp\" (UID: \"b38cb7c7-9698-4142-896b-01cbb320f58a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484405-xlvhp"
Jan 22 06:45:00 crc kubenswrapper[4982]: I0122 06:45:00.364525 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtjdd\" (UniqueName: \"kubernetes.io/projected/b38cb7c7-9698-4142-896b-01cbb320f58a-kube-api-access-xtjdd\") pod \"collect-profiles-29484405-xlvhp\" (UID: \"b38cb7c7-9698-4142-896b-01cbb320f58a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484405-xlvhp"
Jan 22 06:45:00 crc kubenswrapper[4982]: I0122 06:45:00.366330 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b38cb7c7-9698-4142-896b-01cbb320f58a-config-volume\") pod \"collect-profiles-29484405-xlvhp\" (UID: \"b38cb7c7-9698-4142-896b-01cbb320f58a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484405-xlvhp"
Jan 22 06:45:00 crc kubenswrapper[4982]: I0122 06:45:00.371169 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b38cb7c7-9698-4142-896b-01cbb320f58a-secret-volume\") pod \"collect-profiles-29484405-xlvhp\" (UID: \"b38cb7c7-9698-4142-896b-01cbb320f58a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484405-xlvhp"
Jan 22 06:45:00 crc kubenswrapper[4982]: I0122 06:45:00.388362 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtjdd\" (UniqueName: \"kubernetes.io/projected/b38cb7c7-9698-4142-896b-01cbb320f58a-kube-api-access-xtjdd\") pod \"collect-profiles-29484405-xlvhp\" (UID: \"b38cb7c7-9698-4142-896b-01cbb320f58a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484405-xlvhp"
Jan 22 06:45:00 crc kubenswrapper[4982]: I0122 06:45:00.516830 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484405-xlvhp"
Jan 22 06:45:00 crc kubenswrapper[4982]: I0122 06:45:00.793674 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484405-xlvhp"]
Jan 22 06:45:01 crc kubenswrapper[4982]: I0122 06:45:01.169184 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ztzzw"
Jan 22 06:45:01 crc kubenswrapper[4982]: I0122 06:45:01.169611 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ztzzw"
Jan 22 06:45:01 crc kubenswrapper[4982]: I0122 06:45:01.241224 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ztzzw"
Jan 22 06:45:01 crc kubenswrapper[4982]: I0122 06:45:01.612490 4982 generic.go:334] "Generic (PLEG): container finished" podID="b38cb7c7-9698-4142-896b-01cbb320f58a" containerID="67d2b5f3c1efe69086c8b4388b8e823c6cc54f075c82d8d33600ba80c5db133d" exitCode=0
Jan 22 06:45:01 crc kubenswrapper[4982]: I0122 06:45:01.613205 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484405-xlvhp" event={"ID":"b38cb7c7-9698-4142-896b-01cbb320f58a","Type":"ContainerDied","Data":"67d2b5f3c1efe69086c8b4388b8e823c6cc54f075c82d8d33600ba80c5db133d"}
Jan 22 06:45:01 crc kubenswrapper[4982]: I0122 06:45:01.613403 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484405-xlvhp" event={"ID":"b38cb7c7-9698-4142-896b-01cbb320f58a","Type":"ContainerStarted","Data":"ba95e54783d636b596db4235eae8cffffeed30e75f9047005e0368f07e558e0f"}
Jan 22 06:45:01 crc kubenswrapper[4982]: I0122 06:45:01.661650 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ztzzw"
Jan 22 06:45:01 crc kubenswrapper[4982]: I0122 06:45:01.734792 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ztzzw"]
Jan 22 06:45:03 crc kubenswrapper[4982]: I0122 06:45:03.025276 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484405-xlvhp"
Jan 22 06:45:03 crc kubenswrapper[4982]: I0122 06:45:03.208484 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xtjdd\" (UniqueName: \"kubernetes.io/projected/b38cb7c7-9698-4142-896b-01cbb320f58a-kube-api-access-xtjdd\") pod \"b38cb7c7-9698-4142-896b-01cbb320f58a\" (UID: \"b38cb7c7-9698-4142-896b-01cbb320f58a\") "
Jan 22 06:45:03 crc kubenswrapper[4982]: I0122 06:45:03.208629 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b38cb7c7-9698-4142-896b-01cbb320f58a-config-volume\") pod \"b38cb7c7-9698-4142-896b-01cbb320f58a\" (UID: \"b38cb7c7-9698-4142-896b-01cbb320f58a\") "
Jan 22 06:45:03 crc kubenswrapper[4982]: I0122 06:45:03.208800 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b38cb7c7-9698-4142-896b-01cbb320f58a-secret-volume\") pod \"b38cb7c7-9698-4142-896b-01cbb320f58a\" (UID: \"b38cb7c7-9698-4142-896b-01cbb320f58a\") "
Jan 22 06:45:03 crc kubenswrapper[4982]: I0122 06:45:03.209375 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b38cb7c7-9698-4142-896b-01cbb320f58a-config-volume" (OuterVolumeSpecName: "config-volume") pod "b38cb7c7-9698-4142-896b-01cbb320f58a" (UID: "b38cb7c7-9698-4142-896b-01cbb320f58a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 06:45:03 crc kubenswrapper[4982]: I0122 06:45:03.218116 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b38cb7c7-9698-4142-896b-01cbb320f58a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b38cb7c7-9698-4142-896b-01cbb320f58a" (UID: "b38cb7c7-9698-4142-896b-01cbb320f58a"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 06:45:03 crc kubenswrapper[4982]: I0122 06:45:03.218211 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b38cb7c7-9698-4142-896b-01cbb320f58a-kube-api-access-xtjdd" (OuterVolumeSpecName: "kube-api-access-xtjdd") pod "b38cb7c7-9698-4142-896b-01cbb320f58a" (UID: "b38cb7c7-9698-4142-896b-01cbb320f58a"). InnerVolumeSpecName "kube-api-access-xtjdd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 06:45:03 crc kubenswrapper[4982]: I0122 06:45:03.311032 4982 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b38cb7c7-9698-4142-896b-01cbb320f58a-config-volume\") on node \"crc\" DevicePath \"\""
Jan 22 06:45:03 crc kubenswrapper[4982]: I0122 06:45:03.311096 4982 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b38cb7c7-9698-4142-896b-01cbb320f58a-secret-volume\") on node \"crc\" DevicePath \"\""
Jan 22 06:45:03 crc kubenswrapper[4982]: I0122 06:45:03.311117 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xtjdd\" (UniqueName: \"kubernetes.io/projected/b38cb7c7-9698-4142-896b-01cbb320f58a-kube-api-access-xtjdd\") on node \"crc\" DevicePath \"\""
Jan 22 06:45:03 crc kubenswrapper[4982]: I0122 06:45:03.633492 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484405-xlvhp" event={"ID":"b38cb7c7-9698-4142-896b-01cbb320f58a","Type":"ContainerDied","Data":"ba95e54783d636b596db4235eae8cffffeed30e75f9047005e0368f07e558e0f"}
Jan 22 06:45:03 crc kubenswrapper[4982]: I0122 06:45:03.633539 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ba95e54783d636b596db4235eae8cffffeed30e75f9047005e0368f07e558e0f"
Jan 22 06:45:03 crc kubenswrapper[4982]: I0122 06:45:03.633573 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484405-xlvhp"
Jan 22 06:45:03 crc kubenswrapper[4982]: I0122 06:45:03.633698 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-ztzzw" podUID="5823ea57-34e2-47b4-8b7e-647dda7edf38" containerName="registry-server" containerID="cri-o://e29f5e1617d22e0dfde9c90640132288da034e85b074ca5801d9beeb4423a443" gracePeriod=2
Jan 22 06:45:04 crc kubenswrapper[4982]: I0122 06:45:04.132890 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484360-9kjlh"]
Jan 22 06:45:04 crc kubenswrapper[4982]: I0122 06:45:04.142353 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484360-9kjlh"]
Jan 22 06:45:04 crc kubenswrapper[4982]: I0122 06:45:04.149444 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ztzzw"
Jan 22 06:45:04 crc kubenswrapper[4982]: I0122 06:45:04.326177 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5823ea57-34e2-47b4-8b7e-647dda7edf38-utilities\") pod \"5823ea57-34e2-47b4-8b7e-647dda7edf38\" (UID: \"5823ea57-34e2-47b4-8b7e-647dda7edf38\") "
Jan 22 06:45:04 crc kubenswrapper[4982]: I0122 06:45:04.326401 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wcbxh\" (UniqueName: \"kubernetes.io/projected/5823ea57-34e2-47b4-8b7e-647dda7edf38-kube-api-access-wcbxh\") pod \"5823ea57-34e2-47b4-8b7e-647dda7edf38\" (UID: \"5823ea57-34e2-47b4-8b7e-647dda7edf38\") "
Jan 22 06:45:04 crc kubenswrapper[4982]: I0122 06:45:04.326440 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5823ea57-34e2-47b4-8b7e-647dda7edf38-catalog-content\") pod \"5823ea57-34e2-47b4-8b7e-647dda7edf38\" (UID: \"5823ea57-34e2-47b4-8b7e-647dda7edf38\") "
Jan 22 06:45:04 crc kubenswrapper[4982]: I0122 06:45:04.328178 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5823ea57-34e2-47b4-8b7e-647dda7edf38-utilities" (OuterVolumeSpecName: "utilities") pod "5823ea57-34e2-47b4-8b7e-647dda7edf38" (UID: "5823ea57-34e2-47b4-8b7e-647dda7edf38"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 06:45:04 crc kubenswrapper[4982]: I0122 06:45:04.337074 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5823ea57-34e2-47b4-8b7e-647dda7edf38-kube-api-access-wcbxh" (OuterVolumeSpecName: "kube-api-access-wcbxh") pod "5823ea57-34e2-47b4-8b7e-647dda7edf38" (UID: "5823ea57-34e2-47b4-8b7e-647dda7edf38"). InnerVolumeSpecName "kube-api-access-wcbxh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 06:45:04 crc kubenswrapper[4982]: I0122 06:45:04.371459 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5823ea57-34e2-47b4-8b7e-647dda7edf38-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5823ea57-34e2-47b4-8b7e-647dda7edf38" (UID: "5823ea57-34e2-47b4-8b7e-647dda7edf38"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 06:45:04 crc kubenswrapper[4982]: I0122 06:45:04.429835 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wcbxh\" (UniqueName: \"kubernetes.io/projected/5823ea57-34e2-47b4-8b7e-647dda7edf38-kube-api-access-wcbxh\") on node \"crc\" DevicePath \"\""
Jan 22 06:45:04 crc kubenswrapper[4982]: I0122 06:45:04.429921 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5823ea57-34e2-47b4-8b7e-647dda7edf38-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 22 06:45:04 crc kubenswrapper[4982]: I0122 06:45:04.429938 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5823ea57-34e2-47b4-8b7e-647dda7edf38-utilities\") on node \"crc\" DevicePath \"\""
Jan 22 06:45:04 crc kubenswrapper[4982]: I0122 06:45:04.645208 4982 generic.go:334] "Generic (PLEG): container finished" podID="5823ea57-34e2-47b4-8b7e-647dda7edf38" containerID="e29f5e1617d22e0dfde9c90640132288da034e85b074ca5801d9beeb4423a443" exitCode=0
Jan 22 06:45:04 crc kubenswrapper[4982]: I0122 06:45:04.645276 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ztzzw" event={"ID":"5823ea57-34e2-47b4-8b7e-647dda7edf38","Type":"ContainerDied","Data":"e29f5e1617d22e0dfde9c90640132288da034e85b074ca5801d9beeb4423a443"}
Jan 22 06:45:04 crc kubenswrapper[4982]: I0122 06:45:04.645316 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ztzzw" event={"ID":"5823ea57-34e2-47b4-8b7e-647dda7edf38","Type":"ContainerDied","Data":"0aa27a633063877fc4ec8686e84d91b8de7b56681c75190995acdc6ca810f0c9"}
Jan 22 06:45:04 crc kubenswrapper[4982]: I0122 06:45:04.645313 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ztzzw"
Jan 22 06:45:04 crc kubenswrapper[4982]: I0122 06:45:04.645335 4982 scope.go:117] "RemoveContainer" containerID="e29f5e1617d22e0dfde9c90640132288da034e85b074ca5801d9beeb4423a443"
Jan 22 06:45:04 crc kubenswrapper[4982]: I0122 06:45:04.690275 4982 scope.go:117] "RemoveContainer" containerID="019f32c6d9f11e96b8f173a7fcbed87223f0347cfbcdc98f944d830b76bc3e27"
Jan 22 06:45:04 crc kubenswrapper[4982]: I0122 06:45:04.699512 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ztzzw"]
Jan 22 06:45:04 crc kubenswrapper[4982]: I0122 06:45:04.705872 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-ztzzw"]
Jan 22 06:45:04 crc kubenswrapper[4982]: I0122 06:45:04.720235 4982 scope.go:117] "RemoveContainer" containerID="0a42f5fe19c50ea70eab8e7c6ee64ac773ed05b4ebbac9c16ec5076bb630c95a"
Jan 22 06:45:04 crc kubenswrapper[4982]: I0122 06:45:04.749742 4982 scope.go:117] "RemoveContainer" containerID="e29f5e1617d22e0dfde9c90640132288da034e85b074ca5801d9beeb4423a443"
Jan 22 06:45:04 crc kubenswrapper[4982]: E0122 06:45:04.750683 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e29f5e1617d22e0dfde9c90640132288da034e85b074ca5801d9beeb4423a443\": container with ID starting with e29f5e1617d22e0dfde9c90640132288da034e85b074ca5801d9beeb4423a443 not found: ID does not exist" containerID="e29f5e1617d22e0dfde9c90640132288da034e85b074ca5801d9beeb4423a443"
Jan 22 06:45:04 crc kubenswrapper[4982]: I0122 06:45:04.750757 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e29f5e1617d22e0dfde9c90640132288da034e85b074ca5801d9beeb4423a443"} err="failed to get container status \"e29f5e1617d22e0dfde9c90640132288da034e85b074ca5801d9beeb4423a443\": rpc error: code = NotFound desc = could not find container \"e29f5e1617d22e0dfde9c90640132288da034e85b074ca5801d9beeb4423a443\": container with ID starting with e29f5e1617d22e0dfde9c90640132288da034e85b074ca5801d9beeb4423a443 not found: ID does not exist"
Jan 22 06:45:04 crc kubenswrapper[4982]: I0122 06:45:04.750814 4982 scope.go:117] "RemoveContainer" containerID="019f32c6d9f11e96b8f173a7fcbed87223f0347cfbcdc98f944d830b76bc3e27"
Jan 22 06:45:04 crc kubenswrapper[4982]: E0122 06:45:04.751362 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"019f32c6d9f11e96b8f173a7fcbed87223f0347cfbcdc98f944d830b76bc3e27\": container with ID starting with 019f32c6d9f11e96b8f173a7fcbed87223f0347cfbcdc98f944d830b76bc3e27 not found: ID does not exist" containerID="019f32c6d9f11e96b8f173a7fcbed87223f0347cfbcdc98f944d830b76bc3e27"
Jan 22 06:45:04 crc kubenswrapper[4982]: I0122 06:45:04.751404 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"019f32c6d9f11e96b8f173a7fcbed87223f0347cfbcdc98f944d830b76bc3e27"} err="failed to get container status \"019f32c6d9f11e96b8f173a7fcbed87223f0347cfbcdc98f944d830b76bc3e27\": rpc error: code = NotFound desc = could not find container \"019f32c6d9f11e96b8f173a7fcbed87223f0347cfbcdc98f944d830b76bc3e27\": container with ID starting with 019f32c6d9f11e96b8f173a7fcbed87223f0347cfbcdc98f944d830b76bc3e27 not found: ID does not exist"
Jan 22 06:45:04 crc kubenswrapper[4982]: I0122 06:45:04.751440 4982 scope.go:117] "RemoveContainer" containerID="0a42f5fe19c50ea70eab8e7c6ee64ac773ed05b4ebbac9c16ec5076bb630c95a"
Jan 22 06:45:04 crc kubenswrapper[4982]: E0122 06:45:04.751769 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a42f5fe19c50ea70eab8e7c6ee64ac773ed05b4ebbac9c16ec5076bb630c95a\": container with ID starting with 0a42f5fe19c50ea70eab8e7c6ee64ac773ed05b4ebbac9c16ec5076bb630c95a not found: ID does not exist" containerID="0a42f5fe19c50ea70eab8e7c6ee64ac773ed05b4ebbac9c16ec5076bb630c95a"
Jan 22 06:45:04 crc kubenswrapper[4982]: I0122 06:45:04.751805 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a42f5fe19c50ea70eab8e7c6ee64ac773ed05b4ebbac9c16ec5076bb630c95a"} err="failed to get container status \"0a42f5fe19c50ea70eab8e7c6ee64ac773ed05b4ebbac9c16ec5076bb630c95a\": rpc error: code = NotFound desc = could not find container \"0a42f5fe19c50ea70eab8e7c6ee64ac773ed05b4ebbac9c16ec5076bb630c95a\": container with ID starting with 0a42f5fe19c50ea70eab8e7c6ee64ac773ed05b4ebbac9c16ec5076bb630c95a not found: ID does not exist"
Jan 22 06:45:05 crc kubenswrapper[4982]: I0122 06:45:05.737922 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5823ea57-34e2-47b4-8b7e-647dda7edf38" path="/var/lib/kubelet/pods/5823ea57-34e2-47b4-8b7e-647dda7edf38/volumes"
Jan 22 06:45:05 crc kubenswrapper[4982]: I0122 06:45:05.739993 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa8c80f1-26e6-488f-aefc-5213ad570754" path="/var/lib/kubelet/pods/fa8c80f1-26e6-488f-aefc-5213ad570754/volumes"
Jan 22 06:45:18 crc kubenswrapper[4982]: I0122 06:45:18.974430 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 06:45:18 crc kubenswrapper[4982]: I0122 06:45:18.975219 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 06:45:48 crc kubenswrapper[4982]: I0122 06:45:48.974173 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 06:45:48 crc kubenswrapper[4982]: I0122 06:45:48.975045 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 06:46:01 crc kubenswrapper[4982]: I0122 06:46:01.160246 4982 scope.go:117] "RemoveContainer" containerID="ddd77a3e93f4b3a83945c0e1f48ddb33321fdc3509f7b7480adc59252c5e333d"
Jan 22 06:46:18 crc kubenswrapper[4982]: I0122 06:46:18.974309 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 06:46:18 crc kubenswrapper[4982]: I0122 06:46:18.975044 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 06:46:18 crc kubenswrapper[4982]: I0122 06:46:18.975113 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx"
Jan 22 06:46:18 crc kubenswrapper[4982]: I0122 06:46:18.977037 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"19ddd58d8ee9dfa3199bf4ed16700388263bf055f586b14c3039b4d0ff47875e"} pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 22 06:46:18 crc kubenswrapper[4982]: I0122 06:46:18.977140 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" containerID="cri-o://19ddd58d8ee9dfa3199bf4ed16700388263bf055f586b14c3039b4d0ff47875e" gracePeriod=600
Jan 22 06:46:19 crc kubenswrapper[4982]: I0122 06:46:19.430326 4982 generic.go:334] "Generic (PLEG): container finished" podID="2829369e-72ba-4637-853b-88f5cf242a0e" containerID="19ddd58d8ee9dfa3199bf4ed16700388263bf055f586b14c3039b4d0ff47875e" exitCode=0
Jan 22 06:46:19 crc kubenswrapper[4982]: I0122 06:46:19.430438 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerDied","Data":"19ddd58d8ee9dfa3199bf4ed16700388263bf055f586b14c3039b4d0ff47875e"}
Jan 22 06:46:19 crc kubenswrapper[4982]: I0122 06:46:19.430968 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"2bc401c1226a58ddc68be49a09047e96ed0187d9d6cd02a4e3dfcfc5251282c2"}
Jan 22 06:46:19 crc kubenswrapper[4982]: I0122 06:46:19.431008 4982 scope.go:117] "RemoveContainer" containerID="797e5a3617d23d998f549cb0507aec9958ef71c6452e0672a9498ec5ddad7b58"
pods=["openshift-marketplace/certified-operators-zzmdr"] Jan 22 06:49:09 crc kubenswrapper[4982]: E0122 06:49:09.194480 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5823ea57-34e2-47b4-8b7e-647dda7edf38" containerName="extract-content" Jan 22 06:49:09 crc kubenswrapper[4982]: I0122 06:49:09.194497 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="5823ea57-34e2-47b4-8b7e-647dda7edf38" containerName="extract-content" Jan 22 06:49:09 crc kubenswrapper[4982]: E0122 06:49:09.194512 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b38cb7c7-9698-4142-896b-01cbb320f58a" containerName="collect-profiles" Jan 22 06:49:09 crc kubenswrapper[4982]: I0122 06:49:09.194520 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b38cb7c7-9698-4142-896b-01cbb320f58a" containerName="collect-profiles" Jan 22 06:49:09 crc kubenswrapper[4982]: E0122 06:49:09.194535 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5823ea57-34e2-47b4-8b7e-647dda7edf38" containerName="registry-server" Jan 22 06:49:09 crc kubenswrapper[4982]: I0122 06:49:09.194545 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="5823ea57-34e2-47b4-8b7e-647dda7edf38" containerName="registry-server" Jan 22 06:49:09 crc kubenswrapper[4982]: E0122 06:49:09.194570 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5823ea57-34e2-47b4-8b7e-647dda7edf38" containerName="extract-utilities" Jan 22 06:49:09 crc kubenswrapper[4982]: I0122 06:49:09.194578 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="5823ea57-34e2-47b4-8b7e-647dda7edf38" containerName="extract-utilities" Jan 22 06:49:09 crc kubenswrapper[4982]: I0122 06:49:09.194728 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b38cb7c7-9698-4142-896b-01cbb320f58a" containerName="collect-profiles" Jan 22 06:49:09 crc kubenswrapper[4982]: I0122 06:49:09.194745 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="5823ea57-34e2-47b4-8b7e-647dda7edf38" containerName="registry-server" Jan 22 06:49:09 crc kubenswrapper[4982]: I0122 06:49:09.195999 4982 util.go:30] "No sandbox for pod can be found. 
Jan 22 06:49:09 crc kubenswrapper[4982]: I0122 06:49:09.195999 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zzmdr"
Jan 22 06:49:09 crc kubenswrapper[4982]: I0122 06:49:09.218790 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zzmdr"]
Jan 22 06:49:09 crc kubenswrapper[4982]: I0122 06:49:09.390833 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f072dbf-9100-4697-a139-0a9424898487-catalog-content\") pod \"certified-operators-zzmdr\" (UID: \"5f072dbf-9100-4697-a139-0a9424898487\") " pod="openshift-marketplace/certified-operators-zzmdr"
Jan 22 06:49:09 crc kubenswrapper[4982]: I0122 06:49:09.391148 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tn9gz\" (UniqueName: \"kubernetes.io/projected/5f072dbf-9100-4697-a139-0a9424898487-kube-api-access-tn9gz\") pod \"certified-operators-zzmdr\" (UID: \"5f072dbf-9100-4697-a139-0a9424898487\") " pod="openshift-marketplace/certified-operators-zzmdr"
Jan 22 06:49:09 crc kubenswrapper[4982]: I0122 06:49:09.391335 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f072dbf-9100-4697-a139-0a9424898487-utilities\") pod \"certified-operators-zzmdr\" (UID: \"5f072dbf-9100-4697-a139-0a9424898487\") " pod="openshift-marketplace/certified-operators-zzmdr"
Jan 22 06:49:09 crc kubenswrapper[4982]: I0122 06:49:09.492676 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tn9gz\" (UniqueName: \"kubernetes.io/projected/5f072dbf-9100-4697-a139-0a9424898487-kube-api-access-tn9gz\") pod \"certified-operators-zzmdr\" (UID: \"5f072dbf-9100-4697-a139-0a9424898487\") " pod="openshift-marketplace/certified-operators-zzmdr"
Jan 22 06:49:09 crc kubenswrapper[4982]: I0122 06:49:09.492785 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f072dbf-9100-4697-a139-0a9424898487-utilities\") pod \"certified-operators-zzmdr\" (UID: \"5f072dbf-9100-4697-a139-0a9424898487\") " pod="openshift-marketplace/certified-operators-zzmdr"
Jan 22 06:49:09 crc kubenswrapper[4982]: I0122 06:49:09.492828 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f072dbf-9100-4697-a139-0a9424898487-catalog-content\") pod \"certified-operators-zzmdr\" (UID: \"5f072dbf-9100-4697-a139-0a9424898487\") " pod="openshift-marketplace/certified-operators-zzmdr"
Jan 22 06:49:09 crc kubenswrapper[4982]: I0122 06:49:09.493708 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f072dbf-9100-4697-a139-0a9424898487-utilities\") pod \"certified-operators-zzmdr\" (UID: \"5f072dbf-9100-4697-a139-0a9424898487\") " pod="openshift-marketplace/certified-operators-zzmdr"
Jan 22 06:49:09 crc kubenswrapper[4982]: I0122 06:49:09.493793 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f072dbf-9100-4697-a139-0a9424898487-catalog-content\") pod \"certified-operators-zzmdr\" (UID: \"5f072dbf-9100-4697-a139-0a9424898487\") " pod="openshift-marketplace/certified-operators-zzmdr"
Jan 22 06:49:09 crc kubenswrapper[4982]: I0122 06:49:09.527158 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tn9gz\" (UniqueName: \"kubernetes.io/projected/5f072dbf-9100-4697-a139-0a9424898487-kube-api-access-tn9gz\") pod \"certified-operators-zzmdr\" (UID: \"5f072dbf-9100-4697-a139-0a9424898487\") " pod="openshift-marketplace/certified-operators-zzmdr"
Jan 22 06:49:09 crc kubenswrapper[4982]: I0122 06:49:09.817897 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zzmdr"
Jan 22 06:49:10 crc kubenswrapper[4982]: I0122 06:49:10.086627 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zzmdr"]
Jan 22 06:49:10 crc kubenswrapper[4982]: I0122 06:49:10.773936 4982 generic.go:334] "Generic (PLEG): container finished" podID="5f072dbf-9100-4697-a139-0a9424898487" containerID="ad0d7241e61a8d3dd0f61ba48d6d8b4eabf1039269f174973a297af818d9b546" exitCode=0
Jan 22 06:49:10 crc kubenswrapper[4982]: I0122 06:49:10.774044 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zzmdr" event={"ID":"5f072dbf-9100-4697-a139-0a9424898487","Type":"ContainerDied","Data":"ad0d7241e61a8d3dd0f61ba48d6d8b4eabf1039269f174973a297af818d9b546"}
Jan 22 06:49:10 crc kubenswrapper[4982]: I0122 06:49:10.774186 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zzmdr" event={"ID":"5f072dbf-9100-4697-a139-0a9424898487","Type":"ContainerStarted","Data":"3112e1064b92401fe110e93588dc22fa149c6d3753e9fbfcc93849aa4773de75"}
Jan 22 06:49:10 crc kubenswrapper[4982]: I0122 06:49:10.776915 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 22 06:49:11 crc kubenswrapper[4982]: I0122 06:49:11.783207 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zzmdr" event={"ID":"5f072dbf-9100-4697-a139-0a9424898487","Type":"ContainerStarted","Data":"879359ae0ff6b716456316b9ba3eb35ab4cdbf59f718907348340a01faca3744"}
Jan 22 06:49:12 crc kubenswrapper[4982]: I0122 06:49:12.795633 4982 generic.go:334] "Generic (PLEG): container finished" podID="5f072dbf-9100-4697-a139-0a9424898487" containerID="879359ae0ff6b716456316b9ba3eb35ab4cdbf59f718907348340a01faca3744" exitCode=0
Jan 22 06:49:12 crc kubenswrapper[4982]: I0122 06:49:12.795689 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zzmdr" event={"ID":"5f072dbf-9100-4697-a139-0a9424898487","Type":"ContainerDied","Data":"879359ae0ff6b716456316b9ba3eb35ab4cdbf59f718907348340a01faca3744"}
Jan 22 06:49:13 crc kubenswrapper[4982]: I0122 06:49:13.807157 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zzmdr" event={"ID":"5f072dbf-9100-4697-a139-0a9424898487","Type":"ContainerStarted","Data":"e2e37d00c89491744f198133e2efc5e76fd85014159c8bd2e0d5b8cd40beed73"}
Jan 22 06:49:13 crc kubenswrapper[4982]: I0122 06:49:13.833377 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zzmdr" podStartSLOduration=2.43493927 podStartE2EDuration="4.833353272s" podCreationTimestamp="2026-01-22 06:49:09 +0000 UTC" firstStartedPulling="2026-01-22 06:49:10.776670344 +0000 UTC m=+3811.615308347" lastFinishedPulling="2026-01-22 06:49:13.175084346 +0000 UTC m=+3814.013722349" observedRunningTime="2026-01-22 06:49:13.825936783 +0000 UTC m=+3814.664574826" watchObservedRunningTime="2026-01-22 06:49:13.833353272 +0000 UTC m=+3814.671991315"
Jan 22 06:49:18 crc kubenswrapper[4982]: I0122 06:49:18.973696 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 06:49:18 crc kubenswrapper[4982]: I0122 06:49:18.974102 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 06:49:19 crc kubenswrapper[4982]: I0122 06:49:19.818828 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zzmdr"
Jan 22 06:49:19 crc kubenswrapper[4982]: I0122 06:49:19.818948 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-zzmdr"
Jan 22 06:49:19 crc kubenswrapper[4982]: I0122 06:49:19.889653 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zzmdr"
Jan 22 06:49:19 crc kubenswrapper[4982]: I0122 06:49:19.956405 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zzmdr"
Jan 22 06:49:20 crc kubenswrapper[4982]: I0122 06:49:20.141746 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zzmdr"]
Jan 22 06:49:21 crc kubenswrapper[4982]: I0122 06:49:21.874484 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-zzmdr" podUID="5f072dbf-9100-4697-a139-0a9424898487" containerName="registry-server" containerID="cri-o://e2e37d00c89491744f198133e2efc5e76fd85014159c8bd2e0d5b8cd40beed73" gracePeriod=2
Jan 22 06:49:23 crc kubenswrapper[4982]: I0122 06:49:23.936214 4982 generic.go:334] "Generic (PLEG): container finished" podID="5f072dbf-9100-4697-a139-0a9424898487" containerID="e2e37d00c89491744f198133e2efc5e76fd85014159c8bd2e0d5b8cd40beed73" exitCode=0
Jan 22 06:49:23 crc kubenswrapper[4982]: I0122 06:49:23.936355 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zzmdr" event={"ID":"5f072dbf-9100-4697-a139-0a9424898487","Type":"ContainerDied","Data":"e2e37d00c89491744f198133e2efc5e76fd85014159c8bd2e0d5b8cd40beed73"}
Jan 22 06:49:24 crc kubenswrapper[4982]: I0122 06:49:24.148303 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zzmdr"
Jan 22 06:49:24 crc kubenswrapper[4982]: I0122 06:49:24.229843 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f072dbf-9100-4697-a139-0a9424898487-catalog-content\") pod \"5f072dbf-9100-4697-a139-0a9424898487\" (UID: \"5f072dbf-9100-4697-a139-0a9424898487\") "
Jan 22 06:49:24 crc kubenswrapper[4982]: I0122 06:49:24.229964 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f072dbf-9100-4697-a139-0a9424898487-utilities\") pod \"5f072dbf-9100-4697-a139-0a9424898487\" (UID: \"5f072dbf-9100-4697-a139-0a9424898487\") "
Jan 22 06:49:24 crc kubenswrapper[4982]: I0122 06:49:24.230040 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tn9gz\" (UniqueName: \"kubernetes.io/projected/5f072dbf-9100-4697-a139-0a9424898487-kube-api-access-tn9gz\") pod \"5f072dbf-9100-4697-a139-0a9424898487\" (UID: \"5f072dbf-9100-4697-a139-0a9424898487\") "
Jan 22 06:49:24 crc kubenswrapper[4982]: I0122 06:49:24.232086 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f072dbf-9100-4697-a139-0a9424898487-utilities" (OuterVolumeSpecName: "utilities") pod "5f072dbf-9100-4697-a139-0a9424898487" (UID: "5f072dbf-9100-4697-a139-0a9424898487"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 06:49:24 crc kubenswrapper[4982]: I0122 06:49:24.248323 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f072dbf-9100-4697-a139-0a9424898487-kube-api-access-tn9gz" (OuterVolumeSpecName: "kube-api-access-tn9gz") pod "5f072dbf-9100-4697-a139-0a9424898487" (UID: "5f072dbf-9100-4697-a139-0a9424898487"). InnerVolumeSpecName "kube-api-access-tn9gz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 06:49:24 crc kubenswrapper[4982]: I0122 06:49:24.285830 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f072dbf-9100-4697-a139-0a9424898487-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5f072dbf-9100-4697-a139-0a9424898487" (UID: "5f072dbf-9100-4697-a139-0a9424898487"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:49:24 crc kubenswrapper[4982]: I0122 06:49:24.331776 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tn9gz\" (UniqueName: \"kubernetes.io/projected/5f072dbf-9100-4697-a139-0a9424898487-kube-api-access-tn9gz\") on node \"crc\" DevicePath \"\"" Jan 22 06:49:24 crc kubenswrapper[4982]: I0122 06:49:24.331835 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f072dbf-9100-4697-a139-0a9424898487-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 06:49:24 crc kubenswrapper[4982]: I0122 06:49:24.331895 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f072dbf-9100-4697-a139-0a9424898487-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 06:49:24 crc kubenswrapper[4982]: I0122 06:49:24.947846 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zzmdr" event={"ID":"5f072dbf-9100-4697-a139-0a9424898487","Type":"ContainerDied","Data":"3112e1064b92401fe110e93588dc22fa149c6d3753e9fbfcc93849aa4773de75"} Jan 22 06:49:24 crc kubenswrapper[4982]: I0122 06:49:24.947961 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zzmdr" Jan 22 06:49:24 crc kubenswrapper[4982]: I0122 06:49:24.947979 4982 scope.go:117] "RemoveContainer" containerID="e2e37d00c89491744f198133e2efc5e76fd85014159c8bd2e0d5b8cd40beed73" Jan 22 06:49:24 crc kubenswrapper[4982]: I0122 06:49:24.981559 4982 scope.go:117] "RemoveContainer" containerID="879359ae0ff6b716456316b9ba3eb35ab4cdbf59f718907348340a01faca3744" Jan 22 06:49:24 crc kubenswrapper[4982]: I0122 06:49:24.985377 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zzmdr"] Jan 22 06:49:24 crc kubenswrapper[4982]: I0122 06:49:24.991988 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zzmdr"] Jan 22 06:49:25 crc kubenswrapper[4982]: I0122 06:49:25.007360 4982 scope.go:117] "RemoveContainer" containerID="ad0d7241e61a8d3dd0f61ba48d6d8b4eabf1039269f174973a297af818d9b546" Jan 22 06:49:25 crc kubenswrapper[4982]: I0122 06:49:25.735710 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f072dbf-9100-4697-a139-0a9424898487" path="/var/lib/kubelet/pods/5f072dbf-9100-4697-a139-0a9424898487/volumes" Jan 22 06:49:48 crc kubenswrapper[4982]: I0122 06:49:48.974444 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 06:49:48 crc kubenswrapper[4982]: I0122 06:49:48.974972 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 06:49:48 crc kubenswrapper[4982]: I0122 06:49:48.975025 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 06:49:48 crc kubenswrapper[4982]: I0122 06:49:48.975720 4982 
kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2bc401c1226a58ddc68be49a09047e96ed0187d9d6cd02a4e3dfcfc5251282c2"} pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 06:49:48 crc kubenswrapper[4982]: I0122 06:49:48.975790 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" containerID="cri-o://2bc401c1226a58ddc68be49a09047e96ed0187d9d6cd02a4e3dfcfc5251282c2" gracePeriod=600 Jan 22 06:49:49 crc kubenswrapper[4982]: E0122 06:49:49.105502 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:49:49 crc kubenswrapper[4982]: I0122 06:49:49.151090 4982 generic.go:334] "Generic (PLEG): container finished" podID="2829369e-72ba-4637-853b-88f5cf242a0e" containerID="2bc401c1226a58ddc68be49a09047e96ed0187d9d6cd02a4e3dfcfc5251282c2" exitCode=0 Jan 22 06:49:49 crc kubenswrapper[4982]: I0122 06:49:49.151132 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerDied","Data":"2bc401c1226a58ddc68be49a09047e96ed0187d9d6cd02a4e3dfcfc5251282c2"} Jan 22 06:49:49 crc kubenswrapper[4982]: I0122 06:49:49.151200 4982 scope.go:117] "RemoveContainer" containerID="19ddd58d8ee9dfa3199bf4ed16700388263bf055f586b14c3039b4d0ff47875e" Jan 22 06:49:49 crc kubenswrapper[4982]: I0122 06:49:49.152514 4982 scope.go:117] "RemoveContainer" containerID="2bc401c1226a58ddc68be49a09047e96ed0187d9d6cd02a4e3dfcfc5251282c2" Jan 22 06:49:49 crc kubenswrapper[4982]: E0122 06:49:49.153227 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:50:03 crc kubenswrapper[4982]: I0122 06:50:03.721102 4982 scope.go:117] "RemoveContainer" containerID="2bc401c1226a58ddc68be49a09047e96ed0187d9d6cd02a4e3dfcfc5251282c2" Jan 22 06:50:03 crc kubenswrapper[4982]: E0122 06:50:03.722307 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:50:18 crc kubenswrapper[4982]: I0122 06:50:18.720003 4982 scope.go:117] "RemoveContainer" containerID="2bc401c1226a58ddc68be49a09047e96ed0187d9d6cd02a4e3dfcfc5251282c2" Jan 22 
06:50:18 crc kubenswrapper[4982]: E0122 06:50:18.721216 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:50:31 crc kubenswrapper[4982]: I0122 06:50:31.719339 4982 scope.go:117] "RemoveContainer" containerID="2bc401c1226a58ddc68be49a09047e96ed0187d9d6cd02a4e3dfcfc5251282c2" Jan 22 06:50:31 crc kubenswrapper[4982]: E0122 06:50:31.720773 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:50:45 crc kubenswrapper[4982]: I0122 06:50:45.719377 4982 scope.go:117] "RemoveContainer" containerID="2bc401c1226a58ddc68be49a09047e96ed0187d9d6cd02a4e3dfcfc5251282c2" Jan 22 06:50:45 crc kubenswrapper[4982]: E0122 06:50:45.720194 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:50:57 crc kubenswrapper[4982]: I0122 06:50:57.720003 4982 scope.go:117] "RemoveContainer" containerID="2bc401c1226a58ddc68be49a09047e96ed0187d9d6cd02a4e3dfcfc5251282c2" Jan 22 06:50:57 crc kubenswrapper[4982]: E0122 06:50:57.721145 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:51:08 crc kubenswrapper[4982]: I0122 06:51:08.719837 4982 scope.go:117] "RemoveContainer" containerID="2bc401c1226a58ddc68be49a09047e96ed0187d9d6cd02a4e3dfcfc5251282c2" Jan 22 06:51:08 crc kubenswrapper[4982]: E0122 06:51:08.720926 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:51:21 crc kubenswrapper[4982]: I0122 06:51:21.719515 4982 scope.go:117] "RemoveContainer" containerID="2bc401c1226a58ddc68be49a09047e96ed0187d9d6cd02a4e3dfcfc5251282c2" Jan 22 06:51:21 crc kubenswrapper[4982]: E0122 06:51:21.720399 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:51:36 crc kubenswrapper[4982]: I0122 06:51:36.719167 4982 scope.go:117] "RemoveContainer" containerID="2bc401c1226a58ddc68be49a09047e96ed0187d9d6cd02a4e3dfcfc5251282c2" Jan 22 06:51:36 crc kubenswrapper[4982]: E0122 06:51:36.719970 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:51:47 crc kubenswrapper[4982]: I0122 06:51:47.719746 4982 scope.go:117] "RemoveContainer" containerID="2bc401c1226a58ddc68be49a09047e96ed0187d9d6cd02a4e3dfcfc5251282c2" Jan 22 06:51:47 crc kubenswrapper[4982]: E0122 06:51:47.720653 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:52:01 crc kubenswrapper[4982]: I0122 06:52:01.719476 4982 scope.go:117] "RemoveContainer" containerID="2bc401c1226a58ddc68be49a09047e96ed0187d9d6cd02a4e3dfcfc5251282c2" Jan 22 06:52:01 crc kubenswrapper[4982]: E0122 06:52:01.720279 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:52:13 crc kubenswrapper[4982]: I0122 06:52:13.719908 4982 scope.go:117] "RemoveContainer" containerID="2bc401c1226a58ddc68be49a09047e96ed0187d9d6cd02a4e3dfcfc5251282c2" Jan 22 06:52:13 crc kubenswrapper[4982]: E0122 06:52:13.720902 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:52:24 crc kubenswrapper[4982]: I0122 06:52:24.719358 4982 scope.go:117] "RemoveContainer" containerID="2bc401c1226a58ddc68be49a09047e96ed0187d9d6cd02a4e3dfcfc5251282c2" Jan 22 06:52:24 crc kubenswrapper[4982]: E0122 06:52:24.720382 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:52:35 crc kubenswrapper[4982]: I0122 06:52:35.718943 4982 scope.go:117] "RemoveContainer" containerID="2bc401c1226a58ddc68be49a09047e96ed0187d9d6cd02a4e3dfcfc5251282c2" Jan 22 06:52:35 crc kubenswrapper[4982]: E0122 06:52:35.719664 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:52:48 crc kubenswrapper[4982]: I0122 06:52:48.719918 4982 scope.go:117] "RemoveContainer" containerID="2bc401c1226a58ddc68be49a09047e96ed0187d9d6cd02a4e3dfcfc5251282c2" Jan 22 06:52:48 crc kubenswrapper[4982]: E0122 06:52:48.720844 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:53:01 crc kubenswrapper[4982]: I0122 06:53:01.719701 4982 scope.go:117] "RemoveContainer" containerID="2bc401c1226a58ddc68be49a09047e96ed0187d9d6cd02a4e3dfcfc5251282c2" Jan 22 06:53:01 crc kubenswrapper[4982]: E0122 06:53:01.720398 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:53:13 crc kubenswrapper[4982]: I0122 06:53:13.719875 4982 scope.go:117] "RemoveContainer" containerID="2bc401c1226a58ddc68be49a09047e96ed0187d9d6cd02a4e3dfcfc5251282c2" Jan 22 06:53:13 crc kubenswrapper[4982]: E0122 06:53:13.720732 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:53:27 crc kubenswrapper[4982]: I0122 06:53:27.719250 4982 scope.go:117] "RemoveContainer" containerID="2bc401c1226a58ddc68be49a09047e96ed0187d9d6cd02a4e3dfcfc5251282c2" Jan 22 06:53:27 crc kubenswrapper[4982]: E0122 06:53:27.720420 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:53:40 crc kubenswrapper[4982]: I0122 06:53:40.720545 4982 
scope.go:117] "RemoveContainer" containerID="2bc401c1226a58ddc68be49a09047e96ed0187d9d6cd02a4e3dfcfc5251282c2" Jan 22 06:53:40 crc kubenswrapper[4982]: E0122 06:53:40.721809 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:53:44 crc kubenswrapper[4982]: I0122 06:53:44.364401 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-mxbrn"] Jan 22 06:53:44 crc kubenswrapper[4982]: E0122 06:53:44.365150 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f072dbf-9100-4697-a139-0a9424898487" containerName="registry-server" Jan 22 06:53:44 crc kubenswrapper[4982]: I0122 06:53:44.365166 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f072dbf-9100-4697-a139-0a9424898487" containerName="registry-server" Jan 22 06:53:44 crc kubenswrapper[4982]: E0122 06:53:44.365183 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f072dbf-9100-4697-a139-0a9424898487" containerName="extract-content" Jan 22 06:53:44 crc kubenswrapper[4982]: I0122 06:53:44.365190 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f072dbf-9100-4697-a139-0a9424898487" containerName="extract-content" Jan 22 06:53:44 crc kubenswrapper[4982]: E0122 06:53:44.365206 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f072dbf-9100-4697-a139-0a9424898487" containerName="extract-utilities" Jan 22 06:53:44 crc kubenswrapper[4982]: I0122 06:53:44.365215 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f072dbf-9100-4697-a139-0a9424898487" containerName="extract-utilities" Jan 22 06:53:44 crc kubenswrapper[4982]: I0122 06:53:44.365382 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f072dbf-9100-4697-a139-0a9424898487" containerName="registry-server" Jan 22 06:53:44 crc kubenswrapper[4982]: I0122 06:53:44.366574 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mxbrn" Jan 22 06:53:44 crc kubenswrapper[4982]: I0122 06:53:44.394939 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mxbrn"] Jan 22 06:53:44 crc kubenswrapper[4982]: I0122 06:53:44.509374 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58f845d6-63f8-420f-ab6d-deb415896e42-catalog-content\") pod \"community-operators-mxbrn\" (UID: \"58f845d6-63f8-420f-ab6d-deb415896e42\") " pod="openshift-marketplace/community-operators-mxbrn" Jan 22 06:53:44 crc kubenswrapper[4982]: I0122 06:53:44.509614 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58f845d6-63f8-420f-ab6d-deb415896e42-utilities\") pod \"community-operators-mxbrn\" (UID: \"58f845d6-63f8-420f-ab6d-deb415896e42\") " pod="openshift-marketplace/community-operators-mxbrn" Jan 22 06:53:44 crc kubenswrapper[4982]: I0122 06:53:44.510070 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmgjj\" (UniqueName: \"kubernetes.io/projected/58f845d6-63f8-420f-ab6d-deb415896e42-kube-api-access-jmgjj\") pod \"community-operators-mxbrn\" (UID: \"58f845d6-63f8-420f-ab6d-deb415896e42\") " pod="openshift-marketplace/community-operators-mxbrn" Jan 22 06:53:44 crc kubenswrapper[4982]: I0122 06:53:44.612523 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58f845d6-63f8-420f-ab6d-deb415896e42-utilities\") pod \"community-operators-mxbrn\" (UID: \"58f845d6-63f8-420f-ab6d-deb415896e42\") " pod="openshift-marketplace/community-operators-mxbrn" Jan 22 06:53:44 crc kubenswrapper[4982]: I0122 06:53:44.613435 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58f845d6-63f8-420f-ab6d-deb415896e42-utilities\") pod \"community-operators-mxbrn\" (UID: \"58f845d6-63f8-420f-ab6d-deb415896e42\") " pod="openshift-marketplace/community-operators-mxbrn" Jan 22 06:53:44 crc kubenswrapper[4982]: I0122 06:53:44.613597 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmgjj\" (UniqueName: \"kubernetes.io/projected/58f845d6-63f8-420f-ab6d-deb415896e42-kube-api-access-jmgjj\") pod \"community-operators-mxbrn\" (UID: \"58f845d6-63f8-420f-ab6d-deb415896e42\") " pod="openshift-marketplace/community-operators-mxbrn" Jan 22 06:53:44 crc kubenswrapper[4982]: I0122 06:53:44.613655 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58f845d6-63f8-420f-ab6d-deb415896e42-catalog-content\") pod \"community-operators-mxbrn\" (UID: \"58f845d6-63f8-420f-ab6d-deb415896e42\") " pod="openshift-marketplace/community-operators-mxbrn" Jan 22 06:53:44 crc kubenswrapper[4982]: I0122 06:53:44.614589 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58f845d6-63f8-420f-ab6d-deb415896e42-catalog-content\") pod \"community-operators-mxbrn\" (UID: \"58f845d6-63f8-420f-ab6d-deb415896e42\") " pod="openshift-marketplace/community-operators-mxbrn" Jan 22 06:53:44 crc kubenswrapper[4982]: I0122 06:53:44.636280 4982 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-jmgjj\" (UniqueName: \"kubernetes.io/projected/58f845d6-63f8-420f-ab6d-deb415896e42-kube-api-access-jmgjj\") pod \"community-operators-mxbrn\" (UID: \"58f845d6-63f8-420f-ab6d-deb415896e42\") " pod="openshift-marketplace/community-operators-mxbrn" Jan 22 06:53:44 crc kubenswrapper[4982]: I0122 06:53:44.699509 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mxbrn" Jan 22 06:53:45 crc kubenswrapper[4982]: I0122 06:53:45.205197 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mxbrn"] Jan 22 06:53:45 crc kubenswrapper[4982]: W0122 06:53:45.210137 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod58f845d6_63f8_420f_ab6d_deb415896e42.slice/crio-6d5c9c555f43995da052f97e9e5e9d59dccfdb2176b7197c80a110970a68ec92 WatchSource:0}: Error finding container 6d5c9c555f43995da052f97e9e5e9d59dccfdb2176b7197c80a110970a68ec92: Status 404 returned error can't find the container with id 6d5c9c555f43995da052f97e9e5e9d59dccfdb2176b7197c80a110970a68ec92 Jan 22 06:53:45 crc kubenswrapper[4982]: I0122 06:53:45.941282 4982 generic.go:334] "Generic (PLEG): container finished" podID="58f845d6-63f8-420f-ab6d-deb415896e42" containerID="77a99a9ad58c99c370ec33aacade3e5f034cd353940e886d10aa3ecb19503f5c" exitCode=0 Jan 22 06:53:45 crc kubenswrapper[4982]: I0122 06:53:45.941471 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mxbrn" event={"ID":"58f845d6-63f8-420f-ab6d-deb415896e42","Type":"ContainerDied","Data":"77a99a9ad58c99c370ec33aacade3e5f034cd353940e886d10aa3ecb19503f5c"} Jan 22 06:53:45 crc kubenswrapper[4982]: I0122 06:53:45.941640 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mxbrn" event={"ID":"58f845d6-63f8-420f-ab6d-deb415896e42","Type":"ContainerStarted","Data":"6d5c9c555f43995da052f97e9e5e9d59dccfdb2176b7197c80a110970a68ec92"} Jan 22 06:53:49 crc kubenswrapper[4982]: I0122 06:53:49.983693 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mxbrn" event={"ID":"58f845d6-63f8-420f-ab6d-deb415896e42","Type":"ContainerStarted","Data":"74e68eca27633fc259f1c8d139a19185209984f2308567e0d2b9d90d4d24c672"} Jan 22 06:53:50 crc kubenswrapper[4982]: I0122 06:53:50.996575 4982 generic.go:334] "Generic (PLEG): container finished" podID="58f845d6-63f8-420f-ab6d-deb415896e42" containerID="74e68eca27633fc259f1c8d139a19185209984f2308567e0d2b9d90d4d24c672" exitCode=0 Jan 22 06:53:50 crc kubenswrapper[4982]: I0122 06:53:50.996633 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mxbrn" event={"ID":"58f845d6-63f8-420f-ab6d-deb415896e42","Type":"ContainerDied","Data":"74e68eca27633fc259f1c8d139a19185209984f2308567e0d2b9d90d4d24c672"} Jan 22 06:53:52 crc kubenswrapper[4982]: I0122 06:53:52.008209 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mxbrn" event={"ID":"58f845d6-63f8-420f-ab6d-deb415896e42","Type":"ContainerStarted","Data":"7947d4aa9ac02b0a9b792814542a94809b056e9ba61d94a0b863c6c9b6c5fb56"} Jan 22 06:53:52 crc kubenswrapper[4982]: I0122 06:53:52.042088 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-mxbrn" 
podStartSLOduration=2.499970436 podStartE2EDuration="8.042062599s" podCreationTimestamp="2026-01-22 06:53:44 +0000 UTC" firstStartedPulling="2026-01-22 06:53:45.943892592 +0000 UTC m=+4086.782530615" lastFinishedPulling="2026-01-22 06:53:51.485984745 +0000 UTC m=+4092.324622778" observedRunningTime="2026-01-22 06:53:52.031587207 +0000 UTC m=+4092.870225280" watchObservedRunningTime="2026-01-22 06:53:52.042062599 +0000 UTC m=+4092.880700602" Jan 22 06:53:53 crc kubenswrapper[4982]: I0122 06:53:53.719229 4982 scope.go:117] "RemoveContainer" containerID="2bc401c1226a58ddc68be49a09047e96ed0187d9d6cd02a4e3dfcfc5251282c2" Jan 22 06:53:53 crc kubenswrapper[4982]: E0122 06:53:53.719939 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:53:54 crc kubenswrapper[4982]: I0122 06:53:54.700453 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-mxbrn" Jan 22 06:53:54 crc kubenswrapper[4982]: I0122 06:53:54.700544 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-mxbrn" Jan 22 06:53:54 crc kubenswrapper[4982]: I0122 06:53:54.772413 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-mxbrn" Jan 22 06:54:04 crc kubenswrapper[4982]: I0122 06:54:04.777989 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-mxbrn" Jan 22 06:54:04 crc kubenswrapper[4982]: I0122 06:54:04.867894 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mxbrn"] Jan 22 06:54:04 crc kubenswrapper[4982]: I0122 06:54:04.925770 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-85lr9"] Jan 22 06:54:04 crc kubenswrapper[4982]: I0122 06:54:04.926458 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-85lr9" podUID="5afe272a-253c-4286-b86b-fc78a1cbb21d" containerName="registry-server" containerID="cri-o://cb1a54248d20f69851f0b7b9fed7853d8f741c2a333beeab85d6da35edce4e8a" gracePeriod=2 Jan 22 06:54:06 crc kubenswrapper[4982]: I0122 06:54:06.142031 4982 generic.go:334] "Generic (PLEG): container finished" podID="5afe272a-253c-4286-b86b-fc78a1cbb21d" containerID="cb1a54248d20f69851f0b7b9fed7853d8f741c2a333beeab85d6da35edce4e8a" exitCode=0 Jan 22 06:54:06 crc kubenswrapper[4982]: I0122 06:54:06.142133 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-85lr9" event={"ID":"5afe272a-253c-4286-b86b-fc78a1cbb21d","Type":"ContainerDied","Data":"cb1a54248d20f69851f0b7b9fed7853d8f741c2a333beeab85d6da35edce4e8a"} Jan 22 06:54:07 crc kubenswrapper[4982]: I0122 06:54:07.720417 4982 scope.go:117] "RemoveContainer" containerID="2bc401c1226a58ddc68be49a09047e96ed0187d9d6cd02a4e3dfcfc5251282c2" Jan 22 06:54:07 crc kubenswrapper[4982]: E0122 06:54:07.720913 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:54:07 crc kubenswrapper[4982]: I0122 06:54:07.964146 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-85lr9" Jan 22 06:54:08 crc kubenswrapper[4982]: I0122 06:54:08.103533 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5afe272a-253c-4286-b86b-fc78a1cbb21d-utilities\") pod \"5afe272a-253c-4286-b86b-fc78a1cbb21d\" (UID: \"5afe272a-253c-4286-b86b-fc78a1cbb21d\") " Jan 22 06:54:08 crc kubenswrapper[4982]: I0122 06:54:08.103690 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vff77\" (UniqueName: \"kubernetes.io/projected/5afe272a-253c-4286-b86b-fc78a1cbb21d-kube-api-access-vff77\") pod \"5afe272a-253c-4286-b86b-fc78a1cbb21d\" (UID: \"5afe272a-253c-4286-b86b-fc78a1cbb21d\") " Jan 22 06:54:08 crc kubenswrapper[4982]: I0122 06:54:08.103756 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5afe272a-253c-4286-b86b-fc78a1cbb21d-catalog-content\") pod \"5afe272a-253c-4286-b86b-fc78a1cbb21d\" (UID: \"5afe272a-253c-4286-b86b-fc78a1cbb21d\") " Jan 22 06:54:08 crc kubenswrapper[4982]: I0122 06:54:08.104772 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5afe272a-253c-4286-b86b-fc78a1cbb21d-utilities" (OuterVolumeSpecName: "utilities") pod "5afe272a-253c-4286-b86b-fc78a1cbb21d" (UID: "5afe272a-253c-4286-b86b-fc78a1cbb21d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:54:08 crc kubenswrapper[4982]: I0122 06:54:08.110106 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5afe272a-253c-4286-b86b-fc78a1cbb21d-kube-api-access-vff77" (OuterVolumeSpecName: "kube-api-access-vff77") pod "5afe272a-253c-4286-b86b-fc78a1cbb21d" (UID: "5afe272a-253c-4286-b86b-fc78a1cbb21d"). InnerVolumeSpecName "kube-api-access-vff77". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:54:08 crc kubenswrapper[4982]: I0122 06:54:08.164465 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-85lr9" event={"ID":"5afe272a-253c-4286-b86b-fc78a1cbb21d","Type":"ContainerDied","Data":"12ef494cf3ac190fec42db75767cb4715c040fb3616e02200c156248dfcc0a0b"} Jan 22 06:54:08 crc kubenswrapper[4982]: I0122 06:54:08.164539 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-85lr9" Jan 22 06:54:08 crc kubenswrapper[4982]: I0122 06:54:08.164555 4982 scope.go:117] "RemoveContainer" containerID="cb1a54248d20f69851f0b7b9fed7853d8f741c2a333beeab85d6da35edce4e8a" Jan 22 06:54:08 crc kubenswrapper[4982]: I0122 06:54:08.168795 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5afe272a-253c-4286-b86b-fc78a1cbb21d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5afe272a-253c-4286-b86b-fc78a1cbb21d" (UID: "5afe272a-253c-4286-b86b-fc78a1cbb21d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:54:08 crc kubenswrapper[4982]: I0122 06:54:08.191677 4982 scope.go:117] "RemoveContainer" containerID="de0254e3b6399523e23737fdf1438fb129c634d87944beae16843819ea580df1" Jan 22 06:54:08 crc kubenswrapper[4982]: I0122 06:54:08.206318 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vff77\" (UniqueName: \"kubernetes.io/projected/5afe272a-253c-4286-b86b-fc78a1cbb21d-kube-api-access-vff77\") on node \"crc\" DevicePath \"\"" Jan 22 06:54:08 crc kubenswrapper[4982]: I0122 06:54:08.206348 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5afe272a-253c-4286-b86b-fc78a1cbb21d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 06:54:08 crc kubenswrapper[4982]: I0122 06:54:08.206392 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5afe272a-253c-4286-b86b-fc78a1cbb21d-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 06:54:08 crc kubenswrapper[4982]: I0122 06:54:08.213792 4982 scope.go:117] "RemoveContainer" containerID="b2c4aee486ece5927491b5b3762a73e4004c2578b28fae105a249eaa9023fd26" Jan 22 06:54:08 crc kubenswrapper[4982]: I0122 06:54:08.496168 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-85lr9"] Jan 22 06:54:08 crc kubenswrapper[4982]: I0122 06:54:08.502264 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-85lr9"] Jan 22 06:54:09 crc kubenswrapper[4982]: I0122 06:54:09.733893 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5afe272a-253c-4286-b86b-fc78a1cbb21d" path="/var/lib/kubelet/pods/5afe272a-253c-4286-b86b-fc78a1cbb21d/volumes" Jan 22 06:54:19 crc kubenswrapper[4982]: I0122 06:54:19.915719 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5llsw"] Jan 22 06:54:19 crc kubenswrapper[4982]: E0122 06:54:19.916777 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5afe272a-253c-4286-b86b-fc78a1cbb21d" containerName="extract-utilities" Jan 22 06:54:19 crc kubenswrapper[4982]: I0122 06:54:19.916793 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="5afe272a-253c-4286-b86b-fc78a1cbb21d" containerName="extract-utilities" Jan 22 06:54:19 crc kubenswrapper[4982]: E0122 06:54:19.916805 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5afe272a-253c-4286-b86b-fc78a1cbb21d" containerName="registry-server" Jan 22 06:54:19 crc kubenswrapper[4982]: I0122 06:54:19.916812 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="5afe272a-253c-4286-b86b-fc78a1cbb21d" containerName="registry-server" Jan 22 06:54:19 crc kubenswrapper[4982]: E0122 06:54:19.916820 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5afe272a-253c-4286-b86b-fc78a1cbb21d" containerName="extract-content" Jan 22 06:54:19 crc kubenswrapper[4982]: I0122 06:54:19.916831 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="5afe272a-253c-4286-b86b-fc78a1cbb21d" containerName="extract-content" Jan 22 06:54:19 crc kubenswrapper[4982]: I0122 06:54:19.917066 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="5afe272a-253c-4286-b86b-fc78a1cbb21d" containerName="registry-server" Jan 22 06:54:19 crc kubenswrapper[4982]: I0122 06:54:19.918319 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5llsw" Jan 22 06:54:19 crc kubenswrapper[4982]: I0122 06:54:19.963493 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5llsw"] Jan 22 06:54:20 crc kubenswrapper[4982]: I0122 06:54:20.092814 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jz2pl\" (UniqueName: \"kubernetes.io/projected/59d0eb70-33ea-4236-abe4-630258a8c7d1-kube-api-access-jz2pl\") pod \"redhat-operators-5llsw\" (UID: \"59d0eb70-33ea-4236-abe4-630258a8c7d1\") " pod="openshift-marketplace/redhat-operators-5llsw" Jan 22 06:54:20 crc kubenswrapper[4982]: I0122 06:54:20.093054 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/59d0eb70-33ea-4236-abe4-630258a8c7d1-catalog-content\") pod \"redhat-operators-5llsw\" (UID: \"59d0eb70-33ea-4236-abe4-630258a8c7d1\") " pod="openshift-marketplace/redhat-operators-5llsw" Jan 22 06:54:20 crc kubenswrapper[4982]: I0122 06:54:20.093110 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/59d0eb70-33ea-4236-abe4-630258a8c7d1-utilities\") pod \"redhat-operators-5llsw\" (UID: \"59d0eb70-33ea-4236-abe4-630258a8c7d1\") " pod="openshift-marketplace/redhat-operators-5llsw" Jan 22 06:54:20 crc kubenswrapper[4982]: I0122 06:54:20.194023 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jz2pl\" (UniqueName: \"kubernetes.io/projected/59d0eb70-33ea-4236-abe4-630258a8c7d1-kube-api-access-jz2pl\") pod \"redhat-operators-5llsw\" (UID: \"59d0eb70-33ea-4236-abe4-630258a8c7d1\") " pod="openshift-marketplace/redhat-operators-5llsw" Jan 22 06:54:20 crc kubenswrapper[4982]: I0122 06:54:20.194176 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/59d0eb70-33ea-4236-abe4-630258a8c7d1-catalog-content\") pod \"redhat-operators-5llsw\" (UID: \"59d0eb70-33ea-4236-abe4-630258a8c7d1\") " pod="openshift-marketplace/redhat-operators-5llsw" Jan 22 06:54:20 crc kubenswrapper[4982]: I0122 06:54:20.194221 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/59d0eb70-33ea-4236-abe4-630258a8c7d1-utilities\") pod \"redhat-operators-5llsw\" (UID: \"59d0eb70-33ea-4236-abe4-630258a8c7d1\") " pod="openshift-marketplace/redhat-operators-5llsw" Jan 22 06:54:20 crc kubenswrapper[4982]: I0122 06:54:20.194806 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/59d0eb70-33ea-4236-abe4-630258a8c7d1-catalog-content\") pod \"redhat-operators-5llsw\" (UID: \"59d0eb70-33ea-4236-abe4-630258a8c7d1\") " pod="openshift-marketplace/redhat-operators-5llsw" Jan 22 06:54:20 crc kubenswrapper[4982]: I0122 06:54:20.194848 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/59d0eb70-33ea-4236-abe4-630258a8c7d1-utilities\") pod \"redhat-operators-5llsw\" (UID: \"59d0eb70-33ea-4236-abe4-630258a8c7d1\") " pod="openshift-marketplace/redhat-operators-5llsw" Jan 22 06:54:20 crc kubenswrapper[4982]: I0122 06:54:20.214934 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-jz2pl\" (UniqueName: \"kubernetes.io/projected/59d0eb70-33ea-4236-abe4-630258a8c7d1-kube-api-access-jz2pl\") pod \"redhat-operators-5llsw\" (UID: \"59d0eb70-33ea-4236-abe4-630258a8c7d1\") " pod="openshift-marketplace/redhat-operators-5llsw" Jan 22 06:54:20 crc kubenswrapper[4982]: I0122 06:54:20.257193 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5llsw" Jan 22 06:54:20 crc kubenswrapper[4982]: I0122 06:54:20.759325 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5llsw"] Jan 22 06:54:21 crc kubenswrapper[4982]: I0122 06:54:21.295120 4982 generic.go:334] "Generic (PLEG): container finished" podID="59d0eb70-33ea-4236-abe4-630258a8c7d1" containerID="2ec5490755753d875694062bdd3f78452cc2d518140cfe6d9bb4a388c8853c85" exitCode=0 Jan 22 06:54:21 crc kubenswrapper[4982]: I0122 06:54:21.295162 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5llsw" event={"ID":"59d0eb70-33ea-4236-abe4-630258a8c7d1","Type":"ContainerDied","Data":"2ec5490755753d875694062bdd3f78452cc2d518140cfe6d9bb4a388c8853c85"} Jan 22 06:54:21 crc kubenswrapper[4982]: I0122 06:54:21.295187 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5llsw" event={"ID":"59d0eb70-33ea-4236-abe4-630258a8c7d1","Type":"ContainerStarted","Data":"1a67e8b1fe95fe12bfa7f53898b0b55e6cadeba221af9993dccd3662c4c3c4ad"} Jan 22 06:54:21 crc kubenswrapper[4982]: I0122 06:54:21.296645 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 06:54:21 crc kubenswrapper[4982]: I0122 06:54:21.719944 4982 scope.go:117] "RemoveContainer" containerID="2bc401c1226a58ddc68be49a09047e96ed0187d9d6cd02a4e3dfcfc5251282c2" Jan 22 06:54:21 crc kubenswrapper[4982]: E0122 06:54:21.720286 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:54:22 crc kubenswrapper[4982]: I0122 06:54:22.307757 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5llsw" event={"ID":"59d0eb70-33ea-4236-abe4-630258a8c7d1","Type":"ContainerStarted","Data":"514e3432dd05b92185321dc1eca59fd91f2443a0662478fbdf5c38166fbc897e"} Jan 22 06:54:23 crc kubenswrapper[4982]: I0122 06:54:23.318019 4982 generic.go:334] "Generic (PLEG): container finished" podID="59d0eb70-33ea-4236-abe4-630258a8c7d1" containerID="514e3432dd05b92185321dc1eca59fd91f2443a0662478fbdf5c38166fbc897e" exitCode=0 Jan 22 06:54:23 crc kubenswrapper[4982]: I0122 06:54:23.318095 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5llsw" event={"ID":"59d0eb70-33ea-4236-abe4-630258a8c7d1","Type":"ContainerDied","Data":"514e3432dd05b92185321dc1eca59fd91f2443a0662478fbdf5c38166fbc897e"} Jan 22 06:54:24 crc kubenswrapper[4982]: I0122 06:54:24.333066 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5llsw" 
event={"ID":"59d0eb70-33ea-4236-abe4-630258a8c7d1","Type":"ContainerStarted","Data":"ae2de4c1b002c1eef56e98f11df710bde6cba78b19fcc340be3874fdaeab1cae"} Jan 22 06:54:24 crc kubenswrapper[4982]: I0122 06:54:24.365364 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5llsw" podStartSLOduration=2.94159487 podStartE2EDuration="5.365324136s" podCreationTimestamp="2026-01-22 06:54:19 +0000 UTC" firstStartedPulling="2026-01-22 06:54:21.296347526 +0000 UTC m=+4122.134985539" lastFinishedPulling="2026-01-22 06:54:23.720076772 +0000 UTC m=+4124.558714805" observedRunningTime="2026-01-22 06:54:24.359821048 +0000 UTC m=+4125.198459061" watchObservedRunningTime="2026-01-22 06:54:24.365324136 +0000 UTC m=+4125.203962169" Jan 22 06:54:30 crc kubenswrapper[4982]: I0122 06:54:30.257926 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5llsw" Jan 22 06:54:30 crc kubenswrapper[4982]: I0122 06:54:30.258285 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5llsw" Jan 22 06:54:31 crc kubenswrapper[4982]: I0122 06:54:31.330282 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-5llsw" podUID="59d0eb70-33ea-4236-abe4-630258a8c7d1" containerName="registry-server" probeResult="failure" output=< Jan 22 06:54:31 crc kubenswrapper[4982]: timeout: failed to connect service ":50051" within 1s Jan 22 06:54:31 crc kubenswrapper[4982]: > Jan 22 06:54:33 crc kubenswrapper[4982]: I0122 06:54:33.719926 4982 scope.go:117] "RemoveContainer" containerID="2bc401c1226a58ddc68be49a09047e96ed0187d9d6cd02a4e3dfcfc5251282c2" Jan 22 06:54:33 crc kubenswrapper[4982]: E0122 06:54:33.720448 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:54:40 crc kubenswrapper[4982]: I0122 06:54:40.340500 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5llsw" Jan 22 06:54:40 crc kubenswrapper[4982]: I0122 06:54:40.398545 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5llsw" Jan 22 06:54:40 crc kubenswrapper[4982]: I0122 06:54:40.594033 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5llsw"] Jan 22 06:54:41 crc kubenswrapper[4982]: I0122 06:54:41.485309 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5llsw" podUID="59d0eb70-33ea-4236-abe4-630258a8c7d1" containerName="registry-server" containerID="cri-o://ae2de4c1b002c1eef56e98f11df710bde6cba78b19fcc340be3874fdaeab1cae" gracePeriod=2 Jan 22 06:54:44 crc kubenswrapper[4982]: I0122 06:54:44.334218 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5llsw" Jan 22 06:54:44 crc kubenswrapper[4982]: I0122 06:54:44.427179 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/59d0eb70-33ea-4236-abe4-630258a8c7d1-catalog-content\") pod \"59d0eb70-33ea-4236-abe4-630258a8c7d1\" (UID: \"59d0eb70-33ea-4236-abe4-630258a8c7d1\") " Jan 22 06:54:44 crc kubenswrapper[4982]: I0122 06:54:44.427240 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jz2pl\" (UniqueName: \"kubernetes.io/projected/59d0eb70-33ea-4236-abe4-630258a8c7d1-kube-api-access-jz2pl\") pod \"59d0eb70-33ea-4236-abe4-630258a8c7d1\" (UID: \"59d0eb70-33ea-4236-abe4-630258a8c7d1\") " Jan 22 06:54:44 crc kubenswrapper[4982]: I0122 06:54:44.427292 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/59d0eb70-33ea-4236-abe4-630258a8c7d1-utilities\") pod \"59d0eb70-33ea-4236-abe4-630258a8c7d1\" (UID: \"59d0eb70-33ea-4236-abe4-630258a8c7d1\") " Jan 22 06:54:44 crc kubenswrapper[4982]: I0122 06:54:44.428607 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/59d0eb70-33ea-4236-abe4-630258a8c7d1-utilities" (OuterVolumeSpecName: "utilities") pod "59d0eb70-33ea-4236-abe4-630258a8c7d1" (UID: "59d0eb70-33ea-4236-abe4-630258a8c7d1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:54:44 crc kubenswrapper[4982]: I0122 06:54:44.435098 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59d0eb70-33ea-4236-abe4-630258a8c7d1-kube-api-access-jz2pl" (OuterVolumeSpecName: "kube-api-access-jz2pl") pod "59d0eb70-33ea-4236-abe4-630258a8c7d1" (UID: "59d0eb70-33ea-4236-abe4-630258a8c7d1"). InnerVolumeSpecName "kube-api-access-jz2pl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:54:44 crc kubenswrapper[4982]: I0122 06:54:44.511794 4982 generic.go:334] "Generic (PLEG): container finished" podID="59d0eb70-33ea-4236-abe4-630258a8c7d1" containerID="ae2de4c1b002c1eef56e98f11df710bde6cba78b19fcc340be3874fdaeab1cae" exitCode=0 Jan 22 06:54:44 crc kubenswrapper[4982]: I0122 06:54:44.511884 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5llsw" Jan 22 06:54:44 crc kubenswrapper[4982]: I0122 06:54:44.511889 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5llsw" event={"ID":"59d0eb70-33ea-4236-abe4-630258a8c7d1","Type":"ContainerDied","Data":"ae2de4c1b002c1eef56e98f11df710bde6cba78b19fcc340be3874fdaeab1cae"} Jan 22 06:54:44 crc kubenswrapper[4982]: I0122 06:54:44.511922 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5llsw" event={"ID":"59d0eb70-33ea-4236-abe4-630258a8c7d1","Type":"ContainerDied","Data":"1a67e8b1fe95fe12bfa7f53898b0b55e6cadeba221af9993dccd3662c4c3c4ad"} Jan 22 06:54:44 crc kubenswrapper[4982]: I0122 06:54:44.511941 4982 scope.go:117] "RemoveContainer" containerID="ae2de4c1b002c1eef56e98f11df710bde6cba78b19fcc340be3874fdaeab1cae" Jan 22 06:54:44 crc kubenswrapper[4982]: I0122 06:54:44.529003 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/59d0eb70-33ea-4236-abe4-630258a8c7d1-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 06:54:44 crc kubenswrapper[4982]: I0122 06:54:44.529044 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jz2pl\" (UniqueName: \"kubernetes.io/projected/59d0eb70-33ea-4236-abe4-630258a8c7d1-kube-api-access-jz2pl\") on node \"crc\" DevicePath \"\"" Jan 22 06:54:44 crc kubenswrapper[4982]: I0122 06:54:44.531773 4982 scope.go:117] "RemoveContainer" containerID="514e3432dd05b92185321dc1eca59fd91f2443a0662478fbdf5c38166fbc897e" Jan 22 06:54:44 crc kubenswrapper[4982]: I0122 06:54:44.555483 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/59d0eb70-33ea-4236-abe4-630258a8c7d1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "59d0eb70-33ea-4236-abe4-630258a8c7d1" (UID: "59d0eb70-33ea-4236-abe4-630258a8c7d1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:54:44 crc kubenswrapper[4982]: I0122 06:54:44.565761 4982 scope.go:117] "RemoveContainer" containerID="2ec5490755753d875694062bdd3f78452cc2d518140cfe6d9bb4a388c8853c85" Jan 22 06:54:44 crc kubenswrapper[4982]: I0122 06:54:44.583557 4982 scope.go:117] "RemoveContainer" containerID="ae2de4c1b002c1eef56e98f11df710bde6cba78b19fcc340be3874fdaeab1cae" Jan 22 06:54:44 crc kubenswrapper[4982]: E0122 06:54:44.584090 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae2de4c1b002c1eef56e98f11df710bde6cba78b19fcc340be3874fdaeab1cae\": container with ID starting with ae2de4c1b002c1eef56e98f11df710bde6cba78b19fcc340be3874fdaeab1cae not found: ID does not exist" containerID="ae2de4c1b002c1eef56e98f11df710bde6cba78b19fcc340be3874fdaeab1cae" Jan 22 06:54:44 crc kubenswrapper[4982]: I0122 06:54:44.584152 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae2de4c1b002c1eef56e98f11df710bde6cba78b19fcc340be3874fdaeab1cae"} err="failed to get container status \"ae2de4c1b002c1eef56e98f11df710bde6cba78b19fcc340be3874fdaeab1cae\": rpc error: code = NotFound desc = could not find container \"ae2de4c1b002c1eef56e98f11df710bde6cba78b19fcc340be3874fdaeab1cae\": container with ID starting with ae2de4c1b002c1eef56e98f11df710bde6cba78b19fcc340be3874fdaeab1cae not found: ID does not exist" Jan 22 06:54:44 crc kubenswrapper[4982]: I0122 06:54:44.584198 4982 scope.go:117] "RemoveContainer" containerID="514e3432dd05b92185321dc1eca59fd91f2443a0662478fbdf5c38166fbc897e" Jan 22 06:54:44 crc kubenswrapper[4982]: E0122 06:54:44.584689 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"514e3432dd05b92185321dc1eca59fd91f2443a0662478fbdf5c38166fbc897e\": container with ID starting with 514e3432dd05b92185321dc1eca59fd91f2443a0662478fbdf5c38166fbc897e not found: ID does not exist" containerID="514e3432dd05b92185321dc1eca59fd91f2443a0662478fbdf5c38166fbc897e" Jan 22 06:54:44 crc kubenswrapper[4982]: I0122 06:54:44.584746 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"514e3432dd05b92185321dc1eca59fd91f2443a0662478fbdf5c38166fbc897e"} err="failed to get container status \"514e3432dd05b92185321dc1eca59fd91f2443a0662478fbdf5c38166fbc897e\": rpc error: code = NotFound desc = could not find container \"514e3432dd05b92185321dc1eca59fd91f2443a0662478fbdf5c38166fbc897e\": container with ID starting with 514e3432dd05b92185321dc1eca59fd91f2443a0662478fbdf5c38166fbc897e not found: ID does not exist" Jan 22 06:54:44 crc kubenswrapper[4982]: I0122 06:54:44.584780 4982 scope.go:117] "RemoveContainer" containerID="2ec5490755753d875694062bdd3f78452cc2d518140cfe6d9bb4a388c8853c85" Jan 22 06:54:44 crc kubenswrapper[4982]: E0122 06:54:44.585162 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ec5490755753d875694062bdd3f78452cc2d518140cfe6d9bb4a388c8853c85\": container with ID starting with 2ec5490755753d875694062bdd3f78452cc2d518140cfe6d9bb4a388c8853c85 not found: ID does not exist" containerID="2ec5490755753d875694062bdd3f78452cc2d518140cfe6d9bb4a388c8853c85" Jan 22 06:54:44 crc kubenswrapper[4982]: I0122 06:54:44.585204 4982 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"2ec5490755753d875694062bdd3f78452cc2d518140cfe6d9bb4a388c8853c85"} err="failed to get container status \"2ec5490755753d875694062bdd3f78452cc2d518140cfe6d9bb4a388c8853c85\": rpc error: code = NotFound desc = could not find container \"2ec5490755753d875694062bdd3f78452cc2d518140cfe6d9bb4a388c8853c85\": container with ID starting with 2ec5490755753d875694062bdd3f78452cc2d518140cfe6d9bb4a388c8853c85 not found: ID does not exist" Jan 22 06:54:44 crc kubenswrapper[4982]: I0122 06:54:44.630423 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/59d0eb70-33ea-4236-abe4-630258a8c7d1-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 06:54:44 crc kubenswrapper[4982]: I0122 06:54:44.719767 4982 scope.go:117] "RemoveContainer" containerID="2bc401c1226a58ddc68be49a09047e96ed0187d9d6cd02a4e3dfcfc5251282c2" Jan 22 06:54:44 crc kubenswrapper[4982]: E0122 06:54:44.720337 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 06:54:44 crc kubenswrapper[4982]: I0122 06:54:44.876409 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5llsw"] Jan 22 06:54:44 crc kubenswrapper[4982]: I0122 06:54:44.887967 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5llsw"] Jan 22 06:54:45 crc kubenswrapper[4982]: I0122 06:54:45.737560 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59d0eb70-33ea-4236-abe4-630258a8c7d1" path="/var/lib/kubelet/pods/59d0eb70-33ea-4236-abe4-630258a8c7d1/volumes" Jan 22 06:54:56 crc kubenswrapper[4982]: I0122 06:54:56.718996 4982 scope.go:117] "RemoveContainer" containerID="2bc401c1226a58ddc68be49a09047e96ed0187d9d6cd02a4e3dfcfc5251282c2" Jan 22 06:54:57 crc kubenswrapper[4982]: I0122 06:54:57.636904 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"0b646ab2c504c7ede8f740de5f61a1d91ecd8f99f3e53460c23f3d8a0da10a91"} Jan 22 06:55:03 crc kubenswrapper[4982]: I0122 06:55:03.245082 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nk8s9"] Jan 22 06:55:03 crc kubenswrapper[4982]: E0122 06:55:03.246239 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59d0eb70-33ea-4236-abe4-630258a8c7d1" containerName="extract-utilities" Jan 22 06:55:03 crc kubenswrapper[4982]: I0122 06:55:03.246258 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="59d0eb70-33ea-4236-abe4-630258a8c7d1" containerName="extract-utilities" Jan 22 06:55:03 crc kubenswrapper[4982]: E0122 06:55:03.246283 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59d0eb70-33ea-4236-abe4-630258a8c7d1" containerName="registry-server" Jan 22 06:55:03 crc kubenswrapper[4982]: I0122 06:55:03.246296 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="59d0eb70-33ea-4236-abe4-630258a8c7d1" containerName="registry-server" Jan 22 06:55:03 crc kubenswrapper[4982]: E0122 06:55:03.246318 4982 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59d0eb70-33ea-4236-abe4-630258a8c7d1" containerName="extract-content" Jan 22 06:55:03 crc kubenswrapper[4982]: I0122 06:55:03.246329 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="59d0eb70-33ea-4236-abe4-630258a8c7d1" containerName="extract-content" Jan 22 06:55:03 crc kubenswrapper[4982]: I0122 06:55:03.246521 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="59d0eb70-33ea-4236-abe4-630258a8c7d1" containerName="registry-server" Jan 22 06:55:03 crc kubenswrapper[4982]: I0122 06:55:03.248090 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nk8s9" Jan 22 06:55:03 crc kubenswrapper[4982]: I0122 06:55:03.263515 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nk8s9"] Jan 22 06:55:03 crc kubenswrapper[4982]: I0122 06:55:03.434627 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a082e4c-9e29-44ce-9158-2cc37cb4025c-catalog-content\") pod \"redhat-marketplace-nk8s9\" (UID: \"2a082e4c-9e29-44ce-9158-2cc37cb4025c\") " pod="openshift-marketplace/redhat-marketplace-nk8s9" Jan 22 06:55:03 crc kubenswrapper[4982]: I0122 06:55:03.434750 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a082e4c-9e29-44ce-9158-2cc37cb4025c-utilities\") pod \"redhat-marketplace-nk8s9\" (UID: \"2a082e4c-9e29-44ce-9158-2cc37cb4025c\") " pod="openshift-marketplace/redhat-marketplace-nk8s9" Jan 22 06:55:03 crc kubenswrapper[4982]: I0122 06:55:03.434833 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmnz8\" (UniqueName: \"kubernetes.io/projected/2a082e4c-9e29-44ce-9158-2cc37cb4025c-kube-api-access-qmnz8\") pod \"redhat-marketplace-nk8s9\" (UID: \"2a082e4c-9e29-44ce-9158-2cc37cb4025c\") " pod="openshift-marketplace/redhat-marketplace-nk8s9" Jan 22 06:55:03 crc kubenswrapper[4982]: I0122 06:55:03.536312 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a082e4c-9e29-44ce-9158-2cc37cb4025c-catalog-content\") pod \"redhat-marketplace-nk8s9\" (UID: \"2a082e4c-9e29-44ce-9158-2cc37cb4025c\") " pod="openshift-marketplace/redhat-marketplace-nk8s9" Jan 22 06:55:03 crc kubenswrapper[4982]: I0122 06:55:03.536389 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a082e4c-9e29-44ce-9158-2cc37cb4025c-utilities\") pod \"redhat-marketplace-nk8s9\" (UID: \"2a082e4c-9e29-44ce-9158-2cc37cb4025c\") " pod="openshift-marketplace/redhat-marketplace-nk8s9" Jan 22 06:55:03 crc kubenswrapper[4982]: I0122 06:55:03.536423 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmnz8\" (UniqueName: \"kubernetes.io/projected/2a082e4c-9e29-44ce-9158-2cc37cb4025c-kube-api-access-qmnz8\") pod \"redhat-marketplace-nk8s9\" (UID: \"2a082e4c-9e29-44ce-9158-2cc37cb4025c\") " pod="openshift-marketplace/redhat-marketplace-nk8s9" Jan 22 06:55:03 crc kubenswrapper[4982]: I0122 06:55:03.536893 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
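The pod_workers.go entry at 06:54:44.720337 above shows machine-config-daemon already pinned at the kubelet's maximum crash-loop delay ("back-off 5m0s"). A minimal sketch of the documented back-off shape, assuming the standard kubelet defaults of a 10s base delay that doubles per failed restart and caps at 5m; the function is illustrative, not kubelet code:

    package main

    import (
    	"fmt"
    	"time"
    )

    // crashLoopDelay sketches the kubelet's documented restart back-off:
    // 10s base, doubled after each failed restart, capped at 5m0s.
    // Illustrative constants only; this is not the kubelet's implementation.
    func crashLoopDelay(restarts int) time.Duration {
    	delay := 10 * time.Second
    	for i := 0; i < restarts; i++ {
    		delay *= 2
    		if delay >= 5*time.Minute {
    			return 5 * time.Minute
    		}
    	}
    	return delay
    }

    func main() {
    	for r := 0; r <= 5; r++ {
    		fmt.Printf("restart %d -> back-off %v\n", r, crashLoopDelay(r))
    	}
    	// restart 5 -> back-off 5m0s, matching the log's
    	// "back-off 5m0s restarting failed container".
    }

The 06:54:56 RemoveContainer retry and the 06:54:57 ContainerStarted event below are that back-off expiring and the container being restarted.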
\"kubernetes.io/empty-dir/2a082e4c-9e29-44ce-9158-2cc37cb4025c-catalog-content\") pod \"redhat-marketplace-nk8s9\" (UID: \"2a082e4c-9e29-44ce-9158-2cc37cb4025c\") " pod="openshift-marketplace/redhat-marketplace-nk8s9" Jan 22 06:55:03 crc kubenswrapper[4982]: I0122 06:55:03.537209 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a082e4c-9e29-44ce-9158-2cc37cb4025c-utilities\") pod \"redhat-marketplace-nk8s9\" (UID: \"2a082e4c-9e29-44ce-9158-2cc37cb4025c\") " pod="openshift-marketplace/redhat-marketplace-nk8s9" Jan 22 06:55:03 crc kubenswrapper[4982]: I0122 06:55:03.561998 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmnz8\" (UniqueName: \"kubernetes.io/projected/2a082e4c-9e29-44ce-9158-2cc37cb4025c-kube-api-access-qmnz8\") pod \"redhat-marketplace-nk8s9\" (UID: \"2a082e4c-9e29-44ce-9158-2cc37cb4025c\") " pod="openshift-marketplace/redhat-marketplace-nk8s9" Jan 22 06:55:03 crc kubenswrapper[4982]: I0122 06:55:03.588224 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nk8s9" Jan 22 06:55:04 crc kubenswrapper[4982]: I0122 06:55:04.081303 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nk8s9"] Jan 22 06:55:04 crc kubenswrapper[4982]: I0122 06:55:04.811361 4982 generic.go:334] "Generic (PLEG): container finished" podID="2a082e4c-9e29-44ce-9158-2cc37cb4025c" containerID="ac421bcbfaf89599007904b6550ae530be2dcc0185eb6128f1ca4bb210f2c087" exitCode=0 Jan 22 06:55:04 crc kubenswrapper[4982]: I0122 06:55:04.811460 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nk8s9" event={"ID":"2a082e4c-9e29-44ce-9158-2cc37cb4025c","Type":"ContainerDied","Data":"ac421bcbfaf89599007904b6550ae530be2dcc0185eb6128f1ca4bb210f2c087"} Jan 22 06:55:04 crc kubenswrapper[4982]: I0122 06:55:04.811711 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nk8s9" event={"ID":"2a082e4c-9e29-44ce-9158-2cc37cb4025c","Type":"ContainerStarted","Data":"f6046972181846951c266d8153472aa47d41a045c58e5876778cc0c8b9f23fe5"} Jan 22 06:55:05 crc kubenswrapper[4982]: I0122 06:55:05.822019 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nk8s9" event={"ID":"2a082e4c-9e29-44ce-9158-2cc37cb4025c","Type":"ContainerStarted","Data":"d43f5e1ce40dbbc651ce018ac3a1d9113d63e9501ac01af57d829e2befc08b01"} Jan 22 06:55:06 crc kubenswrapper[4982]: I0122 06:55:06.833846 4982 generic.go:334] "Generic (PLEG): container finished" podID="2a082e4c-9e29-44ce-9158-2cc37cb4025c" containerID="d43f5e1ce40dbbc651ce018ac3a1d9113d63e9501ac01af57d829e2befc08b01" exitCode=0 Jan 22 06:55:06 crc kubenswrapper[4982]: I0122 06:55:06.833988 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nk8s9" event={"ID":"2a082e4c-9e29-44ce-9158-2cc37cb4025c","Type":"ContainerDied","Data":"d43f5e1ce40dbbc651ce018ac3a1d9113d63e9501ac01af57d829e2befc08b01"} Jan 22 06:55:07 crc kubenswrapper[4982]: I0122 06:55:07.845517 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nk8s9" event={"ID":"2a082e4c-9e29-44ce-9158-2cc37cb4025c","Type":"ContainerStarted","Data":"e383cd408cbe851292faca2d9714d312cfa4dd94d715017531b965b676008078"} Jan 22 06:55:07 crc kubenswrapper[4982]: I0122 06:55:07.872840 4982 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-nk8s9" podStartSLOduration=2.330130316 podStartE2EDuration="4.872807753s" podCreationTimestamp="2026-01-22 06:55:03 +0000 UTC" firstStartedPulling="2026-01-22 06:55:04.813469253 +0000 UTC m=+4165.652107276" lastFinishedPulling="2026-01-22 06:55:07.35614668 +0000 UTC m=+4168.194784713" observedRunningTime="2026-01-22 06:55:07.864650123 +0000 UTC m=+4168.703288156" watchObservedRunningTime="2026-01-22 06:55:07.872807753 +0000 UTC m=+4168.711445766" Jan 22 06:55:13 crc kubenswrapper[4982]: I0122 06:55:13.588656 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nk8s9" Jan 22 06:55:13 crc kubenswrapper[4982]: I0122 06:55:13.590332 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nk8s9" Jan 22 06:55:13 crc kubenswrapper[4982]: I0122 06:55:13.669286 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nk8s9" Jan 22 06:55:13 crc kubenswrapper[4982]: I0122 06:55:13.985168 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nk8s9" Jan 22 06:55:14 crc kubenswrapper[4982]: I0122 06:55:14.049758 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nk8s9"] Jan 22 06:55:15 crc kubenswrapper[4982]: I0122 06:55:15.944472 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-nk8s9" podUID="2a082e4c-9e29-44ce-9158-2cc37cb4025c" containerName="registry-server" containerID="cri-o://e383cd408cbe851292faca2d9714d312cfa4dd94d715017531b965b676008078" gracePeriod=2 Jan 22 06:55:16 crc kubenswrapper[4982]: I0122 06:55:16.325132 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nk8s9" Jan 22 06:55:16 crc kubenswrapper[4982]: I0122 06:55:16.347833 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a082e4c-9e29-44ce-9158-2cc37cb4025c-utilities\") pod \"2a082e4c-9e29-44ce-9158-2cc37cb4025c\" (UID: \"2a082e4c-9e29-44ce-9158-2cc37cb4025c\") " Jan 22 06:55:16 crc kubenswrapper[4982]: I0122 06:55:16.347904 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a082e4c-9e29-44ce-9158-2cc37cb4025c-catalog-content\") pod \"2a082e4c-9e29-44ce-9158-2cc37cb4025c\" (UID: \"2a082e4c-9e29-44ce-9158-2cc37cb4025c\") " Jan 22 06:55:16 crc kubenswrapper[4982]: I0122 06:55:16.347936 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmnz8\" (UniqueName: \"kubernetes.io/projected/2a082e4c-9e29-44ce-9158-2cc37cb4025c-kube-api-access-qmnz8\") pod \"2a082e4c-9e29-44ce-9158-2cc37cb4025c\" (UID: \"2a082e4c-9e29-44ce-9158-2cc37cb4025c\") " Jan 22 06:55:16 crc kubenswrapper[4982]: I0122 06:55:16.349064 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a082e4c-9e29-44ce-9158-2cc37cb4025c-utilities" (OuterVolumeSpecName: "utilities") pod "2a082e4c-9e29-44ce-9158-2cc37cb4025c" (UID: "2a082e4c-9e29-44ce-9158-2cc37cb4025c"). InnerVolumeSpecName "utilities". 
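The pod_startup_latency_tracker.go entry at 06:55:07.872840 above encodes its own arithmetic: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration subtracts the image-pull window (lastFinishedPulling minus firstStartedPulling) from that. A self-contained check against the logged values (stdlib only; the field relationships are inferred from this one line, so treat them as an observed pattern rather than a spec):

    package main

    import (
    	"fmt"
    	"time"
    )

    const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

    func mustParse(s string) time.Time {
    	t, err := time.Parse(layout, s)
    	if err != nil {
    		panic(err)
    	}
    	return t
    }

    func main() {
    	created := mustParse("2026-01-22 06:55:03 +0000 UTC")
    	firstPull := mustParse("2026-01-22 06:55:04.813469253 +0000 UTC")
    	lastPull := mustParse("2026-01-22 06:55:07.35614668 +0000 UTC")
    	running := mustParse("2026-01-22 06:55:07.872807753 +0000 UTC")

    	e2e := running.Sub(created)     // 4.872807753s, matching podStartE2EDuration
    	pull := lastPull.Sub(firstPull) // 2.542677427s spent pulling images
    	slo := e2e - pull               // ~2.330130326s, matching podStartSLOduration up to ~10ns rounding
    	fmt.Println(e2e, pull, slo)
    }

The pull window lines up with the ContainerDied/ContainerStarted pair above: extract-utilities finished at 06:55:04.8 and the pull completed just before extract-content started at 06:55:07.3.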
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:55:16 crc kubenswrapper[4982]: I0122 06:55:16.393881 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a082e4c-9e29-44ce-9158-2cc37cb4025c-kube-api-access-qmnz8" (OuterVolumeSpecName: "kube-api-access-qmnz8") pod "2a082e4c-9e29-44ce-9158-2cc37cb4025c" (UID: "2a082e4c-9e29-44ce-9158-2cc37cb4025c"). InnerVolumeSpecName "kube-api-access-qmnz8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 06:55:16 crc kubenswrapper[4982]: I0122 06:55:16.449012 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a082e4c-9e29-44ce-9158-2cc37cb4025c-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 06:55:16 crc kubenswrapper[4982]: I0122 06:55:16.449045 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmnz8\" (UniqueName: \"kubernetes.io/projected/2a082e4c-9e29-44ce-9158-2cc37cb4025c-kube-api-access-qmnz8\") on node \"crc\" DevicePath \"\"" Jan 22 06:55:16 crc kubenswrapper[4982]: I0122 06:55:16.492967 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a082e4c-9e29-44ce-9158-2cc37cb4025c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2a082e4c-9e29-44ce-9158-2cc37cb4025c" (UID: "2a082e4c-9e29-44ce-9158-2cc37cb4025c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 06:55:16 crc kubenswrapper[4982]: I0122 06:55:16.550757 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a082e4c-9e29-44ce-9158-2cc37cb4025c-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 06:55:16 crc kubenswrapper[4982]: I0122 06:55:16.955579 4982 generic.go:334] "Generic (PLEG): container finished" podID="2a082e4c-9e29-44ce-9158-2cc37cb4025c" containerID="e383cd408cbe851292faca2d9714d312cfa4dd94d715017531b965b676008078" exitCode=0 Jan 22 06:55:16 crc kubenswrapper[4982]: I0122 06:55:16.955636 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nk8s9" event={"ID":"2a082e4c-9e29-44ce-9158-2cc37cb4025c","Type":"ContainerDied","Data":"e383cd408cbe851292faca2d9714d312cfa4dd94d715017531b965b676008078"} Jan 22 06:55:16 crc kubenswrapper[4982]: I0122 06:55:16.955675 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nk8s9" event={"ID":"2a082e4c-9e29-44ce-9158-2cc37cb4025c","Type":"ContainerDied","Data":"f6046972181846951c266d8153472aa47d41a045c58e5876778cc0c8b9f23fe5"} Jan 22 06:55:16 crc kubenswrapper[4982]: I0122 06:55:16.955715 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nk8s9" Jan 22 06:55:16 crc kubenswrapper[4982]: I0122 06:55:16.955783 4982 scope.go:117] "RemoveContainer" containerID="e383cd408cbe851292faca2d9714d312cfa4dd94d715017531b965b676008078" Jan 22 06:55:16 crc kubenswrapper[4982]: I0122 06:55:16.990564 4982 scope.go:117] "RemoveContainer" containerID="d43f5e1ce40dbbc651ce018ac3a1d9113d63e9501ac01af57d829e2befc08b01" Jan 22 06:55:16 crc kubenswrapper[4982]: I0122 06:55:16.994406 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nk8s9"] Jan 22 06:55:17 crc kubenswrapper[4982]: I0122 06:55:17.013459 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-nk8s9"] Jan 22 06:55:17 crc kubenswrapper[4982]: I0122 06:55:17.014427 4982 scope.go:117] "RemoveContainer" containerID="ac421bcbfaf89599007904b6550ae530be2dcc0185eb6128f1ca4bb210f2c087" Jan 22 06:55:17 crc kubenswrapper[4982]: I0122 06:55:17.035064 4982 scope.go:117] "RemoveContainer" containerID="e383cd408cbe851292faca2d9714d312cfa4dd94d715017531b965b676008078" Jan 22 06:55:17 crc kubenswrapper[4982]: E0122 06:55:17.035580 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e383cd408cbe851292faca2d9714d312cfa4dd94d715017531b965b676008078\": container with ID starting with e383cd408cbe851292faca2d9714d312cfa4dd94d715017531b965b676008078 not found: ID does not exist" containerID="e383cd408cbe851292faca2d9714d312cfa4dd94d715017531b965b676008078" Jan 22 06:55:17 crc kubenswrapper[4982]: I0122 06:55:17.035656 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e383cd408cbe851292faca2d9714d312cfa4dd94d715017531b965b676008078"} err="failed to get container status \"e383cd408cbe851292faca2d9714d312cfa4dd94d715017531b965b676008078\": rpc error: code = NotFound desc = could not find container \"e383cd408cbe851292faca2d9714d312cfa4dd94d715017531b965b676008078\": container with ID starting with e383cd408cbe851292faca2d9714d312cfa4dd94d715017531b965b676008078 not found: ID does not exist" Jan 22 06:55:17 crc kubenswrapper[4982]: I0122 06:55:17.035693 4982 scope.go:117] "RemoveContainer" containerID="d43f5e1ce40dbbc651ce018ac3a1d9113d63e9501ac01af57d829e2befc08b01" Jan 22 06:55:17 crc kubenswrapper[4982]: E0122 06:55:17.036247 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d43f5e1ce40dbbc651ce018ac3a1d9113d63e9501ac01af57d829e2befc08b01\": container with ID starting with d43f5e1ce40dbbc651ce018ac3a1d9113d63e9501ac01af57d829e2befc08b01 not found: ID does not exist" containerID="d43f5e1ce40dbbc651ce018ac3a1d9113d63e9501ac01af57d829e2befc08b01" Jan 22 06:55:17 crc kubenswrapper[4982]: I0122 06:55:17.036318 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d43f5e1ce40dbbc651ce018ac3a1d9113d63e9501ac01af57d829e2befc08b01"} err="failed to get container status \"d43f5e1ce40dbbc651ce018ac3a1d9113d63e9501ac01af57d829e2befc08b01\": rpc error: code = NotFound desc = could not find container \"d43f5e1ce40dbbc651ce018ac3a1d9113d63e9501ac01af57d829e2befc08b01\": container with ID starting with d43f5e1ce40dbbc651ce018ac3a1d9113d63e9501ac01af57d829e2befc08b01 not found: ID does not exist" Jan 22 06:55:17 crc kubenswrapper[4982]: I0122 06:55:17.036337 4982 scope.go:117] "RemoveContainer" 
containerID="ac421bcbfaf89599007904b6550ae530be2dcc0185eb6128f1ca4bb210f2c087" Jan 22 06:55:17 crc kubenswrapper[4982]: E0122 06:55:17.036782 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac421bcbfaf89599007904b6550ae530be2dcc0185eb6128f1ca4bb210f2c087\": container with ID starting with ac421bcbfaf89599007904b6550ae530be2dcc0185eb6128f1ca4bb210f2c087 not found: ID does not exist" containerID="ac421bcbfaf89599007904b6550ae530be2dcc0185eb6128f1ca4bb210f2c087" Jan 22 06:55:17 crc kubenswrapper[4982]: I0122 06:55:17.036836 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac421bcbfaf89599007904b6550ae530be2dcc0185eb6128f1ca4bb210f2c087"} err="failed to get container status \"ac421bcbfaf89599007904b6550ae530be2dcc0185eb6128f1ca4bb210f2c087\": rpc error: code = NotFound desc = could not find container \"ac421bcbfaf89599007904b6550ae530be2dcc0185eb6128f1ca4bb210f2c087\": container with ID starting with ac421bcbfaf89599007904b6550ae530be2dcc0185eb6128f1ca4bb210f2c087 not found: ID does not exist" Jan 22 06:55:17 crc kubenswrapper[4982]: I0122 06:55:17.737337 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a082e4c-9e29-44ce-9158-2cc37cb4025c" path="/var/lib/kubelet/pods/2a082e4c-9e29-44ce-9158-2cc37cb4025c/volumes" Jan 22 06:57:18 crc kubenswrapper[4982]: I0122 06:57:18.973971 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 06:57:18 crc kubenswrapper[4982]: I0122 06:57:18.974568 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 06:57:48 crc kubenswrapper[4982]: I0122 06:57:48.974414 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 06:57:48 crc kubenswrapper[4982]: I0122 06:57:48.975019 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 06:58:18 crc kubenswrapper[4982]: I0122 06:58:18.974541 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 06:58:18 crc kubenswrapper[4982]: I0122 06:58:18.975242 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 06:58:18 crc kubenswrapper[4982]: I0122 06:58:18.975305 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 06:58:18 crc kubenswrapper[4982]: I0122 06:58:18.976240 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0b646ab2c504c7ede8f740de5f61a1d91ecd8f99f3e53460c23f3d8a0da10a91"} pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 06:58:18 crc kubenswrapper[4982]: I0122 06:58:18.976356 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" containerID="cri-o://0b646ab2c504c7ede8f740de5f61a1d91ecd8f99f3e53460c23f3d8a0da10a91" gracePeriod=600 Jan 22 06:58:20 crc kubenswrapper[4982]: I0122 06:58:20.539979 4982 generic.go:334] "Generic (PLEG): container finished" podID="2829369e-72ba-4637-853b-88f5cf242a0e" containerID="0b646ab2c504c7ede8f740de5f61a1d91ecd8f99f3e53460c23f3d8a0da10a91" exitCode=0 Jan 22 06:58:20 crc kubenswrapper[4982]: I0122 06:58:20.540045 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerDied","Data":"0b646ab2c504c7ede8f740de5f61a1d91ecd8f99f3e53460c23f3d8a0da10a91"} Jan 22 06:58:20 crc kubenswrapper[4982]: I0122 06:58:20.540337 4982 scope.go:117] "RemoveContainer" containerID="2bc401c1226a58ddc68be49a09047e96ed0187d9d6cd02a4e3dfcfc5251282c2" Jan 22 06:58:21 crc kubenswrapper[4982]: I0122 06:58:21.550655 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"800a63134ac3b413ea38ff457c1f2e665d54754e82057e18f9f0b05326428673"} Jan 22 06:59:48 crc kubenswrapper[4982]: I0122 06:59:48.920671 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mb8q5"] Jan 22 06:59:48 crc kubenswrapper[4982]: E0122 06:59:48.921977 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a082e4c-9e29-44ce-9158-2cc37cb4025c" containerName="extract-content" Jan 22 06:59:48 crc kubenswrapper[4982]: I0122 06:59:48.922009 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a082e4c-9e29-44ce-9158-2cc37cb4025c" containerName="extract-content" Jan 22 06:59:48 crc kubenswrapper[4982]: E0122 06:59:48.922056 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a082e4c-9e29-44ce-9158-2cc37cb4025c" containerName="registry-server" Jan 22 06:59:48 crc kubenswrapper[4982]: I0122 06:59:48.922072 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a082e4c-9e29-44ce-9158-2cc37cb4025c" containerName="registry-server" Jan 22 06:59:48 crc kubenswrapper[4982]: E0122 06:59:48.922107 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a082e4c-9e29-44ce-9158-2cc37cb4025c" containerName="extract-utilities" Jan 22 06:59:48 crc kubenswrapper[4982]: I0122 06:59:48.922126 4982 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="2a082e4c-9e29-44ce-9158-2cc37cb4025c" containerName="extract-utilities" Jan 22 06:59:48 crc kubenswrapper[4982]: I0122 06:59:48.922463 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a082e4c-9e29-44ce-9158-2cc37cb4025c" containerName="registry-server" Jan 22 06:59:48 crc kubenswrapper[4982]: I0122 06:59:48.924671 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mb8q5" Jan 22 06:59:48 crc kubenswrapper[4982]: I0122 06:59:48.947200 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mb8q5"] Jan 22 06:59:49 crc kubenswrapper[4982]: I0122 06:59:49.072422 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06085b1c-55e5-41ef-be5d-10d6c689c40f-utilities\") pod \"certified-operators-mb8q5\" (UID: \"06085b1c-55e5-41ef-be5d-10d6c689c40f\") " pod="openshift-marketplace/certified-operators-mb8q5" Jan 22 06:59:49 crc kubenswrapper[4982]: I0122 06:59:49.072634 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06085b1c-55e5-41ef-be5d-10d6c689c40f-catalog-content\") pod \"certified-operators-mb8q5\" (UID: \"06085b1c-55e5-41ef-be5d-10d6c689c40f\") " pod="openshift-marketplace/certified-operators-mb8q5" Jan 22 06:59:49 crc kubenswrapper[4982]: I0122 06:59:49.072758 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v72lj\" (UniqueName: \"kubernetes.io/projected/06085b1c-55e5-41ef-be5d-10d6c689c40f-kube-api-access-v72lj\") pod \"certified-operators-mb8q5\" (UID: \"06085b1c-55e5-41ef-be5d-10d6c689c40f\") " pod="openshift-marketplace/certified-operators-mb8q5" Jan 22 06:59:49 crc kubenswrapper[4982]: I0122 06:59:49.174023 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v72lj\" (UniqueName: \"kubernetes.io/projected/06085b1c-55e5-41ef-be5d-10d6c689c40f-kube-api-access-v72lj\") pod \"certified-operators-mb8q5\" (UID: \"06085b1c-55e5-41ef-be5d-10d6c689c40f\") " pod="openshift-marketplace/certified-operators-mb8q5" Jan 22 06:59:49 crc kubenswrapper[4982]: I0122 06:59:49.174398 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06085b1c-55e5-41ef-be5d-10d6c689c40f-utilities\") pod \"certified-operators-mb8q5\" (UID: \"06085b1c-55e5-41ef-be5d-10d6c689c40f\") " pod="openshift-marketplace/certified-operators-mb8q5" Jan 22 06:59:49 crc kubenswrapper[4982]: I0122 06:59:49.174440 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06085b1c-55e5-41ef-be5d-10d6c689c40f-catalog-content\") pod \"certified-operators-mb8q5\" (UID: \"06085b1c-55e5-41ef-be5d-10d6c689c40f\") " pod="openshift-marketplace/certified-operators-mb8q5" Jan 22 06:59:49 crc kubenswrapper[4982]: I0122 06:59:49.174835 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06085b1c-55e5-41ef-be5d-10d6c689c40f-utilities\") pod \"certified-operators-mb8q5\" (UID: \"06085b1c-55e5-41ef-be5d-10d6c689c40f\") " pod="openshift-marketplace/certified-operators-mb8q5" Jan 22 06:59:49 crc kubenswrapper[4982]: I0122 06:59:49.174877 4982 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06085b1c-55e5-41ef-be5d-10d6c689c40f-catalog-content\") pod \"certified-operators-mb8q5\" (UID: \"06085b1c-55e5-41ef-be5d-10d6c689c40f\") " pod="openshift-marketplace/certified-operators-mb8q5" Jan 22 06:59:49 crc kubenswrapper[4982]: I0122 06:59:49.452642 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v72lj\" (UniqueName: \"kubernetes.io/projected/06085b1c-55e5-41ef-be5d-10d6c689c40f-kube-api-access-v72lj\") pod \"certified-operators-mb8q5\" (UID: \"06085b1c-55e5-41ef-be5d-10d6c689c40f\") " pod="openshift-marketplace/certified-operators-mb8q5" Jan 22 06:59:49 crc kubenswrapper[4982]: I0122 06:59:49.547722 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mb8q5" Jan 22 06:59:49 crc kubenswrapper[4982]: I0122 06:59:49.968750 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mb8q5"] Jan 22 06:59:49 crc kubenswrapper[4982]: W0122 06:59:49.973958 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod06085b1c_55e5_41ef_be5d_10d6c689c40f.slice/crio-9efed1d0e3b1cb10277021a970788210d12967fda696eca01a44b253078c90be WatchSource:0}: Error finding container 9efed1d0e3b1cb10277021a970788210d12967fda696eca01a44b253078c90be: Status 404 returned error can't find the container with id 9efed1d0e3b1cb10277021a970788210d12967fda696eca01a44b253078c90be Jan 22 06:59:50 crc kubenswrapper[4982]: I0122 06:59:50.315557 4982 generic.go:334] "Generic (PLEG): container finished" podID="06085b1c-55e5-41ef-be5d-10d6c689c40f" containerID="1d578406888530c466025981faa00b40a3854391239d0c21eb9cdfe68fcf8b65" exitCode=0 Jan 22 06:59:50 crc kubenswrapper[4982]: I0122 06:59:50.315606 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mb8q5" event={"ID":"06085b1c-55e5-41ef-be5d-10d6c689c40f","Type":"ContainerDied","Data":"1d578406888530c466025981faa00b40a3854391239d0c21eb9cdfe68fcf8b65"} Jan 22 06:59:50 crc kubenswrapper[4982]: I0122 06:59:50.315658 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mb8q5" event={"ID":"06085b1c-55e5-41ef-be5d-10d6c689c40f","Type":"ContainerStarted","Data":"9efed1d0e3b1cb10277021a970788210d12967fda696eca01a44b253078c90be"} Jan 22 06:59:50 crc kubenswrapper[4982]: I0122 06:59:50.317645 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 06:59:55 crc kubenswrapper[4982]: I0122 06:59:55.367352 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mb8q5" event={"ID":"06085b1c-55e5-41ef-be5d-10d6c689c40f","Type":"ContainerStarted","Data":"90eb4fc06cb41a652cb624bec191295e38b73bfd0a561d6f04aabe1cfece6728"} Jan 22 06:59:56 crc kubenswrapper[4982]: I0122 06:59:56.376461 4982 generic.go:334] "Generic (PLEG): container finished" podID="06085b1c-55e5-41ef-be5d-10d6c689c40f" containerID="90eb4fc06cb41a652cb624bec191295e38b73bfd0a561d6f04aabe1cfece6728" exitCode=0 Jan 22 06:59:56 crc kubenswrapper[4982]: I0122 06:59:56.376515 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mb8q5" 
event={"ID":"06085b1c-55e5-41ef-be5d-10d6c689c40f","Type":"ContainerDied","Data":"90eb4fc06cb41a652cb624bec191295e38b73bfd0a561d6f04aabe1cfece6728"} Jan 22 06:59:58 crc kubenswrapper[4982]: I0122 06:59:58.410555 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mb8q5" event={"ID":"06085b1c-55e5-41ef-be5d-10d6c689c40f","Type":"ContainerStarted","Data":"82ccbe75d6583781e0aa2ae762ef43a9fa0cfe4351a2b22783f2e88b0930d810"} Jan 22 06:59:58 crc kubenswrapper[4982]: I0122 06:59:58.441925 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mb8q5" podStartSLOduration=2.731840172 podStartE2EDuration="10.441897375s" podCreationTimestamp="2026-01-22 06:59:48 +0000 UTC" firstStartedPulling="2026-01-22 06:59:50.317103064 +0000 UTC m=+4451.155741067" lastFinishedPulling="2026-01-22 06:59:58.027160267 +0000 UTC m=+4458.865798270" observedRunningTime="2026-01-22 06:59:58.43649492 +0000 UTC m=+4459.275132923" watchObservedRunningTime="2026-01-22 06:59:58.441897375 +0000 UTC m=+4459.280535408" Jan 22 06:59:59 crc kubenswrapper[4982]: I0122 06:59:59.548001 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mb8q5" Jan 22 06:59:59 crc kubenswrapper[4982]: I0122 06:59:59.548098 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mb8q5" Jan 22 07:00:00 crc kubenswrapper[4982]: I0122 07:00:00.172402 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484420-jt227"] Jan 22 07:00:00 crc kubenswrapper[4982]: I0122 07:00:00.173411 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484420-jt227" Jan 22 07:00:00 crc kubenswrapper[4982]: I0122 07:00:00.176230 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 22 07:00:00 crc kubenswrapper[4982]: I0122 07:00:00.176431 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 22 07:00:00 crc kubenswrapper[4982]: I0122 07:00:00.188786 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484420-jt227"] Jan 22 07:00:00 crc kubenswrapper[4982]: I0122 07:00:00.262694 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/eb8519a1-7d8c-4f8d-a98d-640f74ff041b-secret-volume\") pod \"collect-profiles-29484420-jt227\" (UID: \"eb8519a1-7d8c-4f8d-a98d-640f74ff041b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484420-jt227" Jan 22 07:00:00 crc kubenswrapper[4982]: I0122 07:00:00.262737 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d98xs\" (UniqueName: \"kubernetes.io/projected/eb8519a1-7d8c-4f8d-a98d-640f74ff041b-kube-api-access-d98xs\") pod \"collect-profiles-29484420-jt227\" (UID: \"eb8519a1-7d8c-4f8d-a98d-640f74ff041b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484420-jt227" Jan 22 07:00:00 crc kubenswrapper[4982]: I0122 07:00:00.262768 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eb8519a1-7d8c-4f8d-a98d-640f74ff041b-config-volume\") pod \"collect-profiles-29484420-jt227\" (UID: \"eb8519a1-7d8c-4f8d-a98d-640f74ff041b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484420-jt227" Jan 22 07:00:00 crc kubenswrapper[4982]: I0122 07:00:00.363275 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/eb8519a1-7d8c-4f8d-a98d-640f74ff041b-secret-volume\") pod \"collect-profiles-29484420-jt227\" (UID: \"eb8519a1-7d8c-4f8d-a98d-640f74ff041b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484420-jt227" Jan 22 07:00:00 crc kubenswrapper[4982]: I0122 07:00:00.363339 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d98xs\" (UniqueName: \"kubernetes.io/projected/eb8519a1-7d8c-4f8d-a98d-640f74ff041b-kube-api-access-d98xs\") pod \"collect-profiles-29484420-jt227\" (UID: \"eb8519a1-7d8c-4f8d-a98d-640f74ff041b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484420-jt227" Jan 22 07:00:00 crc kubenswrapper[4982]: I0122 07:00:00.363379 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eb8519a1-7d8c-4f8d-a98d-640f74ff041b-config-volume\") pod \"collect-profiles-29484420-jt227\" (UID: \"eb8519a1-7d8c-4f8d-a98d-640f74ff041b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484420-jt227" Jan 22 07:00:00 crc kubenswrapper[4982]: I0122 07:00:00.364660 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eb8519a1-7d8c-4f8d-a98d-640f74ff041b-config-volume\") pod 
\"collect-profiles-29484420-jt227\" (UID: \"eb8519a1-7d8c-4f8d-a98d-640f74ff041b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484420-jt227" Jan 22 07:00:00 crc kubenswrapper[4982]: I0122 07:00:00.374060 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/eb8519a1-7d8c-4f8d-a98d-640f74ff041b-secret-volume\") pod \"collect-profiles-29484420-jt227\" (UID: \"eb8519a1-7d8c-4f8d-a98d-640f74ff041b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484420-jt227" Jan 22 07:00:00 crc kubenswrapper[4982]: I0122 07:00:00.383685 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d98xs\" (UniqueName: \"kubernetes.io/projected/eb8519a1-7d8c-4f8d-a98d-640f74ff041b-kube-api-access-d98xs\") pod \"collect-profiles-29484420-jt227\" (UID: \"eb8519a1-7d8c-4f8d-a98d-640f74ff041b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484420-jt227" Jan 22 07:00:00 crc kubenswrapper[4982]: I0122 07:00:00.498708 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484420-jt227" Jan 22 07:00:00 crc kubenswrapper[4982]: I0122 07:00:00.623980 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-mb8q5" podUID="06085b1c-55e5-41ef-be5d-10d6c689c40f" containerName="registry-server" probeResult="failure" output=< Jan 22 07:00:00 crc kubenswrapper[4982]: timeout: failed to connect service ":50051" within 1s Jan 22 07:00:00 crc kubenswrapper[4982]: > Jan 22 07:00:00 crc kubenswrapper[4982]: I0122 07:00:00.947490 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484420-jt227"] Jan 22 07:00:01 crc kubenswrapper[4982]: I0122 07:00:01.432891 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484420-jt227" event={"ID":"eb8519a1-7d8c-4f8d-a98d-640f74ff041b","Type":"ContainerStarted","Data":"ecd3dcc1e3671f9730c3bd03dd8310cc21f99f5e723ecea5e2d22596ed3979d2"} Jan 22 07:00:01 crc kubenswrapper[4982]: I0122 07:00:01.433222 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484420-jt227" event={"ID":"eb8519a1-7d8c-4f8d-a98d-640f74ff041b","Type":"ContainerStarted","Data":"c78cd0bf72650b9ffd08c651dc4c0ac8f4a5c63ccab0a61a06e4aa067f8f724a"} Jan 22 07:00:02 crc kubenswrapper[4982]: I0122 07:00:02.444321 4982 generic.go:334] "Generic (PLEG): container finished" podID="eb8519a1-7d8c-4f8d-a98d-640f74ff041b" containerID="ecd3dcc1e3671f9730c3bd03dd8310cc21f99f5e723ecea5e2d22596ed3979d2" exitCode=0 Jan 22 07:00:02 crc kubenswrapper[4982]: I0122 07:00:02.444492 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484420-jt227" event={"ID":"eb8519a1-7d8c-4f8d-a98d-640f74ff041b","Type":"ContainerDied","Data":"ecd3dcc1e3671f9730c3bd03dd8310cc21f99f5e723ecea5e2d22596ed3979d2"} Jan 22 07:00:03 crc kubenswrapper[4982]: I0122 07:00:03.712683 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484420-jt227" Jan 22 07:00:03 crc kubenswrapper[4982]: I0122 07:00:03.719130 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/eb8519a1-7d8c-4f8d-a98d-640f74ff041b-secret-volume\") pod \"eb8519a1-7d8c-4f8d-a98d-640f74ff041b\" (UID: \"eb8519a1-7d8c-4f8d-a98d-640f74ff041b\") " Jan 22 07:00:03 crc kubenswrapper[4982]: I0122 07:00:03.719250 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eb8519a1-7d8c-4f8d-a98d-640f74ff041b-config-volume\") pod \"eb8519a1-7d8c-4f8d-a98d-640f74ff041b\" (UID: \"eb8519a1-7d8c-4f8d-a98d-640f74ff041b\") " Jan 22 07:00:03 crc kubenswrapper[4982]: I0122 07:00:03.719416 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d98xs\" (UniqueName: \"kubernetes.io/projected/eb8519a1-7d8c-4f8d-a98d-640f74ff041b-kube-api-access-d98xs\") pod \"eb8519a1-7d8c-4f8d-a98d-640f74ff041b\" (UID: \"eb8519a1-7d8c-4f8d-a98d-640f74ff041b\") " Jan 22 07:00:03 crc kubenswrapper[4982]: I0122 07:00:03.720460 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb8519a1-7d8c-4f8d-a98d-640f74ff041b-config-volume" (OuterVolumeSpecName: "config-volume") pod "eb8519a1-7d8c-4f8d-a98d-640f74ff041b" (UID: "eb8519a1-7d8c-4f8d-a98d-640f74ff041b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:00:03 crc kubenswrapper[4982]: I0122 07:00:03.727405 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb8519a1-7d8c-4f8d-a98d-640f74ff041b-kube-api-access-d98xs" (OuterVolumeSpecName: "kube-api-access-d98xs") pod "eb8519a1-7d8c-4f8d-a98d-640f74ff041b" (UID: "eb8519a1-7d8c-4f8d-a98d-640f74ff041b"). InnerVolumeSpecName "kube-api-access-d98xs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:00:03 crc kubenswrapper[4982]: I0122 07:00:03.728528 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb8519a1-7d8c-4f8d-a98d-640f74ff041b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "eb8519a1-7d8c-4f8d-a98d-640f74ff041b" (UID: "eb8519a1-7d8c-4f8d-a98d-640f74ff041b"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:00:03 crc kubenswrapper[4982]: I0122 07:00:03.822181 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d98xs\" (UniqueName: \"kubernetes.io/projected/eb8519a1-7d8c-4f8d-a98d-640f74ff041b-kube-api-access-d98xs\") on node \"crc\" DevicePath \"\"" Jan 22 07:00:03 crc kubenswrapper[4982]: I0122 07:00:03.822609 4982 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/eb8519a1-7d8c-4f8d-a98d-640f74ff041b-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 22 07:00:03 crc kubenswrapper[4982]: I0122 07:00:03.822659 4982 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eb8519a1-7d8c-4f8d-a98d-640f74ff041b-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 07:00:04 crc kubenswrapper[4982]: I0122 07:00:04.461253 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484420-jt227" event={"ID":"eb8519a1-7d8c-4f8d-a98d-640f74ff041b","Type":"ContainerDied","Data":"c78cd0bf72650b9ffd08c651dc4c0ac8f4a5c63ccab0a61a06e4aa067f8f724a"} Jan 22 07:00:04 crc kubenswrapper[4982]: I0122 07:00:04.461580 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c78cd0bf72650b9ffd08c651dc4c0ac8f4a5c63ccab0a61a06e4aa067f8f724a" Jan 22 07:00:04 crc kubenswrapper[4982]: I0122 07:00:04.461364 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484420-jt227" Jan 22 07:00:04 crc kubenswrapper[4982]: I0122 07:00:04.519683 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484375-79vqz"] Jan 22 07:00:04 crc kubenswrapper[4982]: I0122 07:00:04.524629 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484375-79vqz"] Jan 22 07:00:05 crc kubenswrapper[4982]: I0122 07:00:05.730703 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b867dfc-0973-4a91-b2f7-6171931673d5" path="/var/lib/kubelet/pods/2b867dfc-0973-4a91-b2f7-6171931673d5/volumes" Jan 22 07:00:09 crc kubenswrapper[4982]: I0122 07:00:09.620571 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mb8q5" Jan 22 07:00:09 crc kubenswrapper[4982]: I0122 07:00:09.698474 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mb8q5" Jan 22 07:00:09 crc kubenswrapper[4982]: I0122 07:00:09.863309 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mb8q5"] Jan 22 07:00:11 crc kubenswrapper[4982]: I0122 07:00:11.515947 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mb8q5" podUID="06085b1c-55e5-41ef-be5d-10d6c689c40f" containerName="registry-server" containerID="cri-o://82ccbe75d6583781e0aa2ae762ef43a9fa0cfe4351a2b22783f2e88b0930d810" gracePeriod=2 Jan 22 07:00:12 crc kubenswrapper[4982]: I0122 07:00:12.412783 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mb8q5" Jan 22 07:00:12 crc kubenswrapper[4982]: I0122 07:00:12.535829 4982 generic.go:334] "Generic (PLEG): container finished" podID="06085b1c-55e5-41ef-be5d-10d6c689c40f" containerID="82ccbe75d6583781e0aa2ae762ef43a9fa0cfe4351a2b22783f2e88b0930d810" exitCode=0 Jan 22 07:00:12 crc kubenswrapper[4982]: I0122 07:00:12.535909 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mb8q5" event={"ID":"06085b1c-55e5-41ef-be5d-10d6c689c40f","Type":"ContainerDied","Data":"82ccbe75d6583781e0aa2ae762ef43a9fa0cfe4351a2b22783f2e88b0930d810"} Jan 22 07:00:12 crc kubenswrapper[4982]: I0122 07:00:12.537117 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mb8q5" event={"ID":"06085b1c-55e5-41ef-be5d-10d6c689c40f","Type":"ContainerDied","Data":"9efed1d0e3b1cb10277021a970788210d12967fda696eca01a44b253078c90be"} Jan 22 07:00:12 crc kubenswrapper[4982]: I0122 07:00:12.535969 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mb8q5" Jan 22 07:00:12 crc kubenswrapper[4982]: I0122 07:00:12.537169 4982 scope.go:117] "RemoveContainer" containerID="82ccbe75d6583781e0aa2ae762ef43a9fa0cfe4351a2b22783f2e88b0930d810" Jan 22 07:00:12 crc kubenswrapper[4982]: I0122 07:00:12.548518 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v72lj\" (UniqueName: \"kubernetes.io/projected/06085b1c-55e5-41ef-be5d-10d6c689c40f-kube-api-access-v72lj\") pod \"06085b1c-55e5-41ef-be5d-10d6c689c40f\" (UID: \"06085b1c-55e5-41ef-be5d-10d6c689c40f\") " Jan 22 07:00:12 crc kubenswrapper[4982]: I0122 07:00:12.548595 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06085b1c-55e5-41ef-be5d-10d6c689c40f-utilities\") pod \"06085b1c-55e5-41ef-be5d-10d6c689c40f\" (UID: \"06085b1c-55e5-41ef-be5d-10d6c689c40f\") " Jan 22 07:00:12 crc kubenswrapper[4982]: I0122 07:00:12.548742 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06085b1c-55e5-41ef-be5d-10d6c689c40f-catalog-content\") pod \"06085b1c-55e5-41ef-be5d-10d6c689c40f\" (UID: \"06085b1c-55e5-41ef-be5d-10d6c689c40f\") " Jan 22 07:00:12 crc kubenswrapper[4982]: I0122 07:00:12.551414 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06085b1c-55e5-41ef-be5d-10d6c689c40f-utilities" (OuterVolumeSpecName: "utilities") pod "06085b1c-55e5-41ef-be5d-10d6c689c40f" (UID: "06085b1c-55e5-41ef-be5d-10d6c689c40f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:00:12 crc kubenswrapper[4982]: I0122 07:00:12.553551 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06085b1c-55e5-41ef-be5d-10d6c689c40f-kube-api-access-v72lj" (OuterVolumeSpecName: "kube-api-access-v72lj") pod "06085b1c-55e5-41ef-be5d-10d6c689c40f" (UID: "06085b1c-55e5-41ef-be5d-10d6c689c40f"). InnerVolumeSpecName "kube-api-access-v72lj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:00:12 crc kubenswrapper[4982]: I0122 07:00:12.558215 4982 scope.go:117] "RemoveContainer" containerID="90eb4fc06cb41a652cb624bec191295e38b73bfd0a561d6f04aabe1cfece6728" Jan 22 07:00:12 crc kubenswrapper[4982]: I0122 07:00:12.592871 4982 scope.go:117] "RemoveContainer" containerID="1d578406888530c466025981faa00b40a3854391239d0c21eb9cdfe68fcf8b65" Jan 22 07:00:12 crc kubenswrapper[4982]: I0122 07:00:12.603579 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06085b1c-55e5-41ef-be5d-10d6c689c40f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "06085b1c-55e5-41ef-be5d-10d6c689c40f" (UID: "06085b1c-55e5-41ef-be5d-10d6c689c40f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:00:12 crc kubenswrapper[4982]: I0122 07:00:12.619581 4982 scope.go:117] "RemoveContainer" containerID="82ccbe75d6583781e0aa2ae762ef43a9fa0cfe4351a2b22783f2e88b0930d810" Jan 22 07:00:12 crc kubenswrapper[4982]: E0122 07:00:12.620225 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"82ccbe75d6583781e0aa2ae762ef43a9fa0cfe4351a2b22783f2e88b0930d810\": container with ID starting with 82ccbe75d6583781e0aa2ae762ef43a9fa0cfe4351a2b22783f2e88b0930d810 not found: ID does not exist" containerID="82ccbe75d6583781e0aa2ae762ef43a9fa0cfe4351a2b22783f2e88b0930d810" Jan 22 07:00:12 crc kubenswrapper[4982]: I0122 07:00:12.620382 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"82ccbe75d6583781e0aa2ae762ef43a9fa0cfe4351a2b22783f2e88b0930d810"} err="failed to get container status \"82ccbe75d6583781e0aa2ae762ef43a9fa0cfe4351a2b22783f2e88b0930d810\": rpc error: code = NotFound desc = could not find container \"82ccbe75d6583781e0aa2ae762ef43a9fa0cfe4351a2b22783f2e88b0930d810\": container with ID starting with 82ccbe75d6583781e0aa2ae762ef43a9fa0cfe4351a2b22783f2e88b0930d810 not found: ID does not exist" Jan 22 07:00:12 crc kubenswrapper[4982]: I0122 07:00:12.620549 4982 scope.go:117] "RemoveContainer" containerID="90eb4fc06cb41a652cb624bec191295e38b73bfd0a561d6f04aabe1cfece6728" Jan 22 07:00:12 crc kubenswrapper[4982]: E0122 07:00:12.621068 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90eb4fc06cb41a652cb624bec191295e38b73bfd0a561d6f04aabe1cfece6728\": container with ID starting with 90eb4fc06cb41a652cb624bec191295e38b73bfd0a561d6f04aabe1cfece6728 not found: ID does not exist" containerID="90eb4fc06cb41a652cb624bec191295e38b73bfd0a561d6f04aabe1cfece6728" Jan 22 07:00:12 crc kubenswrapper[4982]: I0122 07:00:12.621119 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90eb4fc06cb41a652cb624bec191295e38b73bfd0a561d6f04aabe1cfece6728"} err="failed to get container status \"90eb4fc06cb41a652cb624bec191295e38b73bfd0a561d6f04aabe1cfece6728\": rpc error: code = NotFound desc = could not find container \"90eb4fc06cb41a652cb624bec191295e38b73bfd0a561d6f04aabe1cfece6728\": container with ID starting with 90eb4fc06cb41a652cb624bec191295e38b73bfd0a561d6f04aabe1cfece6728 not found: ID does not exist" Jan 22 07:00:12 crc kubenswrapper[4982]: I0122 07:00:12.621153 4982 scope.go:117] "RemoveContainer" containerID="1d578406888530c466025981faa00b40a3854391239d0c21eb9cdfe68fcf8b65" Jan 22 07:00:12 crc kubenswrapper[4982]: 
E0122 07:00:12.621482 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d578406888530c466025981faa00b40a3854391239d0c21eb9cdfe68fcf8b65\": container with ID starting with 1d578406888530c466025981faa00b40a3854391239d0c21eb9cdfe68fcf8b65 not found: ID does not exist" containerID="1d578406888530c466025981faa00b40a3854391239d0c21eb9cdfe68fcf8b65" Jan 22 07:00:12 crc kubenswrapper[4982]: I0122 07:00:12.621507 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d578406888530c466025981faa00b40a3854391239d0c21eb9cdfe68fcf8b65"} err="failed to get container status \"1d578406888530c466025981faa00b40a3854391239d0c21eb9cdfe68fcf8b65\": rpc error: code = NotFound desc = could not find container \"1d578406888530c466025981faa00b40a3854391239d0c21eb9cdfe68fcf8b65\": container with ID starting with 1d578406888530c466025981faa00b40a3854391239d0c21eb9cdfe68fcf8b65 not found: ID does not exist" Jan 22 07:00:12 crc kubenswrapper[4982]: I0122 07:00:12.650628 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06085b1c-55e5-41ef-be5d-10d6c689c40f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 07:00:12 crc kubenswrapper[4982]: I0122 07:00:12.650977 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v72lj\" (UniqueName: \"kubernetes.io/projected/06085b1c-55e5-41ef-be5d-10d6c689c40f-kube-api-access-v72lj\") on node \"crc\" DevicePath \"\"" Jan 22 07:00:12 crc kubenswrapper[4982]: I0122 07:00:12.651041 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06085b1c-55e5-41ef-be5d-10d6c689c40f-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 07:00:12 crc kubenswrapper[4982]: I0122 07:00:12.886147 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mb8q5"] Jan 22 07:00:12 crc kubenswrapper[4982]: I0122 07:00:12.893513 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mb8q5"] Jan 22 07:00:13 crc kubenswrapper[4982]: I0122 07:00:13.735479 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06085b1c-55e5-41ef-be5d-10d6c689c40f" path="/var/lib/kubelet/pods/06085b1c-55e5-41ef-be5d-10d6c689c40f/volumes" Jan 22 07:00:48 crc kubenswrapper[4982]: I0122 07:00:48.973917 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 07:00:48 crc kubenswrapper[4982]: I0122 07:00:48.974444 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 07:01:02 crc kubenswrapper[4982]: I0122 07:01:02.304096 4982 scope.go:117] "RemoveContainer" containerID="75c8df85e10d23282ba8c88a36c747457aca29e85c8fb1a5525fbb0ae643a2d6" Jan 22 07:01:18 crc kubenswrapper[4982]: I0122 07:01:18.974443 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon 
Jan 22 07:01:18 crc kubenswrapper[4982]: I0122 07:01:18.975019 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 07:01:27 crc kubenswrapper[4982]: I0122 07:01:27.024362 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-hvxgx"]
Jan 22 07:01:27 crc kubenswrapper[4982]: I0122 07:01:27.032017 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-hvxgx"]
Jan 22 07:01:27 crc kubenswrapper[4982]: I0122 07:01:27.142818 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-dsrxb"]
Jan 22 07:01:27 crc kubenswrapper[4982]: E0122 07:01:27.143174 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06085b1c-55e5-41ef-be5d-10d6c689c40f" containerName="extract-utilities"
Jan 22 07:01:27 crc kubenswrapper[4982]: I0122 07:01:27.143196 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="06085b1c-55e5-41ef-be5d-10d6c689c40f" containerName="extract-utilities"
Jan 22 07:01:27 crc kubenswrapper[4982]: E0122 07:01:27.143210 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06085b1c-55e5-41ef-be5d-10d6c689c40f" containerName="registry-server"
Jan 22 07:01:27 crc kubenswrapper[4982]: I0122 07:01:27.143219 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="06085b1c-55e5-41ef-be5d-10d6c689c40f" containerName="registry-server"
Jan 22 07:01:27 crc kubenswrapper[4982]: E0122 07:01:27.143246 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06085b1c-55e5-41ef-be5d-10d6c689c40f" containerName="extract-content"
Jan 22 07:01:27 crc kubenswrapper[4982]: I0122 07:01:27.143255 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="06085b1c-55e5-41ef-be5d-10d6c689c40f" containerName="extract-content"
Jan 22 07:01:27 crc kubenswrapper[4982]: E0122 07:01:27.143274 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb8519a1-7d8c-4f8d-a98d-640f74ff041b" containerName="collect-profiles"
Jan 22 07:01:27 crc kubenswrapper[4982]: I0122 07:01:27.143282 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb8519a1-7d8c-4f8d-a98d-640f74ff041b" containerName="collect-profiles"
Jan 22 07:01:27 crc kubenswrapper[4982]: I0122 07:01:27.143457 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb8519a1-7d8c-4f8d-a98d-640f74ff041b" containerName="collect-profiles"
Jan 22 07:01:27 crc kubenswrapper[4982]: I0122 07:01:27.143476 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="06085b1c-55e5-41ef-be5d-10d6c689c40f" containerName="registry-server"
Jan 22 07:01:27 crc kubenswrapper[4982]: I0122 07:01:27.144042 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-dsrxb"
Need to start a new one" pod="crc-storage/crc-storage-crc-dsrxb" Jan 22 07:01:27 crc kubenswrapper[4982]: I0122 07:01:27.147353 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 22 07:01:27 crc kubenswrapper[4982]: I0122 07:01:27.147398 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 22 07:01:27 crc kubenswrapper[4982]: I0122 07:01:27.150331 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 22 07:01:27 crc kubenswrapper[4982]: I0122 07:01:27.150751 4982 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-s9zg9" Jan 22 07:01:27 crc kubenswrapper[4982]: I0122 07:01:27.157992 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-dsrxb"] Jan 22 07:01:27 crc kubenswrapper[4982]: I0122 07:01:27.217269 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/082f4f94-93bd-43b8-ba93-f985c04e60c7-node-mnt\") pod \"crc-storage-crc-dsrxb\" (UID: \"082f4f94-93bd-43b8-ba93-f985c04e60c7\") " pod="crc-storage/crc-storage-crc-dsrxb" Jan 22 07:01:27 crc kubenswrapper[4982]: I0122 07:01:27.217554 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/082f4f94-93bd-43b8-ba93-f985c04e60c7-crc-storage\") pod \"crc-storage-crc-dsrxb\" (UID: \"082f4f94-93bd-43b8-ba93-f985c04e60c7\") " pod="crc-storage/crc-storage-crc-dsrxb" Jan 22 07:01:27 crc kubenswrapper[4982]: I0122 07:01:27.217677 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hl9dt\" (UniqueName: \"kubernetes.io/projected/082f4f94-93bd-43b8-ba93-f985c04e60c7-kube-api-access-hl9dt\") pod \"crc-storage-crc-dsrxb\" (UID: \"082f4f94-93bd-43b8-ba93-f985c04e60c7\") " pod="crc-storage/crc-storage-crc-dsrxb" Jan 22 07:01:27 crc kubenswrapper[4982]: I0122 07:01:27.319193 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/082f4f94-93bd-43b8-ba93-f985c04e60c7-node-mnt\") pod \"crc-storage-crc-dsrxb\" (UID: \"082f4f94-93bd-43b8-ba93-f985c04e60c7\") " pod="crc-storage/crc-storage-crc-dsrxb" Jan 22 07:01:27 crc kubenswrapper[4982]: I0122 07:01:27.319533 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/082f4f94-93bd-43b8-ba93-f985c04e60c7-crc-storage\") pod \"crc-storage-crc-dsrxb\" (UID: \"082f4f94-93bd-43b8-ba93-f985c04e60c7\") " pod="crc-storage/crc-storage-crc-dsrxb" Jan 22 07:01:27 crc kubenswrapper[4982]: I0122 07:01:27.319638 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hl9dt\" (UniqueName: \"kubernetes.io/projected/082f4f94-93bd-43b8-ba93-f985c04e60c7-kube-api-access-hl9dt\") pod \"crc-storage-crc-dsrxb\" (UID: \"082f4f94-93bd-43b8-ba93-f985c04e60c7\") " pod="crc-storage/crc-storage-crc-dsrxb" Jan 22 07:01:27 crc kubenswrapper[4982]: I0122 07:01:27.319458 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/082f4f94-93bd-43b8-ba93-f985c04e60c7-node-mnt\") pod \"crc-storage-crc-dsrxb\" (UID: \"082f4f94-93bd-43b8-ba93-f985c04e60c7\") " 
pod="crc-storage/crc-storage-crc-dsrxb" Jan 22 07:01:27 crc kubenswrapper[4982]: I0122 07:01:27.321995 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/082f4f94-93bd-43b8-ba93-f985c04e60c7-crc-storage\") pod \"crc-storage-crc-dsrxb\" (UID: \"082f4f94-93bd-43b8-ba93-f985c04e60c7\") " pod="crc-storage/crc-storage-crc-dsrxb" Jan 22 07:01:27 crc kubenswrapper[4982]: I0122 07:01:27.339605 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hl9dt\" (UniqueName: \"kubernetes.io/projected/082f4f94-93bd-43b8-ba93-f985c04e60c7-kube-api-access-hl9dt\") pod \"crc-storage-crc-dsrxb\" (UID: \"082f4f94-93bd-43b8-ba93-f985c04e60c7\") " pod="crc-storage/crc-storage-crc-dsrxb" Jan 22 07:01:27 crc kubenswrapper[4982]: I0122 07:01:27.524569 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-dsrxb" Jan 22 07:01:27 crc kubenswrapper[4982]: I0122 07:01:27.727627 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ba6c4ea-fd99-43b6-84a9-26bb22ae800e" path="/var/lib/kubelet/pods/1ba6c4ea-fd99-43b6-84a9-26bb22ae800e/volumes" Jan 22 07:01:27 crc kubenswrapper[4982]: I0122 07:01:27.923072 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-dsrxb"] Jan 22 07:01:28 crc kubenswrapper[4982]: I0122 07:01:28.163096 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-dsrxb" event={"ID":"082f4f94-93bd-43b8-ba93-f985c04e60c7","Type":"ContainerStarted","Data":"a8441ebc87a9f24a05f193a5a6751052ba2aa03143959049774cc8f59663ea7e"} Jan 22 07:01:30 crc kubenswrapper[4982]: I0122 07:01:30.187726 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-dsrxb" event={"ID":"082f4f94-93bd-43b8-ba93-f985c04e60c7","Type":"ContainerStarted","Data":"c383fad269ae361d00654f91f53a07be2b18e16463bd1bd68dae25ad216c209c"} Jan 22 07:01:30 crc kubenswrapper[4982]: I0122 07:01:30.208836 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="crc-storage/crc-storage-crc-dsrxb" podStartSLOduration=1.914677947 podStartE2EDuration="3.208808327s" podCreationTimestamp="2026-01-22 07:01:27 +0000 UTC" firstStartedPulling="2026-01-22 07:01:27.934294633 +0000 UTC m=+4548.772932626" lastFinishedPulling="2026-01-22 07:01:29.228424973 +0000 UTC m=+4550.067063006" observedRunningTime="2026-01-22 07:01:30.207341527 +0000 UTC m=+4551.045979570" watchObservedRunningTime="2026-01-22 07:01:30.208808327 +0000 UTC m=+4551.047446360" Jan 22 07:01:31 crc kubenswrapper[4982]: I0122 07:01:31.197366 4982 generic.go:334] "Generic (PLEG): container finished" podID="082f4f94-93bd-43b8-ba93-f985c04e60c7" containerID="c383fad269ae361d00654f91f53a07be2b18e16463bd1bd68dae25ad216c209c" exitCode=0 Jan 22 07:01:31 crc kubenswrapper[4982]: I0122 07:01:31.197544 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-dsrxb" event={"ID":"082f4f94-93bd-43b8-ba93-f985c04e60c7","Type":"ContainerDied","Data":"c383fad269ae361d00654f91f53a07be2b18e16463bd1bd68dae25ad216c209c"} Jan 22 07:01:32 crc kubenswrapper[4982]: I0122 07:01:32.529253 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-dsrxb" Jan 22 07:01:32 crc kubenswrapper[4982]: I0122 07:01:32.638477 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/082f4f94-93bd-43b8-ba93-f985c04e60c7-node-mnt\") pod \"082f4f94-93bd-43b8-ba93-f985c04e60c7\" (UID: \"082f4f94-93bd-43b8-ba93-f985c04e60c7\") " Jan 22 07:01:32 crc kubenswrapper[4982]: I0122 07:01:32.638982 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hl9dt\" (UniqueName: \"kubernetes.io/projected/082f4f94-93bd-43b8-ba93-f985c04e60c7-kube-api-access-hl9dt\") pod \"082f4f94-93bd-43b8-ba93-f985c04e60c7\" (UID: \"082f4f94-93bd-43b8-ba93-f985c04e60c7\") " Jan 22 07:01:32 crc kubenswrapper[4982]: I0122 07:01:32.639153 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/082f4f94-93bd-43b8-ba93-f985c04e60c7-crc-storage\") pod \"082f4f94-93bd-43b8-ba93-f985c04e60c7\" (UID: \"082f4f94-93bd-43b8-ba93-f985c04e60c7\") " Jan 22 07:01:32 crc kubenswrapper[4982]: I0122 07:01:32.638684 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/082f4f94-93bd-43b8-ba93-f985c04e60c7-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "082f4f94-93bd-43b8-ba93-f985c04e60c7" (UID: "082f4f94-93bd-43b8-ba93-f985c04e60c7"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 07:01:32 crc kubenswrapper[4982]: I0122 07:01:32.646190 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/082f4f94-93bd-43b8-ba93-f985c04e60c7-kube-api-access-hl9dt" (OuterVolumeSpecName: "kube-api-access-hl9dt") pod "082f4f94-93bd-43b8-ba93-f985c04e60c7" (UID: "082f4f94-93bd-43b8-ba93-f985c04e60c7"). InnerVolumeSpecName "kube-api-access-hl9dt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:01:32 crc kubenswrapper[4982]: I0122 07:01:32.669882 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/082f4f94-93bd-43b8-ba93-f985c04e60c7-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "082f4f94-93bd-43b8-ba93-f985c04e60c7" (UID: "082f4f94-93bd-43b8-ba93-f985c04e60c7"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:01:32 crc kubenswrapper[4982]: I0122 07:01:32.740757 4982 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/082f4f94-93bd-43b8-ba93-f985c04e60c7-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 22 07:01:32 crc kubenswrapper[4982]: I0122 07:01:32.740789 4982 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/082f4f94-93bd-43b8-ba93-f985c04e60c7-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 22 07:01:32 crc kubenswrapper[4982]: I0122 07:01:32.740803 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hl9dt\" (UniqueName: \"kubernetes.io/projected/082f4f94-93bd-43b8-ba93-f985c04e60c7-kube-api-access-hl9dt\") on node \"crc\" DevicePath \"\"" Jan 22 07:01:33 crc kubenswrapper[4982]: I0122 07:01:33.212004 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-dsrxb" event={"ID":"082f4f94-93bd-43b8-ba93-f985c04e60c7","Type":"ContainerDied","Data":"a8441ebc87a9f24a05f193a5a6751052ba2aa03143959049774cc8f59663ea7e"} Jan 22 07:01:33 crc kubenswrapper[4982]: I0122 07:01:33.212045 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a8441ebc87a9f24a05f193a5a6751052ba2aa03143959049774cc8f59663ea7e" Jan 22 07:01:33 crc kubenswrapper[4982]: I0122 07:01:33.212549 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-dsrxb" Jan 22 07:01:33 crc kubenswrapper[4982]: I0122 07:01:33.964653 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-dsrxb"] Jan 22 07:01:33 crc kubenswrapper[4982]: I0122 07:01:33.970923 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-dsrxb"] Jan 22 07:01:34 crc kubenswrapper[4982]: I0122 07:01:34.140641 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-xp57f"] Jan 22 07:01:34 crc kubenswrapper[4982]: E0122 07:01:34.141104 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="082f4f94-93bd-43b8-ba93-f985c04e60c7" containerName="storage" Jan 22 07:01:34 crc kubenswrapper[4982]: I0122 07:01:34.141123 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="082f4f94-93bd-43b8-ba93-f985c04e60c7" containerName="storage" Jan 22 07:01:34 crc kubenswrapper[4982]: I0122 07:01:34.141306 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="082f4f94-93bd-43b8-ba93-f985c04e60c7" containerName="storage" Jan 22 07:01:34 crc kubenswrapper[4982]: I0122 07:01:34.141840 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-xp57f" Jan 22 07:01:34 crc kubenswrapper[4982]: I0122 07:01:34.146564 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 22 07:01:34 crc kubenswrapper[4982]: I0122 07:01:34.147031 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 22 07:01:34 crc kubenswrapper[4982]: I0122 07:01:34.147467 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 22 07:01:34 crc kubenswrapper[4982]: I0122 07:01:34.146575 4982 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-s9zg9" Jan 22 07:01:34 crc kubenswrapper[4982]: I0122 07:01:34.156564 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-xp57f"] Jan 22 07:01:34 crc kubenswrapper[4982]: I0122 07:01:34.262884 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hb5sh\" (UniqueName: \"kubernetes.io/projected/b398baf8-1450-4325-9996-2c600110fc8b-kube-api-access-hb5sh\") pod \"crc-storage-crc-xp57f\" (UID: \"b398baf8-1450-4325-9996-2c600110fc8b\") " pod="crc-storage/crc-storage-crc-xp57f" Jan 22 07:01:34 crc kubenswrapper[4982]: I0122 07:01:34.262948 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/b398baf8-1450-4325-9996-2c600110fc8b-crc-storage\") pod \"crc-storage-crc-xp57f\" (UID: \"b398baf8-1450-4325-9996-2c600110fc8b\") " pod="crc-storage/crc-storage-crc-xp57f" Jan 22 07:01:34 crc kubenswrapper[4982]: I0122 07:01:34.262975 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/b398baf8-1450-4325-9996-2c600110fc8b-node-mnt\") pod \"crc-storage-crc-xp57f\" (UID: \"b398baf8-1450-4325-9996-2c600110fc8b\") " pod="crc-storage/crc-storage-crc-xp57f" Jan 22 07:01:34 crc kubenswrapper[4982]: I0122 07:01:34.364720 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/b398baf8-1450-4325-9996-2c600110fc8b-node-mnt\") pod \"crc-storage-crc-xp57f\" (UID: \"b398baf8-1450-4325-9996-2c600110fc8b\") " pod="crc-storage/crc-storage-crc-xp57f" Jan 22 07:01:34 crc kubenswrapper[4982]: I0122 07:01:34.364930 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hb5sh\" (UniqueName: \"kubernetes.io/projected/b398baf8-1450-4325-9996-2c600110fc8b-kube-api-access-hb5sh\") pod \"crc-storage-crc-xp57f\" (UID: \"b398baf8-1450-4325-9996-2c600110fc8b\") " pod="crc-storage/crc-storage-crc-xp57f" Jan 22 07:01:34 crc kubenswrapper[4982]: I0122 07:01:34.365003 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/b398baf8-1450-4325-9996-2c600110fc8b-crc-storage\") pod \"crc-storage-crc-xp57f\" (UID: \"b398baf8-1450-4325-9996-2c600110fc8b\") " pod="crc-storage/crc-storage-crc-xp57f" Jan 22 07:01:34 crc kubenswrapper[4982]: I0122 07:01:34.365074 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/b398baf8-1450-4325-9996-2c600110fc8b-node-mnt\") pod \"crc-storage-crc-xp57f\" (UID: \"b398baf8-1450-4325-9996-2c600110fc8b\") " 
pod="crc-storage/crc-storage-crc-xp57f" Jan 22 07:01:34 crc kubenswrapper[4982]: I0122 07:01:34.365820 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/b398baf8-1450-4325-9996-2c600110fc8b-crc-storage\") pod \"crc-storage-crc-xp57f\" (UID: \"b398baf8-1450-4325-9996-2c600110fc8b\") " pod="crc-storage/crc-storage-crc-xp57f" Jan 22 07:01:34 crc kubenswrapper[4982]: I0122 07:01:34.387949 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hb5sh\" (UniqueName: \"kubernetes.io/projected/b398baf8-1450-4325-9996-2c600110fc8b-kube-api-access-hb5sh\") pod \"crc-storage-crc-xp57f\" (UID: \"b398baf8-1450-4325-9996-2c600110fc8b\") " pod="crc-storage/crc-storage-crc-xp57f" Jan 22 07:01:34 crc kubenswrapper[4982]: I0122 07:01:34.479900 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-xp57f" Jan 22 07:01:34 crc kubenswrapper[4982]: I0122 07:01:34.924690 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-xp57f"] Jan 22 07:01:35 crc kubenswrapper[4982]: I0122 07:01:35.231456 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-xp57f" event={"ID":"b398baf8-1450-4325-9996-2c600110fc8b","Type":"ContainerStarted","Data":"9ce306fffa13c2c512f3e08552dcf7e9533a44441e701b08e47c68ff85b96b9f"} Jan 22 07:01:35 crc kubenswrapper[4982]: I0122 07:01:35.730576 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="082f4f94-93bd-43b8-ba93-f985c04e60c7" path="/var/lib/kubelet/pods/082f4f94-93bd-43b8-ba93-f985c04e60c7/volumes" Jan 22 07:01:38 crc kubenswrapper[4982]: I0122 07:01:38.251926 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-xp57f" event={"ID":"b398baf8-1450-4325-9996-2c600110fc8b","Type":"ContainerStarted","Data":"41a155107f2e1d7624b17e9c423ca9efc5077610636c5fdc5a462c1734095119"} Jan 22 07:01:39 crc kubenswrapper[4982]: I0122 07:01:39.259974 4982 generic.go:334] "Generic (PLEG): container finished" podID="b398baf8-1450-4325-9996-2c600110fc8b" containerID="41a155107f2e1d7624b17e9c423ca9efc5077610636c5fdc5a462c1734095119" exitCode=0 Jan 22 07:01:39 crc kubenswrapper[4982]: I0122 07:01:39.260026 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-xp57f" event={"ID":"b398baf8-1450-4325-9996-2c600110fc8b","Type":"ContainerDied","Data":"41a155107f2e1d7624b17e9c423ca9efc5077610636c5fdc5a462c1734095119"} Jan 22 07:01:40 crc kubenswrapper[4982]: I0122 07:01:40.593956 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-xp57f" Jan 22 07:01:40 crc kubenswrapper[4982]: I0122 07:01:40.766939 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hb5sh\" (UniqueName: \"kubernetes.io/projected/b398baf8-1450-4325-9996-2c600110fc8b-kube-api-access-hb5sh\") pod \"b398baf8-1450-4325-9996-2c600110fc8b\" (UID: \"b398baf8-1450-4325-9996-2c600110fc8b\") " Jan 22 07:01:40 crc kubenswrapper[4982]: I0122 07:01:40.767144 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/b398baf8-1450-4325-9996-2c600110fc8b-node-mnt\") pod \"b398baf8-1450-4325-9996-2c600110fc8b\" (UID: \"b398baf8-1450-4325-9996-2c600110fc8b\") " Jan 22 07:01:40 crc kubenswrapper[4982]: I0122 07:01:40.767197 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/b398baf8-1450-4325-9996-2c600110fc8b-crc-storage\") pod \"b398baf8-1450-4325-9996-2c600110fc8b\" (UID: \"b398baf8-1450-4325-9996-2c600110fc8b\") " Jan 22 07:01:40 crc kubenswrapper[4982]: I0122 07:01:40.767212 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b398baf8-1450-4325-9996-2c600110fc8b-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "b398baf8-1450-4325-9996-2c600110fc8b" (UID: "b398baf8-1450-4325-9996-2c600110fc8b"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 07:01:40 crc kubenswrapper[4982]: I0122 07:01:40.767473 4982 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/b398baf8-1450-4325-9996-2c600110fc8b-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 22 07:01:40 crc kubenswrapper[4982]: I0122 07:01:40.771784 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b398baf8-1450-4325-9996-2c600110fc8b-kube-api-access-hb5sh" (OuterVolumeSpecName: "kube-api-access-hb5sh") pod "b398baf8-1450-4325-9996-2c600110fc8b" (UID: "b398baf8-1450-4325-9996-2c600110fc8b"). InnerVolumeSpecName "kube-api-access-hb5sh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:01:40 crc kubenswrapper[4982]: I0122 07:01:40.782723 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b398baf8-1450-4325-9996-2c600110fc8b-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "b398baf8-1450-4325-9996-2c600110fc8b" (UID: "b398baf8-1450-4325-9996-2c600110fc8b"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:01:40 crc kubenswrapper[4982]: I0122 07:01:40.869312 4982 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/b398baf8-1450-4325-9996-2c600110fc8b-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 22 07:01:40 crc kubenswrapper[4982]: I0122 07:01:40.869348 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hb5sh\" (UniqueName: \"kubernetes.io/projected/b398baf8-1450-4325-9996-2c600110fc8b-kube-api-access-hb5sh\") on node \"crc\" DevicePath \"\"" Jan 22 07:01:41 crc kubenswrapper[4982]: I0122 07:01:41.277077 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-xp57f" event={"ID":"b398baf8-1450-4325-9996-2c600110fc8b","Type":"ContainerDied","Data":"9ce306fffa13c2c512f3e08552dcf7e9533a44441e701b08e47c68ff85b96b9f"} Jan 22 07:01:41 crc kubenswrapper[4982]: I0122 07:01:41.277120 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-xp57f" Jan 22 07:01:41 crc kubenswrapper[4982]: I0122 07:01:41.277143 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9ce306fffa13c2c512f3e08552dcf7e9533a44441e701b08e47c68ff85b96b9f" Jan 22 07:01:48 crc kubenswrapper[4982]: I0122 07:01:48.974264 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 07:01:48 crc kubenswrapper[4982]: I0122 07:01:48.974965 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 07:01:48 crc kubenswrapper[4982]: I0122 07:01:48.975051 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 07:01:48 crc kubenswrapper[4982]: I0122 07:01:48.976078 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"800a63134ac3b413ea38ff457c1f2e665d54754e82057e18f9f0b05326428673"} pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 07:01:48 crc kubenswrapper[4982]: I0122 07:01:48.976200 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" containerID="cri-o://800a63134ac3b413ea38ff457c1f2e665d54754e82057e18f9f0b05326428673" gracePeriod=600 Jan 22 07:01:49 crc kubenswrapper[4982]: E0122 07:01:49.097316 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" 
podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:01:49 crc kubenswrapper[4982]: I0122 07:01:49.331597 4982 generic.go:334] "Generic (PLEG): container finished" podID="2829369e-72ba-4637-853b-88f5cf242a0e" containerID="800a63134ac3b413ea38ff457c1f2e665d54754e82057e18f9f0b05326428673" exitCode=0 Jan 22 07:01:49 crc kubenswrapper[4982]: I0122 07:01:49.331659 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerDied","Data":"800a63134ac3b413ea38ff457c1f2e665d54754e82057e18f9f0b05326428673"} Jan 22 07:01:49 crc kubenswrapper[4982]: I0122 07:01:49.331706 4982 scope.go:117] "RemoveContainer" containerID="0b646ab2c504c7ede8f740de5f61a1d91ecd8f99f3e53460c23f3d8a0da10a91" Jan 22 07:01:49 crc kubenswrapper[4982]: I0122 07:01:49.332367 4982 scope.go:117] "RemoveContainer" containerID="800a63134ac3b413ea38ff457c1f2e665d54754e82057e18f9f0b05326428673" Jan 22 07:01:49 crc kubenswrapper[4982]: E0122 07:01:49.332661 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:02:01 crc kubenswrapper[4982]: I0122 07:02:01.720470 4982 scope.go:117] "RemoveContainer" containerID="800a63134ac3b413ea38ff457c1f2e665d54754e82057e18f9f0b05326428673" Jan 22 07:02:01 crc kubenswrapper[4982]: E0122 07:02:01.721268 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:02:02 crc kubenswrapper[4982]: I0122 07:02:02.374325 4982 scope.go:117] "RemoveContainer" containerID="20a205c4d63be561d27569fcb88388244ef62aa7090609a26178e1437adb91e5" Jan 22 07:02:14 crc kubenswrapper[4982]: I0122 07:02:14.719507 4982 scope.go:117] "RemoveContainer" containerID="800a63134ac3b413ea38ff457c1f2e665d54754e82057e18f9f0b05326428673" Jan 22 07:02:14 crc kubenswrapper[4982]: E0122 07:02:14.720234 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:02:25 crc kubenswrapper[4982]: I0122 07:02:25.719947 4982 scope.go:117] "RemoveContainer" containerID="800a63134ac3b413ea38ff457c1f2e665d54754e82057e18f9f0b05326428673" Jan 22 07:02:25 crc kubenswrapper[4982]: E0122 07:02:25.721033 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
Jan 22 07:02:37 crc kubenswrapper[4982]: I0122 07:02:37.719750 4982 scope.go:117] "RemoveContainer" containerID="800a63134ac3b413ea38ff457c1f2e665d54754e82057e18f9f0b05326428673"
Jan 22 07:02:37 crc kubenswrapper[4982]: E0122 07:02:37.720793 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 07:02:51 crc kubenswrapper[4982]: I0122 07:02:51.719570 4982 scope.go:117] "RemoveContainer" containerID="800a63134ac3b413ea38ff457c1f2e665d54754e82057e18f9f0b05326428673"
Jan 22 07:02:51 crc kubenswrapper[4982]: E0122 07:02:51.720433 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 07:03:04 crc kubenswrapper[4982]: I0122 07:03:04.719544 4982 scope.go:117] "RemoveContainer" containerID="800a63134ac3b413ea38ff457c1f2e665d54754e82057e18f9f0b05326428673"
Jan 22 07:03:04 crc kubenswrapper[4982]: E0122 07:03:04.720548 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 07:03:15 crc kubenswrapper[4982]: I0122 07:03:15.719299 4982 scope.go:117] "RemoveContainer" containerID="800a63134ac3b413ea38ff457c1f2e665d54754e82057e18f9f0b05326428673"
Jan 22 07:03:15 crc kubenswrapper[4982]: E0122 07:03:15.720112 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 07:03:27 crc kubenswrapper[4982]: I0122 07:03:27.720669 4982 scope.go:117] "RemoveContainer" containerID="800a63134ac3b413ea38ff457c1f2e665d54754e82057e18f9f0b05326428673"
Jan 22 07:03:27 crc kubenswrapper[4982]: E0122 07:03:27.721648 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:03:42 crc kubenswrapper[4982]: I0122 07:03:42.719940 4982 scope.go:117] "RemoveContainer" containerID="800a63134ac3b413ea38ff457c1f2e665d54754e82057e18f9f0b05326428673" Jan 22 07:03:42 crc kubenswrapper[4982]: E0122 07:03:42.721259 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:03:56 crc kubenswrapper[4982]: I0122 07:03:56.719633 4982 scope.go:117] "RemoveContainer" containerID="800a63134ac3b413ea38ff457c1f2e665d54754e82057e18f9f0b05326428673" Jan 22 07:03:56 crc kubenswrapper[4982]: E0122 07:03:56.720633 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:04:09 crc kubenswrapper[4982]: I0122 07:04:09.725260 4982 scope.go:117] "RemoveContainer" containerID="800a63134ac3b413ea38ff457c1f2e665d54754e82057e18f9f0b05326428673" Jan 22 07:04:09 crc kubenswrapper[4982]: E0122 07:04:09.726227 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:04:21 crc kubenswrapper[4982]: I0122 07:04:21.719903 4982 scope.go:117] "RemoveContainer" containerID="800a63134ac3b413ea38ff457c1f2e665d54754e82057e18f9f0b05326428673" Jan 22 07:04:21 crc kubenswrapper[4982]: E0122 07:04:21.721247 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:04:31 crc kubenswrapper[4982]: I0122 07:04:31.185923 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-92slm"] Jan 22 07:04:31 crc kubenswrapper[4982]: E0122 07:04:31.187075 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b398baf8-1450-4325-9996-2c600110fc8b" containerName="storage" Jan 22 07:04:31 crc kubenswrapper[4982]: I0122 07:04:31.187094 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b398baf8-1450-4325-9996-2c600110fc8b" containerName="storage" Jan 22 07:04:31 crc kubenswrapper[4982]: I0122 07:04:31.187345 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b398baf8-1450-4325-9996-2c600110fc8b" containerName="storage" Jan 22 07:04:31 crc kubenswrapper[4982]: I0122 07:04:31.188589 4982 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-92slm" Jan 22 07:04:31 crc kubenswrapper[4982]: I0122 07:04:31.194307 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-92slm"] Jan 22 07:04:31 crc kubenswrapper[4982]: I0122 07:04:31.315492 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z69rp\" (UniqueName: \"kubernetes.io/projected/31771ced-78f6-467a-84a5-483833137d65-kube-api-access-z69rp\") pod \"community-operators-92slm\" (UID: \"31771ced-78f6-467a-84a5-483833137d65\") " pod="openshift-marketplace/community-operators-92slm" Jan 22 07:04:31 crc kubenswrapper[4982]: I0122 07:04:31.315564 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31771ced-78f6-467a-84a5-483833137d65-utilities\") pod \"community-operators-92slm\" (UID: \"31771ced-78f6-467a-84a5-483833137d65\") " pod="openshift-marketplace/community-operators-92slm" Jan 22 07:04:31 crc kubenswrapper[4982]: I0122 07:04:31.315657 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31771ced-78f6-467a-84a5-483833137d65-catalog-content\") pod \"community-operators-92slm\" (UID: \"31771ced-78f6-467a-84a5-483833137d65\") " pod="openshift-marketplace/community-operators-92slm" Jan 22 07:04:31 crc kubenswrapper[4982]: I0122 07:04:31.417259 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z69rp\" (UniqueName: \"kubernetes.io/projected/31771ced-78f6-467a-84a5-483833137d65-kube-api-access-z69rp\") pod \"community-operators-92slm\" (UID: \"31771ced-78f6-467a-84a5-483833137d65\") " pod="openshift-marketplace/community-operators-92slm" Jan 22 07:04:31 crc kubenswrapper[4982]: I0122 07:04:31.417381 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31771ced-78f6-467a-84a5-483833137d65-utilities\") pod \"community-operators-92slm\" (UID: \"31771ced-78f6-467a-84a5-483833137d65\") " pod="openshift-marketplace/community-operators-92slm" Jan 22 07:04:31 crc kubenswrapper[4982]: I0122 07:04:31.417457 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31771ced-78f6-467a-84a5-483833137d65-catalog-content\") pod \"community-operators-92slm\" (UID: \"31771ced-78f6-467a-84a5-483833137d65\") " pod="openshift-marketplace/community-operators-92slm" Jan 22 07:04:31 crc kubenswrapper[4982]: I0122 07:04:31.417980 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31771ced-78f6-467a-84a5-483833137d65-utilities\") pod \"community-operators-92slm\" (UID: \"31771ced-78f6-467a-84a5-483833137d65\") " pod="openshift-marketplace/community-operators-92slm" Jan 22 07:04:31 crc kubenswrapper[4982]: I0122 07:04:31.417998 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31771ced-78f6-467a-84a5-483833137d65-catalog-content\") pod \"community-operators-92slm\" (UID: \"31771ced-78f6-467a-84a5-483833137d65\") " pod="openshift-marketplace/community-operators-92slm" Jan 22 07:04:31 crc kubenswrapper[4982]: I0122 
Jan 22 07:04:31 crc kubenswrapper[4982]: I0122 07:04:31.528072 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-92slm"
Jan 22 07:04:32 crc kubenswrapper[4982]: I0122 07:04:32.053168 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-92slm"]
Jan 22 07:04:32 crc kubenswrapper[4982]: I0122 07:04:32.719734 4982 scope.go:117] "RemoveContainer" containerID="800a63134ac3b413ea38ff457c1f2e665d54754e82057e18f9f0b05326428673"
Jan 22 07:04:32 crc kubenswrapper[4982]: E0122 07:04:32.720311 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 07:04:33 crc kubenswrapper[4982]: I0122 07:04:33.019743 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-92slm" event={"ID":"31771ced-78f6-467a-84a5-483833137d65","Type":"ContainerStarted","Data":"bedc44dfd8927a61049d5cce58c05941dd70f4c4e9bd9910f986050c2ce3dbf6"}
Jan 22 07:04:39 crc kubenswrapper[4982]: I0122 07:04:39.075838 4982 generic.go:334] "Generic (PLEG): container finished" podID="31771ced-78f6-467a-84a5-483833137d65" containerID="d58600dc1974b61b17c348da4f6186f3f894423fc39851782ca9aa49675b34d6" exitCode=0
Jan 22 07:04:39 crc kubenswrapper[4982]: I0122 07:04:39.076072 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-92slm" event={"ID":"31771ced-78f6-467a-84a5-483833137d65","Type":"ContainerDied","Data":"d58600dc1974b61b17c348da4f6186f3f894423fc39851782ca9aa49675b34d6"}
Jan 22 07:04:43 crc kubenswrapper[4982]: I0122 07:04:43.719440 4982 scope.go:117] "RemoveContainer" containerID="800a63134ac3b413ea38ff457c1f2e665d54754e82057e18f9f0b05326428673"
Jan 22 07:04:43 crc kubenswrapper[4982]: E0122 07:04:43.720184 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.087956 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-95587bc99-bthgq"]
Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.089071 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95587bc99-bthgq"
Need to start a new one" pod="openstack/dnsmasq-dns-95587bc99-bthgq" Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.091373 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.091462 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-59jx5" Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.091591 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.092644 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.098146 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.109282 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e57ac048-0818-49b6-8fe3-4fac5cce799c-config\") pod \"dnsmasq-dns-95587bc99-bthgq\" (UID: \"e57ac048-0818-49b6-8fe3-4fac5cce799c\") " pod="openstack/dnsmasq-dns-95587bc99-bthgq" Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.109335 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5qtv\" (UniqueName: \"kubernetes.io/projected/e57ac048-0818-49b6-8fe3-4fac5cce799c-kube-api-access-w5qtv\") pod \"dnsmasq-dns-95587bc99-bthgq\" (UID: \"e57ac048-0818-49b6-8fe3-4fac5cce799c\") " pod="openstack/dnsmasq-dns-95587bc99-bthgq" Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.109380 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e57ac048-0818-49b6-8fe3-4fac5cce799c-dns-svc\") pod \"dnsmasq-dns-95587bc99-bthgq\" (UID: \"e57ac048-0818-49b6-8fe3-4fac5cce799c\") " pod="openstack/dnsmasq-dns-95587bc99-bthgq" Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.112114 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-95587bc99-bthgq"] Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.118735 4982 generic.go:334] "Generic (PLEG): container finished" podID="31771ced-78f6-467a-84a5-483833137d65" containerID="41b707cdd2d471b5553aaf3d52b95a9ebd7e1cc8cf345c20dd09015ed36a2498" exitCode=0 Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.118795 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-92slm" event={"ID":"31771ced-78f6-467a-84a5-483833137d65","Type":"ContainerDied","Data":"41b707cdd2d471b5553aaf3d52b95a9ebd7e1cc8cf345c20dd09015ed36a2498"} Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.210966 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5qtv\" (UniqueName: \"kubernetes.io/projected/e57ac048-0818-49b6-8fe3-4fac5cce799c-kube-api-access-w5qtv\") pod \"dnsmasq-dns-95587bc99-bthgq\" (UID: \"e57ac048-0818-49b6-8fe3-4fac5cce799c\") " pod="openstack/dnsmasq-dns-95587bc99-bthgq" Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.211392 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e57ac048-0818-49b6-8fe3-4fac5cce799c-dns-svc\") pod \"dnsmasq-dns-95587bc99-bthgq\" (UID: \"e57ac048-0818-49b6-8fe3-4fac5cce799c\") " 
pod="openstack/dnsmasq-dns-95587bc99-bthgq" Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.211530 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e57ac048-0818-49b6-8fe3-4fac5cce799c-config\") pod \"dnsmasq-dns-95587bc99-bthgq\" (UID: \"e57ac048-0818-49b6-8fe3-4fac5cce799c\") " pod="openstack/dnsmasq-dns-95587bc99-bthgq" Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.212632 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e57ac048-0818-49b6-8fe3-4fac5cce799c-config\") pod \"dnsmasq-dns-95587bc99-bthgq\" (UID: \"e57ac048-0818-49b6-8fe3-4fac5cce799c\") " pod="openstack/dnsmasq-dns-95587bc99-bthgq" Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.212633 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e57ac048-0818-49b6-8fe3-4fac5cce799c-dns-svc\") pod \"dnsmasq-dns-95587bc99-bthgq\" (UID: \"e57ac048-0818-49b6-8fe3-4fac5cce799c\") " pod="openstack/dnsmasq-dns-95587bc99-bthgq" Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.238691 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5qtv\" (UniqueName: \"kubernetes.io/projected/e57ac048-0818-49b6-8fe3-4fac5cce799c-kube-api-access-w5qtv\") pod \"dnsmasq-dns-95587bc99-bthgq\" (UID: \"e57ac048-0818-49b6-8fe3-4fac5cce799c\") " pod="openstack/dnsmasq-dns-95587bc99-bthgq" Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.408974 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95587bc99-bthgq" Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.439528 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5d79f765b5-wh42f"] Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.441108 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d79f765b5-wh42f" Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.450187 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d79f765b5-wh42f"] Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.515577 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bll2c\" (UniqueName: \"kubernetes.io/projected/e62d2f00-b1ff-4158-991e-4203b5faeabe-kube-api-access-bll2c\") pod \"dnsmasq-dns-5d79f765b5-wh42f\" (UID: \"e62d2f00-b1ff-4158-991e-4203b5faeabe\") " pod="openstack/dnsmasq-dns-5d79f765b5-wh42f" Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.515874 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e62d2f00-b1ff-4158-991e-4203b5faeabe-dns-svc\") pod \"dnsmasq-dns-5d79f765b5-wh42f\" (UID: \"e62d2f00-b1ff-4158-991e-4203b5faeabe\") " pod="openstack/dnsmasq-dns-5d79f765b5-wh42f" Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.515916 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e62d2f00-b1ff-4158-991e-4203b5faeabe-config\") pod \"dnsmasq-dns-5d79f765b5-wh42f\" (UID: \"e62d2f00-b1ff-4158-991e-4203b5faeabe\") " pod="openstack/dnsmasq-dns-5d79f765b5-wh42f" Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.617584 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bll2c\" (UniqueName: \"kubernetes.io/projected/e62d2f00-b1ff-4158-991e-4203b5faeabe-kube-api-access-bll2c\") pod \"dnsmasq-dns-5d79f765b5-wh42f\" (UID: \"e62d2f00-b1ff-4158-991e-4203b5faeabe\") " pod="openstack/dnsmasq-dns-5d79f765b5-wh42f" Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.617659 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e62d2f00-b1ff-4158-991e-4203b5faeabe-dns-svc\") pod \"dnsmasq-dns-5d79f765b5-wh42f\" (UID: \"e62d2f00-b1ff-4158-991e-4203b5faeabe\") " pod="openstack/dnsmasq-dns-5d79f765b5-wh42f" Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.617708 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e62d2f00-b1ff-4158-991e-4203b5faeabe-config\") pod \"dnsmasq-dns-5d79f765b5-wh42f\" (UID: \"e62d2f00-b1ff-4158-991e-4203b5faeabe\") " pod="openstack/dnsmasq-dns-5d79f765b5-wh42f" Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.618612 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e62d2f00-b1ff-4158-991e-4203b5faeabe-config\") pod \"dnsmasq-dns-5d79f765b5-wh42f\" (UID: \"e62d2f00-b1ff-4158-991e-4203b5faeabe\") " pod="openstack/dnsmasq-dns-5d79f765b5-wh42f" Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.619514 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e62d2f00-b1ff-4158-991e-4203b5faeabe-dns-svc\") pod \"dnsmasq-dns-5d79f765b5-wh42f\" (UID: \"e62d2f00-b1ff-4158-991e-4203b5faeabe\") " pod="openstack/dnsmasq-dns-5d79f765b5-wh42f" Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.640052 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bll2c\" (UniqueName: 
\"kubernetes.io/projected/e62d2f00-b1ff-4158-991e-4203b5faeabe-kube-api-access-bll2c\") pod \"dnsmasq-dns-5d79f765b5-wh42f\" (UID: \"e62d2f00-b1ff-4158-991e-4203b5faeabe\") " pod="openstack/dnsmasq-dns-5d79f765b5-wh42f" Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.812466 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d79f765b5-wh42f" Jan 22 07:04:44 crc kubenswrapper[4982]: I0122 07:04:44.923493 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-95587bc99-bthgq"] Jan 22 07:04:44 crc kubenswrapper[4982]: W0122 07:04:44.945269 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode57ac048_0818_49b6_8fe3_4fac5cce799c.slice/crio-a647f4679ad28f5503bdccdb0274df9ae60240deb08cca5aa3e564254cffeeea WatchSource:0}: Error finding container a647f4679ad28f5503bdccdb0274df9ae60240deb08cca5aa3e564254cffeeea: Status 404 returned error can't find the container with id a647f4679ad28f5503bdccdb0274df9ae60240deb08cca5aa3e564254cffeeea Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.143042 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95587bc99-bthgq" event={"ID":"e57ac048-0818-49b6-8fe3-4fac5cce799c","Type":"ContainerStarted","Data":"a647f4679ad28f5503bdccdb0274df9ae60240deb08cca5aa3e564254cffeeea"} Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.220760 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d79f765b5-wh42f"] Jan 22 07:04:45 crc kubenswrapper[4982]: W0122 07:04:45.228954 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode62d2f00_b1ff_4158_991e_4203b5faeabe.slice/crio-21ddeb1f158fe380c9413e0f3460dcc9aa082ca07d205f1673430b5726359059 WatchSource:0}: Error finding container 21ddeb1f158fe380c9413e0f3460dcc9aa082ca07d205f1673430b5726359059: Status 404 returned error can't find the container with id 21ddeb1f158fe380c9413e0f3460dcc9aa082ca07d205f1673430b5726359059 Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.275613 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.277916 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.283725 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.285052 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.285171 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.285302 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.285340 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.285485 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-8vklg" Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.434407 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/fdfea17b-526b-43b7-b93b-bfb7342e4590-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " pod="openstack/rabbitmq-server-0" Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.434827 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8m4l\" (UniqueName: \"kubernetes.io/projected/fdfea17b-526b-43b7-b93b-bfb7342e4590-kube-api-access-g8m4l\") pod \"rabbitmq-server-0\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " pod="openstack/rabbitmq-server-0" Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.434892 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/fdfea17b-526b-43b7-b93b-bfb7342e4590-pod-info\") pod \"rabbitmq-server-0\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " pod="openstack/rabbitmq-server-0" Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.434921 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/fdfea17b-526b-43b7-b93b-bfb7342e4590-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " pod="openstack/rabbitmq-server-0" Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.434976 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/fdfea17b-526b-43b7-b93b-bfb7342e4590-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " pod="openstack/rabbitmq-server-0" Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.435006 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/fdfea17b-526b-43b7-b93b-bfb7342e4590-server-conf\") pod \"rabbitmq-server-0\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " pod="openstack/rabbitmq-server-0" Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.435060 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"pvc-872922c5-1056-47e5-85fe-83377239f8f6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-872922c5-1056-47e5-85fe-83377239f8f6\") pod \"rabbitmq-server-0\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " pod="openstack/rabbitmq-server-0" Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.435133 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/fdfea17b-526b-43b7-b93b-bfb7342e4590-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " pod="openstack/rabbitmq-server-0" Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.435175 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/fdfea17b-526b-43b7-b93b-bfb7342e4590-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " pod="openstack/rabbitmq-server-0" Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.536780 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/fdfea17b-526b-43b7-b93b-bfb7342e4590-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " pod="openstack/rabbitmq-server-0" Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.536835 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/fdfea17b-526b-43b7-b93b-bfb7342e4590-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " pod="openstack/rabbitmq-server-0" Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.536892 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/fdfea17b-526b-43b7-b93b-bfb7342e4590-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " pod="openstack/rabbitmq-server-0" Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.536956 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8m4l\" (UniqueName: \"kubernetes.io/projected/fdfea17b-526b-43b7-b93b-bfb7342e4590-kube-api-access-g8m4l\") pod \"rabbitmq-server-0\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " pod="openstack/rabbitmq-server-0" Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.536986 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/fdfea17b-526b-43b7-b93b-bfb7342e4590-pod-info\") pod \"rabbitmq-server-0\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " pod="openstack/rabbitmq-server-0" Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.537002 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/fdfea17b-526b-43b7-b93b-bfb7342e4590-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " pod="openstack/rabbitmq-server-0" Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.537028 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/fdfea17b-526b-43b7-b93b-bfb7342e4590-plugins-conf\") pod 
\"rabbitmq-server-0\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " pod="openstack/rabbitmq-server-0" Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.537047 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/fdfea17b-526b-43b7-b93b-bfb7342e4590-server-conf\") pod \"rabbitmq-server-0\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " pod="openstack/rabbitmq-server-0" Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.537081 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-872922c5-1056-47e5-85fe-83377239f8f6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-872922c5-1056-47e5-85fe-83377239f8f6\") pod \"rabbitmq-server-0\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " pod="openstack/rabbitmq-server-0" Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.537745 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/fdfea17b-526b-43b7-b93b-bfb7342e4590-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " pod="openstack/rabbitmq-server-0" Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.538027 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/fdfea17b-526b-43b7-b93b-bfb7342e4590-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " pod="openstack/rabbitmq-server-0" Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.539041 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/fdfea17b-526b-43b7-b93b-bfb7342e4590-server-conf\") pod \"rabbitmq-server-0\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " pod="openstack/rabbitmq-server-0" Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.539925 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/fdfea17b-526b-43b7-b93b-bfb7342e4590-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " pod="openstack/rabbitmq-server-0" Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.540308 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.540342 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-872922c5-1056-47e5-85fe-83377239f8f6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-872922c5-1056-47e5-85fe-83377239f8f6\") pod \"rabbitmq-server-0\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/18ab1ae5760f5c867850af584c1e8c3673fb2dd50102762695be9bc9bd399b5e/globalmount\"" pod="openstack/rabbitmq-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.544616 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/fdfea17b-526b-43b7-b93b-bfb7342e4590-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " pod="openstack/rabbitmq-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.544731 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/fdfea17b-526b-43b7-b93b-bfb7342e4590-pod-info\") pod \"rabbitmq-server-0\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " pod="openstack/rabbitmq-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.580895 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.582452 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.585349 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.585689 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.585939 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-lxqc8"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.586036 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.591829 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.593396 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.644720 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/fdfea17b-526b-43b7-b93b-bfb7342e4590-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " pod="openstack/rabbitmq-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.645460 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8m4l\" (UniqueName: \"kubernetes.io/projected/fdfea17b-526b-43b7-b93b-bfb7342e4590-kube-api-access-g8m4l\") pod \"rabbitmq-server-0\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " pod="openstack/rabbitmq-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.660219 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-872922c5-1056-47e5-85fe-83377239f8f6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-872922c5-1056-47e5-85fe-83377239f8f6\") pod \"rabbitmq-server-0\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " pod="openstack/rabbitmq-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.739993 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5226590b-0c40-49b1-b368-54b9c4e1a741-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.740064 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5226590b-0c40-49b1-b368-54b9c4e1a741-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.740188 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5226590b-0c40-49b1-b368-54b9c4e1a741-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.740214 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5226590b-0c40-49b1-b368-54b9c4e1a741-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.740235 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5226590b-0c40-49b1-b368-54b9c4e1a741-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.740365 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5226590b-0c40-49b1-b368-54b9c4e1a741-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.740422 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-e0769149-4631-4cc7-801a-7363911a5914\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0769149-4631-4cc7-801a-7363911a5914\") pod \"rabbitmq-cell1-server-0\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.740541 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qc8n4\" (UniqueName: \"kubernetes.io/projected/5226590b-0c40-49b1-b368-54b9c4e1a741-kube-api-access-qc8n4\") pod \"rabbitmq-cell1-server-0\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.740580 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5226590b-0c40-49b1-b368-54b9c4e1a741-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.841492 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5226590b-0c40-49b1-b368-54b9c4e1a741-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.841531 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5226590b-0c40-49b1-b368-54b9c4e1a741-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.841559 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5226590b-0c40-49b1-b368-54b9c4e1a741-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.841606 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5226590b-0c40-49b1-b368-54b9c4e1a741-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.841629 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-e0769149-4631-4cc7-801a-7363911a5914\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0769149-4631-4cc7-801a-7363911a5914\") pod \"rabbitmq-cell1-server-0\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.841732 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qc8n4\" (UniqueName: \"kubernetes.io/projected/5226590b-0c40-49b1-b368-54b9c4e1a741-kube-api-access-qc8n4\") pod \"rabbitmq-cell1-server-0\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.841771 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5226590b-0c40-49b1-b368-54b9c4e1a741-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.841793 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5226590b-0c40-49b1-b368-54b9c4e1a741-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.841827 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5226590b-0c40-49b1-b368-54b9c4e1a741-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.842205 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5226590b-0c40-49b1-b368-54b9c4e1a741-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.843055 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5226590b-0c40-49b1-b368-54b9c4e1a741-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.843349 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5226590b-0c40-49b1-b368-54b9c4e1a741-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.844412 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5226590b-0c40-49b1-b368-54b9c4e1a741-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.845039 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.845112 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-e0769149-4631-4cc7-801a-7363911a5914\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0769149-4631-4cc7-801a-7363911a5914\") pod \"rabbitmq-cell1-server-0\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/479035c1ce5a5ab45fd87d58580fd034545ddc1ab761c2f7aacdb8ec650b38c0/globalmount\"" pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.846456 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5226590b-0c40-49b1-b368-54b9c4e1a741-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.846478 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5226590b-0c40-49b1-b368-54b9c4e1a741-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.846536 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5226590b-0c40-49b1-b368-54b9c4e1a741-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.870499 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qc8n4\" (UniqueName: \"kubernetes.io/projected/5226590b-0c40-49b1-b368-54b9c4e1a741-kube-api-access-qc8n4\") pod \"rabbitmq-cell1-server-0\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.872578 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-e0769149-4631-4cc7-801a-7363911a5914\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0769149-4631-4cc7-801a-7363911a5914\") pod \"rabbitmq-cell1-server-0\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:04:45 crc kubenswrapper[4982]: I0122 07:04:45.953536 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.018930 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.154962 4982 generic.go:334] "Generic (PLEG): container finished" podID="e62d2f00-b1ff-4158-991e-4203b5faeabe" containerID="7c0e6228c913ab4ed1d72bad2e3187ebbcbfd42261644416cce8aa1d64ee0a98" exitCode=0
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.155204 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d79f765b5-wh42f" event={"ID":"e62d2f00-b1ff-4158-991e-4203b5faeabe","Type":"ContainerDied","Data":"7c0e6228c913ab4ed1d72bad2e3187ebbcbfd42261644416cce8aa1d64ee0a98"}
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.155281 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d79f765b5-wh42f" event={"ID":"e62d2f00-b1ff-4158-991e-4203b5faeabe","Type":"ContainerStarted","Data":"21ddeb1f158fe380c9413e0f3460dcc9aa082ca07d205f1673430b5726359059"}
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.159640 4982 generic.go:334] "Generic (PLEG): container finished" podID="e57ac048-0818-49b6-8fe3-4fac5cce799c" containerID="2cd25ece81a7e80c92619a17f6bec429a2b0c3ba6b5aa5a3afe0ca9dbe70bb76" exitCode=0
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.159698 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95587bc99-bthgq" event={"ID":"e57ac048-0818-49b6-8fe3-4fac5cce799c","Type":"ContainerDied","Data":"2cd25ece81a7e80c92619a17f6bec429a2b0c3ba6b5aa5a3afe0ca9dbe70bb76"}
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.379833 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 22 07:04:46 crc kubenswrapper[4982]: W0122 07:04:46.421410 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfdfea17b_526b_43b7_b93b_bfb7342e4590.slice/crio-ea091077002f76f8e26ab3e939d6717123bb09d400b02a439abf626b1aba8e19 WatchSource:0}: Error finding container ea091077002f76f8e26ab3e939d6717123bb09d400b02a439abf626b1aba8e19: Status 404 returned error can't find the container with id ea091077002f76f8e26ab3e939d6717123bb09d400b02a439abf626b1aba8e19
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.463506 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"]
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.465131 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.470125 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.470296 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.470417 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-xtlvc"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.470888 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.472736 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.476481 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.510199 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Jan 22 07:04:46 crc kubenswrapper[4982]: W0122 07:04:46.520008 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5226590b_0c40_49b1_b368_54b9c4e1a741.slice/crio-426823258cf3ef918242748c727af6830aad8820e723a15bec87f3103fad5d9e WatchSource:0}: Error finding container 426823258cf3ef918242748c727af6830aad8820e723a15bec87f3103fad5d9e: Status 404 returned error can't find the container with id 426823258cf3ef918242748c727af6830aad8820e723a15bec87f3103fad5d9e
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.551088 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/02fcef15-a945-4200-8b5f-9697d0a2695f-kolla-config\") pod \"openstack-galera-0\" (UID: \"02fcef15-a945-4200-8b5f-9697d0a2695f\") " pod="openstack/openstack-galera-0"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.551140 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02fcef15-a945-4200-8b5f-9697d0a2695f-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"02fcef15-a945-4200-8b5f-9697d0a2695f\") " pod="openstack/openstack-galera-0"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.551175 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9hg2\" (UniqueName: \"kubernetes.io/projected/02fcef15-a945-4200-8b5f-9697d0a2695f-kube-api-access-s9hg2\") pod \"openstack-galera-0\" (UID: \"02fcef15-a945-4200-8b5f-9697d0a2695f\") " pod="openstack/openstack-galera-0"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.551257 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/02fcef15-a945-4200-8b5f-9697d0a2695f-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"02fcef15-a945-4200-8b5f-9697d0a2695f\") " pod="openstack/openstack-galera-0"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.551378 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/02fcef15-a945-4200-8b5f-9697d0a2695f-config-data-default\") pod \"openstack-galera-0\" (UID: \"02fcef15-a945-4200-8b5f-9697d0a2695f\") " pod="openstack/openstack-galera-0"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.551443 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/02fcef15-a945-4200-8b5f-9697d0a2695f-operator-scripts\") pod \"openstack-galera-0\" (UID: \"02fcef15-a945-4200-8b5f-9697d0a2695f\") " pod="openstack/openstack-galera-0"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.551509 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/02fcef15-a945-4200-8b5f-9697d0a2695f-config-data-generated\") pod \"openstack-galera-0\" (UID: \"02fcef15-a945-4200-8b5f-9697d0a2695f\") " pod="openstack/openstack-galera-0"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.551556 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-b78987ed-b6d1-4303-9ded-c9daa1dc8928\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b78987ed-b6d1-4303-9ded-c9daa1dc8928\") pod \"openstack-galera-0\" (UID: \"02fcef15-a945-4200-8b5f-9697d0a2695f\") " pod="openstack/openstack-galera-0"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.653224 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/02fcef15-a945-4200-8b5f-9697d0a2695f-config-data-default\") pod \"openstack-galera-0\" (UID: \"02fcef15-a945-4200-8b5f-9697d0a2695f\") " pod="openstack/openstack-galera-0"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.653286 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/02fcef15-a945-4200-8b5f-9697d0a2695f-operator-scripts\") pod \"openstack-galera-0\" (UID: \"02fcef15-a945-4200-8b5f-9697d0a2695f\") " pod="openstack/openstack-galera-0"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.653337 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/02fcef15-a945-4200-8b5f-9697d0a2695f-config-data-generated\") pod \"openstack-galera-0\" (UID: \"02fcef15-a945-4200-8b5f-9697d0a2695f\") " pod="openstack/openstack-galera-0"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.653371 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-b78987ed-b6d1-4303-9ded-c9daa1dc8928\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b78987ed-b6d1-4303-9ded-c9daa1dc8928\") pod \"openstack-galera-0\" (UID: \"02fcef15-a945-4200-8b5f-9697d0a2695f\") " pod="openstack/openstack-galera-0"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.653432 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/02fcef15-a945-4200-8b5f-9697d0a2695f-kolla-config\") pod \"openstack-galera-0\" (UID: \"02fcef15-a945-4200-8b5f-9697d0a2695f\") " pod="openstack/openstack-galera-0"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.653455 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02fcef15-a945-4200-8b5f-9697d0a2695f-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"02fcef15-a945-4200-8b5f-9697d0a2695f\") " pod="openstack/openstack-galera-0"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.653498 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9hg2\" (UniqueName: \"kubernetes.io/projected/02fcef15-a945-4200-8b5f-9697d0a2695f-kube-api-access-s9hg2\") pod \"openstack-galera-0\" (UID: \"02fcef15-a945-4200-8b5f-9697d0a2695f\") " pod="openstack/openstack-galera-0"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.653515 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/02fcef15-a945-4200-8b5f-9697d0a2695f-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"02fcef15-a945-4200-8b5f-9697d0a2695f\") " pod="openstack/openstack-galera-0"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.654325 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/02fcef15-a945-4200-8b5f-9697d0a2695f-config-data-default\") pod \"openstack-galera-0\" (UID: \"02fcef15-a945-4200-8b5f-9697d0a2695f\") " pod="openstack/openstack-galera-0"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.655059 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/02fcef15-a945-4200-8b5f-9697d0a2695f-kolla-config\") pod \"openstack-galera-0\" (UID: \"02fcef15-a945-4200-8b5f-9697d0a2695f\") " pod="openstack/openstack-galera-0"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.655227 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/02fcef15-a945-4200-8b5f-9697d0a2695f-operator-scripts\") pod \"openstack-galera-0\" (UID: \"02fcef15-a945-4200-8b5f-9697d0a2695f\") " pod="openstack/openstack-galera-0"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.656000 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.656038 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-b78987ed-b6d1-4303-9ded-c9daa1dc8928\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b78987ed-b6d1-4303-9ded-c9daa1dc8928\") pod \"openstack-galera-0\" (UID: \"02fcef15-a945-4200-8b5f-9697d0a2695f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/36c9308734bc411ab7c9d348361259a4508030998e878d47219f74d6602dfc7c/globalmount\"" pod="openstack/openstack-galera-0"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.656398 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/02fcef15-a945-4200-8b5f-9697d0a2695f-config-data-generated\") pod \"openstack-galera-0\" (UID: \"02fcef15-a945-4200-8b5f-9697d0a2695f\") " pod="openstack/openstack-galera-0"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.851352 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"]
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.852354 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.854350 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-58tzv"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.854660 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.864476 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"]
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.958424 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzbq9\" (UniqueName: \"kubernetes.io/projected/b227581a-1612-4b75-849d-b7318781a24b-kube-api-access-jzbq9\") pod \"memcached-0\" (UID: \"b227581a-1612-4b75-849d-b7318781a24b\") " pod="openstack/memcached-0"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.958527 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b227581a-1612-4b75-849d-b7318781a24b-config-data\") pod \"memcached-0\" (UID: \"b227581a-1612-4b75-849d-b7318781a24b\") " pod="openstack/memcached-0"
Jan 22 07:04:46 crc kubenswrapper[4982]: I0122 07:04:46.958625 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b227581a-1612-4b75-849d-b7318781a24b-kolla-config\") pod \"memcached-0\" (UID: \"b227581a-1612-4b75-849d-b7318781a24b\") " pod="openstack/memcached-0"
Jan 22 07:04:47 crc kubenswrapper[4982]: I0122 07:04:47.047396 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/02fcef15-a945-4200-8b5f-9697d0a2695f-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"02fcef15-a945-4200-8b5f-9697d0a2695f\") " pod="openstack/openstack-galera-0"
Jan 22 07:04:47 crc kubenswrapper[4982]: I0122 07:04:47.047396 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9hg2\" (UniqueName: \"kubernetes.io/projected/02fcef15-a945-4200-8b5f-9697d0a2695f-kube-api-access-s9hg2\") pod \"openstack-galera-0\" (UID: \"02fcef15-a945-4200-8b5f-9697d0a2695f\") " pod="openstack/openstack-galera-0"
Jan 22 07:04:47 crc kubenswrapper[4982]: I0122 07:04:47.047461 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02fcef15-a945-4200-8b5f-9697d0a2695f-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"02fcef15-a945-4200-8b5f-9697d0a2695f\") " pod="openstack/openstack-galera-0"
Jan 22 07:04:47 crc kubenswrapper[4982]: I0122 07:04:47.059935 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzbq9\" (UniqueName: \"kubernetes.io/projected/b227581a-1612-4b75-849d-b7318781a24b-kube-api-access-jzbq9\") pod \"memcached-0\" (UID: \"b227581a-1612-4b75-849d-b7318781a24b\") " pod="openstack/memcached-0"
Jan 22 07:04:47 crc kubenswrapper[4982]: I0122 07:04:47.060010 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b227581a-1612-4b75-849d-b7318781a24b-config-data\") pod \"memcached-0\" (UID: \"b227581a-1612-4b75-849d-b7318781a24b\") " pod="openstack/memcached-0"
Jan 22 07:04:47 crc kubenswrapper[4982]: I0122 07:04:47.060086 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b227581a-1612-4b75-849d-b7318781a24b-kolla-config\") pod \"memcached-0\" (UID: \"b227581a-1612-4b75-849d-b7318781a24b\") " pod="openstack/memcached-0"
Jan 22 07:04:47 crc kubenswrapper[4982]: I0122 07:04:47.060990 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b227581a-1612-4b75-849d-b7318781a24b-config-data\") pod \"memcached-0\" (UID: \"b227581a-1612-4b75-849d-b7318781a24b\") " pod="openstack/memcached-0"
Jan 22 07:04:47 crc kubenswrapper[4982]: I0122 07:04:47.061013 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b227581a-1612-4b75-849d-b7318781a24b-kolla-config\") pod \"memcached-0\" (UID: \"b227581a-1612-4b75-849d-b7318781a24b\") " pod="openstack/memcached-0"
Jan 22 07:04:47 crc kubenswrapper[4982]: I0122 07:04:47.146755 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzbq9\" (UniqueName: \"kubernetes.io/projected/b227581a-1612-4b75-849d-b7318781a24b-kube-api-access-jzbq9\") pod \"memcached-0\" (UID: \"b227581a-1612-4b75-849d-b7318781a24b\") " pod="openstack/memcached-0"
Jan 22 07:04:47 crc kubenswrapper[4982]: I0122 07:04:47.166575 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"5226590b-0c40-49b1-b368-54b9c4e1a741","Type":"ContainerStarted","Data":"426823258cf3ef918242748c727af6830aad8820e723a15bec87f3103fad5d9e"}
Jan 22 07:04:47 crc kubenswrapper[4982]: I0122 07:04:47.167922 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"fdfea17b-526b-43b7-b93b-bfb7342e4590","Type":"ContainerStarted","Data":"ea091077002f76f8e26ab3e939d6717123bb09d400b02a439abf626b1aba8e19"}
Jan 22 07:04:47 crc kubenswrapper[4982]: I0122 07:04:47.168099 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Jan 22 07:04:47 crc kubenswrapper[4982]: E0122 07:04:47.172061 4982 log.go:32] "CreateContainer in sandbox from runtime service failed" err=<
Jan 22 07:04:47 crc kubenswrapper[4982]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/e57ac048-0818-49b6-8fe3-4fac5cce799c/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory
Jan 22 07:04:47 crc kubenswrapper[4982]: > podSandboxID="a647f4679ad28f5503bdccdb0274df9ae60240deb08cca5aa3e564254cffeeea"
Jan 22 07:04:47 crc kubenswrapper[4982]: E0122 07:04:47.172220 4982 kuberuntime_manager.go:1274] "Unhandled Error" err=<
Jan 22 07:04:47 crc kubenswrapper[4982]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n8chc6h5bh56fh546hb7hc8h67h5bchffh577h697h5b5h5bdh59bhf6hf4h558hb5h578h595h5cchfbh644h59ch7fh654h547h587h5cbh5d5h8fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-w5qtv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-95587bc99-bthgq_openstack(e57ac048-0818-49b6-8fe3-4fac5cce799c): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/e57ac048-0818-49b6-8fe3-4fac5cce799c/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory
Jan 22 07:04:47 crc kubenswrapper[4982]: > logger="UnhandledError"
Jan 22 07:04:47 crc kubenswrapper[4982]: E0122 07:04:47.173382 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/e57ac048-0818-49b6-8fe3-4fac5cce799c/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-95587bc99-bthgq" podUID="e57ac048-0818-49b6-8fe3-4fac5cce799c"
Jan 22 07:04:47 crc kubenswrapper[4982]: I0122 07:04:47.378617 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-b78987ed-b6d1-4303-9ded-c9daa1dc8928\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b78987ed-b6d1-4303-9ded-c9daa1dc8928\") pod \"openstack-galera-0\" (UID: \"02fcef15-a945-4200-8b5f-9697d0a2695f\") " pod="openstack/openstack-galera-0"
Jan 22 07:04:47 crc kubenswrapper[4982]: I0122 07:04:47.472736 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Jan 22 07:04:47 crc kubenswrapper[4982]: I0122 07:04:47.751467 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"]
Jan 22 07:04:47 crc kubenswrapper[4982]: I0122 07:04:47.926909 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.097845 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"]
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.099192 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.101505 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-lvlhf"
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.102358 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts"
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.102411 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc"
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.102681 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data"
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.115921 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.177933 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.178015 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkgsv\" (UniqueName: \"kubernetes.io/projected/8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0-kube-api-access-vkgsv\") pod \"openstack-cell1-galera-0\" (UID: \"8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.178049 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.178108 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.178176 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.178197 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.178226 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-99544140-1a93-40c1-9b02-e0703b5300bb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-99544140-1a93-40c1-9b02-e0703b5300bb\") pod \"openstack-cell1-galera-0\" (UID: \"8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.178305 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.179577 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"b227581a-1612-4b75-849d-b7318781a24b","Type":"ContainerStarted","Data":"1b663e1821dfa0de73b2b7c5caec9ef4d5fe0db0dce6720807cb3ad9257b4aec"}
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.279743 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.279825 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.279869 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkgsv\" (UniqueName: \"kubernetes.io/projected/8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0-kube-api-access-vkgsv\") pod \"openstack-cell1-galera-0\" (UID: \"8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.279895 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.279936 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.279959 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.279973 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.280001 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-99544140-1a93-40c1-9b02-e0703b5300bb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-99544140-1a93-40c1-9b02-e0703b5300bb\") pod \"openstack-cell1-galera-0\" (UID: \"8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.280815 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.281126 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.281318 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.281477 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.285693 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.285723 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-99544140-1a93-40c1-9b02-e0703b5300bb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-99544140-1a93-40c1-9b02-e0703b5300bb\") pod \"openstack-cell1-galera-0\" (UID: \"8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/512ac8566bf9cebc8b0c272a5335485c56d6193d3dc71e17f16cf5c62d454602/globalmount\"" pod="openstack/openstack-cell1-galera-0"
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.645705 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.645794 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.648155 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vkgsv\" (UniqueName: \"kubernetes.io/projected/8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0-kube-api-access-vkgsv\") pod \"openstack-cell1-galera-0\" (UID: \"8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.675390 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-99544140-1a93-40c1-9b02-e0703b5300bb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-99544140-1a93-40c1-9b02-e0703b5300bb\") pod \"openstack-cell1-galera-0\" (UID: \"8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0\") " pod="openstack/openstack-cell1-galera-0"
Jan 22 07:04:48 crc kubenswrapper[4982]: I0122 07:04:48.723897 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
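Two variants of this message recur throughout the log: util.go:30 "No sandbox for pod can be found" on first start, and util.go:48 "No ready sandbox for pod can be found" when a sandbox exists but is dead or not ready (typically during pod teardown). A sketch of the decision these two paths imply; the types are illustrative, not the real kuberuntime_manager ones:

// Sketch only: mirrors the two sandbox messages seen in this log.
package main

import "fmt"

type sandbox struct{ ready bool }

// needsNewSandbox returns true when the pod has no sandbox at all, or when
// the one it has is no longer ready, matching the util.go:30 / util.go:48
// log lines respectively.
func needsNewSandbox(existing *sandbox) bool {
	if existing == nil {
		fmt.Println("No sandbox for pod can be found. Need to start a new one")
		return true
	}
	if !existing.ready {
		fmt.Println("No ready sandbox for pod can be found. Need to start a new one")
		return true
	}
	return false
}

func main() {
	_ = needsNewSandbox(nil)             // first start of openstack-cell1-galera-0
	_ = needsNewSandbox(&sandbox{false}) // the teardown paths later in this log
}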
4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-95587bc99-bthgq" Jan 22 07:04:54 crc kubenswrapper[4982]: I0122 07:04:54.252108 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0","Type":"ContainerStarted","Data":"0931c80556342b7b5f5592083ba9a55c48a37a683467997cf9f2a7727c755595"} Jan 22 07:04:54 crc kubenswrapper[4982]: I0122 07:04:54.253625 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"02fcef15-a945-4200-8b5f-9697d0a2695f","Type":"ContainerStarted","Data":"236807a493675bf1dd2d7eae3a229ca96e3cf530c7761afbd694cc9bf9288fd3"} Jan 22 07:04:54 crc kubenswrapper[4982]: I0122 07:04:54.256063 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"b227581a-1612-4b75-849d-b7318781a24b","Type":"ContainerStarted","Data":"e0f333b57bbbda221231d06179763d2c622633e0ee0917865a398f1c8bc3a76f"} Jan 22 07:04:54 crc kubenswrapper[4982]: I0122 07:04:54.256340 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Jan 22 07:04:54 crc kubenswrapper[4982]: I0122 07:04:54.278435 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-95587bc99-bthgq" podStartSLOduration=10.278416412 podStartE2EDuration="10.278416412s" podCreationTimestamp="2026-01-22 07:04:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:04:54.275138234 +0000 UTC m=+4755.113776277" watchObservedRunningTime="2026-01-22 07:04:54.278416412 +0000 UTC m=+4755.117054425" Jan 22 07:04:54 crc kubenswrapper[4982]: I0122 07:04:54.322326 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-92slm" podStartSLOduration=15.945124144 podStartE2EDuration="23.322306135s" podCreationTimestamp="2026-01-22 07:04:31 +0000 UTC" firstStartedPulling="2026-01-22 07:04:39.077787211 +0000 UTC m=+4739.916425254" lastFinishedPulling="2026-01-22 07:04:46.454969242 +0000 UTC m=+4747.293607245" observedRunningTime="2026-01-22 07:04:54.31654448 +0000 UTC m=+4755.155182483" watchObservedRunningTime="2026-01-22 07:04:54.322306135 +0000 UTC m=+4755.160944158" Jan 22 07:04:54 crc kubenswrapper[4982]: I0122 07:04:54.338976 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=8.338959074 podStartE2EDuration="8.338959074s" podCreationTimestamp="2026-01-22 07:04:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:04:54.331089661 +0000 UTC m=+4755.169727684" watchObservedRunningTime="2026-01-22 07:04:54.338959074 +0000 UTC m=+4755.177597077" Jan 22 07:04:54 crc kubenswrapper[4982]: I0122 07:04:54.447826 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5d79f765b5-wh42f" podStartSLOduration=10.447805227 podStartE2EDuration="10.447805227s" podCreationTimestamp="2026-01-22 07:04:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:04:54.443308287 +0000 UTC m=+4755.281946300" watchObservedRunningTime="2026-01-22 07:04:54.447805227 +0000 UTC m=+4755.286443230" Jan 22 07:04:54 crc 
Jan 22 07:04:54 crc kubenswrapper[4982]: I0122 07:04:54.812757 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5d79f765b5-wh42f"
Jan 22 07:04:56 crc kubenswrapper[4982]: I0122 07:04:56.719765 4982 scope.go:117] "RemoveContainer" containerID="800a63134ac3b413ea38ff457c1f2e665d54754e82057e18f9f0b05326428673"
Jan 22 07:04:56 crc kubenswrapper[4982]: E0122 07:04:56.720672 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
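The "back-off 5m0s" above is kubelet's container-restart backoff at its ceiling; this same error repeats for machine-config-daemon-gdpxx throughout the log because the delay is already capped. A sketch of the schedule, assuming the commonly documented defaults (10s initial delay, doubling per failed restart); the exact constants are an assumption, but the 5m cap is visible in the message itself:

// Sketch only: kubelet-style restart backoff, doubling up to a 5m cap.
package main

import (
	"fmt"
	"time"
)

func main() {
	const maxBackoff = 5 * time.Minute
	delay := 10 * time.Second
	for restart := 1; restart <= 7; restart++ {
		fmt.Printf("restart %d: wait %v\n", restart, delay)
		delay *= 2
		if delay > maxBackoff {
			delay = maxBackoff // machine-config-daemon is pinned here
		}
	}
}

After six failures the delay reaches 5m0s and stays there, which is why every subsequent sync attempt for this pod is skipped with the same message.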
Jan 22 07:04:59 crc kubenswrapper[4982]: I0122 07:04:59.412006 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-95587bc99-bthgq"
Jan 22 07:04:59 crc kubenswrapper[4982]: I0122 07:04:59.814840 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5d79f765b5-wh42f"
Jan 22 07:04:59 crc kubenswrapper[4982]: I0122 07:04:59.882016 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-95587bc99-bthgq"]
Jan 22 07:05:00 crc kubenswrapper[4982]: I0122 07:05:00.304047 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-95587bc99-bthgq" podUID="e57ac048-0818-49b6-8fe3-4fac5cce799c" containerName="dnsmasq-dns" containerID="cri-o://1263e2dfd7d4e5f7e54cf90110e0f79b3cd3924b7d332b901e535cb9a369b62a" gracePeriod=10
Jan 22 07:05:01 crc kubenswrapper[4982]: I0122 07:05:01.311864 4982 generic.go:334] "Generic (PLEG): container finished" podID="e57ac048-0818-49b6-8fe3-4fac5cce799c" containerID="1263e2dfd7d4e5f7e54cf90110e0f79b3cd3924b7d332b901e535cb9a369b62a" exitCode=0
Jan 22 07:05:01 crc kubenswrapper[4982]: I0122 07:05:01.311956 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95587bc99-bthgq" event={"ID":"e57ac048-0818-49b6-8fe3-4fac5cce799c","Type":"ContainerDied","Data":"1263e2dfd7d4e5f7e54cf90110e0f79b3cd3924b7d332b901e535cb9a369b62a"}
Jan 22 07:05:01 crc kubenswrapper[4982]: I0122 07:05:01.312185 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95587bc99-bthgq" event={"ID":"e57ac048-0818-49b6-8fe3-4fac5cce799c","Type":"ContainerDied","Data":"a647f4679ad28f5503bdccdb0274df9ae60240deb08cca5aa3e564254cffeeea"}
Jan 22 07:05:01 crc kubenswrapper[4982]: I0122 07:05:01.312201 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a647f4679ad28f5503bdccdb0274df9ae60240deb08cca5aa3e564254cffeeea"
Jan 22 07:05:01 crc kubenswrapper[4982]: I0122 07:05:01.376014 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95587bc99-bthgq"
Jan 22 07:05:01 crc kubenswrapper[4982]: I0122 07:05:01.501750 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e57ac048-0818-49b6-8fe3-4fac5cce799c-dns-svc\") pod \"e57ac048-0818-49b6-8fe3-4fac5cce799c\" (UID: \"e57ac048-0818-49b6-8fe3-4fac5cce799c\") "
Jan 22 07:05:01 crc kubenswrapper[4982]: I0122 07:05:01.501842 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e57ac048-0818-49b6-8fe3-4fac5cce799c-config\") pod \"e57ac048-0818-49b6-8fe3-4fac5cce799c\" (UID: \"e57ac048-0818-49b6-8fe3-4fac5cce799c\") "
Jan 22 07:05:01 crc kubenswrapper[4982]: I0122 07:05:01.501977 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w5qtv\" (UniqueName: \"kubernetes.io/projected/e57ac048-0818-49b6-8fe3-4fac5cce799c-kube-api-access-w5qtv\") pod \"e57ac048-0818-49b6-8fe3-4fac5cce799c\" (UID: \"e57ac048-0818-49b6-8fe3-4fac5cce799c\") "
Jan 22 07:05:01 crc kubenswrapper[4982]: I0122 07:05:01.507292 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e57ac048-0818-49b6-8fe3-4fac5cce799c-kube-api-access-w5qtv" (OuterVolumeSpecName: "kube-api-access-w5qtv") pod "e57ac048-0818-49b6-8fe3-4fac5cce799c" (UID: "e57ac048-0818-49b6-8fe3-4fac5cce799c"). InnerVolumeSpecName "kube-api-access-w5qtv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 07:05:01 crc kubenswrapper[4982]: I0122 07:05:01.528549 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-92slm"
Jan 22 07:05:01 crc kubenswrapper[4982]: I0122 07:05:01.529132 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-92slm"
Jan 22 07:05:01 crc kubenswrapper[4982]: I0122 07:05:01.541806 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e57ac048-0818-49b6-8fe3-4fac5cce799c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e57ac048-0818-49b6-8fe3-4fac5cce799c" (UID: "e57ac048-0818-49b6-8fe3-4fac5cce799c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 07:05:01 crc kubenswrapper[4982]: I0122 07:05:01.544586 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e57ac048-0818-49b6-8fe3-4fac5cce799c-config" (OuterVolumeSpecName: "config") pod "e57ac048-0818-49b6-8fe3-4fac5cce799c" (UID: "e57ac048-0818-49b6-8fe3-4fac5cce799c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 07:05:01 crc kubenswrapper[4982]: I0122 07:05:01.579736 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-92slm"
Jan 22 07:05:01 crc kubenswrapper[4982]: I0122 07:05:01.604529 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w5qtv\" (UniqueName: \"kubernetes.io/projected/e57ac048-0818-49b6-8fe3-4fac5cce799c-kube-api-access-w5qtv\") on node \"crc\" DevicePath \"\""
Jan 22 07:05:01 crc kubenswrapper[4982]: I0122 07:05:01.604575 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e57ac048-0818-49b6-8fe3-4fac5cce799c-dns-svc\") on node \"crc\" DevicePath \"\""
Jan 22 07:05:01 crc kubenswrapper[4982]: I0122 07:05:01.604586 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e57ac048-0818-49b6-8fe3-4fac5cce799c-config\") on node \"crc\" DevicePath \"\""
Jan 22 07:05:02 crc kubenswrapper[4982]: I0122 07:05:02.169398 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0"
Jan 22 07:05:02 crc kubenswrapper[4982]: I0122 07:05:02.318187 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95587bc99-bthgq"
Jan 22 07:05:02 crc kubenswrapper[4982]: I0122 07:05:02.352310 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-95587bc99-bthgq"]
Jan 22 07:05:02 crc kubenswrapper[4982]: I0122 07:05:02.374657 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-95587bc99-bthgq"]
Jan 22 07:05:02 crc kubenswrapper[4982]: I0122 07:05:02.393450 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-92slm"
Jan 22 07:05:02 crc kubenswrapper[4982]: I0122 07:05:02.470140 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-92slm"]
Jan 22 07:05:03 crc kubenswrapper[4982]: I0122 07:05:03.737611 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e57ac048-0818-49b6-8fe3-4fac5cce799c" path="/var/lib/kubelet/pods/e57ac048-0818-49b6-8fe3-4fac5cce799c/volumes"
Jan 22 07:05:04 crc kubenswrapper[4982]: I0122 07:05:04.336919 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-92slm" podUID="31771ced-78f6-467a-84a5-483833137d65" containerName="registry-server" containerID="cri-o://32b2ffaeb997b17bc3b809b76b0c583a9fb06eae27374b7846a457d8e815c6f6" gracePeriod=2
Jan 22 07:05:07 crc kubenswrapper[4982]: I0122 07:05:07.369046 4982 generic.go:334] "Generic (PLEG): container finished" podID="31771ced-78f6-467a-84a5-483833137d65" containerID="32b2ffaeb997b17bc3b809b76b0c583a9fb06eae27374b7846a457d8e815c6f6" exitCode=0
Jan 22 07:05:07 crc kubenswrapper[4982]: I0122 07:05:07.369107 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-92slm" event={"ID":"31771ced-78f6-467a-84a5-483833137d65","Type":"ContainerDied","Data":"32b2ffaeb997b17bc3b809b76b0c583a9fb06eae27374b7846a457d8e815c6f6"}
Jan 22 07:05:07 crc kubenswrapper[4982]: I0122 07:05:07.458756 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-92slm"
Jan 22 07:05:07 crc kubenswrapper[4982]: I0122 07:05:07.606417 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31771ced-78f6-467a-84a5-483833137d65-catalog-content\") pod \"31771ced-78f6-467a-84a5-483833137d65\" (UID: \"31771ced-78f6-467a-84a5-483833137d65\") "
Jan 22 07:05:07 crc kubenswrapper[4982]: I0122 07:05:07.606588 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z69rp\" (UniqueName: \"kubernetes.io/projected/31771ced-78f6-467a-84a5-483833137d65-kube-api-access-z69rp\") pod \"31771ced-78f6-467a-84a5-483833137d65\" (UID: \"31771ced-78f6-467a-84a5-483833137d65\") "
Jan 22 07:05:07 crc kubenswrapper[4982]: I0122 07:05:07.607552 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31771ced-78f6-467a-84a5-483833137d65-utilities" (OuterVolumeSpecName: "utilities") pod "31771ced-78f6-467a-84a5-483833137d65" (UID: "31771ced-78f6-467a-84a5-483833137d65"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 07:05:07 crc kubenswrapper[4982]: I0122 07:05:07.606619 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31771ced-78f6-467a-84a5-483833137d65-utilities\") pod \"31771ced-78f6-467a-84a5-483833137d65\" (UID: \"31771ced-78f6-467a-84a5-483833137d65\") "
Jan 22 07:05:07 crc kubenswrapper[4982]: I0122 07:05:07.609464 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31771ced-78f6-467a-84a5-483833137d65-utilities\") on node \"crc\" DevicePath \"\""
Jan 22 07:05:07 crc kubenswrapper[4982]: I0122 07:05:07.612148 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31771ced-78f6-467a-84a5-483833137d65-kube-api-access-z69rp" (OuterVolumeSpecName: "kube-api-access-z69rp") pod "31771ced-78f6-467a-84a5-483833137d65" (UID: "31771ced-78f6-467a-84a5-483833137d65"). InnerVolumeSpecName "kube-api-access-z69rp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 07:05:07 crc kubenswrapper[4982]: I0122 07:05:07.660542 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31771ced-78f6-467a-84a5-483833137d65-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "31771ced-78f6-467a-84a5-483833137d65" (UID: "31771ced-78f6-467a-84a5-483833137d65"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 07:05:07 crc kubenswrapper[4982]: I0122 07:05:07.712189 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z69rp\" (UniqueName: \"kubernetes.io/projected/31771ced-78f6-467a-84a5-483833137d65-kube-api-access-z69rp\") on node \"crc\" DevicePath \"\""
Jan 22 07:05:07 crc kubenswrapper[4982]: I0122 07:05:07.712226 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31771ced-78f6-467a-84a5-483833137d65-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 22 07:05:08 crc kubenswrapper[4982]: I0122 07:05:08.382238 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-92slm" event={"ID":"31771ced-78f6-467a-84a5-483833137d65","Type":"ContainerDied","Data":"bedc44dfd8927a61049d5cce58c05941dd70f4c4e9bd9910f986050c2ce3dbf6"}
Jan 22 07:05:08 crc kubenswrapper[4982]: I0122 07:05:08.382747 4982 scope.go:117] "RemoveContainer" containerID="32b2ffaeb997b17bc3b809b76b0c583a9fb06eae27374b7846a457d8e815c6f6"
Jan 22 07:05:08 crc kubenswrapper[4982]: I0122 07:05:08.382625 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-92slm"
Jan 22 07:05:08 crc kubenswrapper[4982]: I0122 07:05:08.402273 4982 scope.go:117] "RemoveContainer" containerID="41b707cdd2d471b5553aaf3d52b95a9ebd7e1cc8cf345c20dd09015ed36a2498"
Jan 22 07:05:08 crc kubenswrapper[4982]: I0122 07:05:08.407016 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-92slm"]
Jan 22 07:05:08 crc kubenswrapper[4982]: I0122 07:05:08.414267 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-92slm"]
Jan 22 07:05:08 crc kubenswrapper[4982]: I0122 07:05:08.425745 4982 scope.go:117] "RemoveContainer" containerID="d58600dc1974b61b17c348da4f6186f3f894423fc39851782ca9aa49675b34d6"
Jan 22 07:05:09 crc kubenswrapper[4982]: I0122 07:05:09.723304 4982 scope.go:117] "RemoveContainer" containerID="800a63134ac3b413ea38ff457c1f2e665d54754e82057e18f9f0b05326428673"
Jan 22 07:05:09 crc kubenswrapper[4982]: E0122 07:05:09.723614 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 07:05:09 crc kubenswrapper[4982]: I0122 07:05:09.732138 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31771ced-78f6-467a-84a5-483833137d65" path="/var/lib/kubelet/pods/31771ced-78f6-467a-84a5-483833137d65/volumes"
Jan 22 07:05:12 crc kubenswrapper[4982]: I0122 07:05:12.412893 4982 generic.go:334] "Generic (PLEG): container finished" podID="8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0" containerID="0931c80556342b7b5f5592083ba9a55c48a37a683467997cf9f2a7727c755595" exitCode=0
Jan 22 07:05:12 crc kubenswrapper[4982]: I0122 07:05:12.412981 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0","Type":"ContainerDied","Data":"0931c80556342b7b5f5592083ba9a55c48a37a683467997cf9f2a7727c755595"}
Jan 22 07:05:12 crc kubenswrapper[4982]: I0122 07:05:12.416114 4982 generic.go:334] "Generic (PLEG): container finished" podID="02fcef15-a945-4200-8b5f-9697d0a2695f" containerID="236807a493675bf1dd2d7eae3a229ca96e3cf530c7761afbd694cc9bf9288fd3" exitCode=0
Jan 22 07:05:12 crc kubenswrapper[4982]: I0122 07:05:12.416155 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"02fcef15-a945-4200-8b5f-9697d0a2695f","Type":"ContainerDied","Data":"236807a493675bf1dd2d7eae3a229ca96e3cf530c7761afbd694cc9bf9288fd3"}
Jan 22 07:05:13 crc kubenswrapper[4982]: I0122 07:05:13.427110 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0","Type":"ContainerStarted","Data":"3f9bcd2b1b0be3a14a29749a8d4e451db62e2ddd5e3e0009f63869c70b81c9ae"}
Jan 22 07:05:13 crc kubenswrapper[4982]: I0122 07:05:13.432022 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"02fcef15-a945-4200-8b5f-9697d0a2695f","Type":"ContainerStarted","Data":"9e4ca8c31e3851de6a335be54acbd29257481a7c5b2cd412726732c5cc951228"}
Jan 22 07:05:13 crc kubenswrapper[4982]: I0122 07:05:13.456615 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=26.456594117 podStartE2EDuration="26.456594117s" podCreationTimestamp="2026-01-22 07:04:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:05:13.45040421 +0000 UTC m=+4774.289042213" watchObservedRunningTime="2026-01-22 07:05:13.456594117 +0000 UTC m=+4774.295232120"
Jan 22 07:05:13 crc kubenswrapper[4982]: I0122 07:05:13.472654 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=28.472630449 podStartE2EDuration="28.472630449s" podCreationTimestamp="2026-01-22 07:04:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:05:13.471788937 +0000 UTC m=+4774.310426940" watchObservedRunningTime="2026-01-22 07:05:13.472630449 +0000 UTC m=+4774.311268462"
Jan 22 07:05:15 crc kubenswrapper[4982]: I0122 07:05:15.168924 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-qgc8l"]
Jan 22 07:05:15 crc kubenswrapper[4982]: E0122 07:05:15.169545 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31771ced-78f6-467a-84a5-483833137d65" containerName="extract-content"
Jan 22 07:05:15 crc kubenswrapper[4982]: I0122 07:05:15.169558 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="31771ced-78f6-467a-84a5-483833137d65" containerName="extract-content"
Jan 22 07:05:15 crc kubenswrapper[4982]: E0122 07:05:15.169577 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e57ac048-0818-49b6-8fe3-4fac5cce799c" containerName="init"
Jan 22 07:05:15 crc kubenswrapper[4982]: I0122 07:05:15.169583 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e57ac048-0818-49b6-8fe3-4fac5cce799c" containerName="init"
Jan 22 07:05:15 crc kubenswrapper[4982]: E0122 07:05:15.169608 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31771ced-78f6-467a-84a5-483833137d65" containerName="registry-server"
Jan 22 07:05:15 crc kubenswrapper[4982]: I0122 07:05:15.169615 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="31771ced-78f6-467a-84a5-483833137d65" containerName="registry-server"
Jan 22 07:05:15 crc kubenswrapper[4982]: E0122 07:05:15.169622 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e57ac048-0818-49b6-8fe3-4fac5cce799c" containerName="dnsmasq-dns"
Jan 22 07:05:15 crc kubenswrapper[4982]: I0122 07:05:15.169627 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e57ac048-0818-49b6-8fe3-4fac5cce799c" containerName="dnsmasq-dns"
Jan 22 07:05:15 crc kubenswrapper[4982]: E0122 07:05:15.169635 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31771ced-78f6-467a-84a5-483833137d65" containerName="extract-utilities"
Jan 22 07:05:15 crc kubenswrapper[4982]: I0122 07:05:15.169641 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="31771ced-78f6-467a-84a5-483833137d65" containerName="extract-utilities"
Jan 22 07:05:15 crc kubenswrapper[4982]: I0122 07:05:15.169774 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="31771ced-78f6-467a-84a5-483833137d65" containerName="registry-server"
Jan 22 07:05:15 crc kubenswrapper[4982]: I0122 07:05:15.169790 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="e57ac048-0818-49b6-8fe3-4fac5cce799c" containerName="dnsmasq-dns"
Jan 22 07:05:15 crc kubenswrapper[4982]: I0122 07:05:15.170846 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qgc8l"
Jan 22 07:05:15 crc kubenswrapper[4982]: I0122 07:05:15.185579 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qgc8l"]
Jan 22 07:05:15 crc kubenswrapper[4982]: I0122 07:05:15.329684 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7n9sw\" (UniqueName: \"kubernetes.io/projected/672f7110-2b22-4d01-afe4-da981ef61d52-kube-api-access-7n9sw\") pod \"redhat-operators-qgc8l\" (UID: \"672f7110-2b22-4d01-afe4-da981ef61d52\") " pod="openshift-marketplace/redhat-operators-qgc8l"
Jan 22 07:05:15 crc kubenswrapper[4982]: I0122 07:05:15.329762 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/672f7110-2b22-4d01-afe4-da981ef61d52-utilities\") pod \"redhat-operators-qgc8l\" (UID: \"672f7110-2b22-4d01-afe4-da981ef61d52\") " pod="openshift-marketplace/redhat-operators-qgc8l"
Jan 22 07:05:15 crc kubenswrapper[4982]: I0122 07:05:15.329802 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/672f7110-2b22-4d01-afe4-da981ef61d52-catalog-content\") pod \"redhat-operators-qgc8l\" (UID: \"672f7110-2b22-4d01-afe4-da981ef61d52\") " pod="openshift-marketplace/redhat-operators-qgc8l"
Jan 22 07:05:15 crc kubenswrapper[4982]: I0122 07:05:15.431580 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/672f7110-2b22-4d01-afe4-da981ef61d52-utilities\") pod \"redhat-operators-qgc8l\" (UID: \"672f7110-2b22-4d01-afe4-da981ef61d52\") " pod="openshift-marketplace/redhat-operators-qgc8l"
Jan 22 07:05:15 crc kubenswrapper[4982]: I0122 07:05:15.431672 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/672f7110-2b22-4d01-afe4-da981ef61d52-catalog-content\") pod \"redhat-operators-qgc8l\" (UID: \"672f7110-2b22-4d01-afe4-da981ef61d52\") " pod="openshift-marketplace/redhat-operators-qgc8l"
Jan 22 07:05:15 crc kubenswrapper[4982]: I0122 07:05:15.431770 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7n9sw\" (UniqueName: \"kubernetes.io/projected/672f7110-2b22-4d01-afe4-da981ef61d52-kube-api-access-7n9sw\") pod \"redhat-operators-qgc8l\" (UID: \"672f7110-2b22-4d01-afe4-da981ef61d52\") " pod="openshift-marketplace/redhat-operators-qgc8l"
Jan 22 07:05:15 crc kubenswrapper[4982]: I0122 07:05:15.432626 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/672f7110-2b22-4d01-afe4-da981ef61d52-utilities\") pod \"redhat-operators-qgc8l\" (UID: \"672f7110-2b22-4d01-afe4-da981ef61d52\") " pod="openshift-marketplace/redhat-operators-qgc8l"
Jan 22 07:05:15 crc kubenswrapper[4982]: I0122 07:05:15.432885 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/672f7110-2b22-4d01-afe4-da981ef61d52-catalog-content\") pod \"redhat-operators-qgc8l\" (UID: \"672f7110-2b22-4d01-afe4-da981ef61d52\") " pod="openshift-marketplace/redhat-operators-qgc8l"
Jan 22 07:05:15 crc kubenswrapper[4982]: I0122 07:05:15.452676 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7n9sw\" (UniqueName: \"kubernetes.io/projected/672f7110-2b22-4d01-afe4-da981ef61d52-kube-api-access-7n9sw\") pod \"redhat-operators-qgc8l\" (UID: \"672f7110-2b22-4d01-afe4-da981ef61d52\") " pod="openshift-marketplace/redhat-operators-qgc8l"
Jan 22 07:05:15 crc kubenswrapper[4982]: I0122 07:05:15.489815 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qgc8l"
Jan 22 07:05:15 crc kubenswrapper[4982]: I0122 07:05:15.921183 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qgc8l"]
Jan 22 07:05:15 crc kubenswrapper[4982]: W0122 07:05:15.934555 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod672f7110_2b22_4d01_afe4_da981ef61d52.slice/crio-172d85907738cda77b5325a879256cb1061cb140d04e5e136649737a460d18be WatchSource:0}: Error finding container 172d85907738cda77b5325a879256cb1061cb140d04e5e136649737a460d18be: Status 404 returned error can't find the container with id 172d85907738cda77b5325a879256cb1061cb140d04e5e136649737a460d18be
Jan 22 07:05:16 crc kubenswrapper[4982]: I0122 07:05:16.455543 4982 generic.go:334] "Generic (PLEG): container finished" podID="672f7110-2b22-4d01-afe4-da981ef61d52" containerID="342c597068cce0065daee9b5ea06674b29b01f91a9293ebfe0d9ad1473b47cba" exitCode=0
Jan 22 07:05:16 crc kubenswrapper[4982]: I0122 07:05:16.455607 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qgc8l" event={"ID":"672f7110-2b22-4d01-afe4-da981ef61d52","Type":"ContainerDied","Data":"342c597068cce0065daee9b5ea06674b29b01f91a9293ebfe0d9ad1473b47cba"}
Jan 22 07:05:16 crc kubenswrapper[4982]: I0122 07:05:16.455638 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qgc8l" event={"ID":"672f7110-2b22-4d01-afe4-da981ef61d52","Type":"ContainerStarted","Data":"172d85907738cda77b5325a879256cb1061cb140d04e5e136649737a460d18be"}
Jan 22 07:05:16 crc kubenswrapper[4982]: I0122 07:05:16.457410 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 22 07:05:17 crc kubenswrapper[4982]: I0122 07:05:17.472997 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0"
Jan 22 07:05:17 crc kubenswrapper[4982]: I0122 07:05:17.473240 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0"
Jan 22 07:05:18 crc kubenswrapper[4982]: I0122 07:05:18.724829 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0"
Jan 22 07:05:18 crc kubenswrapper[4982]: I0122 07:05:18.725177 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0"
Jan 22 07:05:19 crc kubenswrapper[4982]: I0122 07:05:19.482693 4982 generic.go:334] "Generic (PLEG): container finished" podID="672f7110-2b22-4d01-afe4-da981ef61d52" containerID="0f96ed4ebdb7daf6c8766fb1a879ba719e9e9633ab07d9a265807c4ac3607f7b" exitCode=0
Jan 22 07:05:19 crc kubenswrapper[4982]: I0122 07:05:19.482739 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qgc8l" event={"ID":"672f7110-2b22-4d01-afe4-da981ef61d52","Type":"ContainerDied","Data":"0f96ed4ebdb7daf6c8766fb1a879ba719e9e9633ab07d9a265807c4ac3607f7b"}
Jan 22 07:05:19 crc kubenswrapper[4982]: I0122 07:05:19.736530 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0"
Jan 22 07:05:19 crc kubenswrapper[4982]: I0122 07:05:19.817633 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/openstack-cell1-galera-0" podUID="8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0" containerName="galera" probeResult="failure" output=<
Jan 22 07:05:19 crc kubenswrapper[4982]: wsrep_local_state_comment (Joined) differs from Synced
Jan 22 07:05:19 crc kubenswrapper[4982]: >
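The readiness failure above comes from a probe script inside the galera container (the script itself is not shown in the log) that compares Galera's wsrep_local_state_comment status variable against "Synced"; in a real pod that value would come from SHOW STATUS LIKE 'wsrep_local_state_comment' against the local mysqld. A minimal sketch of the check, with the state fetch stubbed out:

// Sketch only: reproduces the comparison behind the probe output above.
package main

import (
	"fmt"
	"os"
)

// checkSynced fails unless the node reports the fully synced state; any
// other value (Joined, Donor/Desynced, ...) means it is still catching up.
func checkSynced(state string) error {
	if state != "Synced" {
		return fmt.Errorf("wsrep_local_state_comment (%s) differs from Synced", state)
	}
	return nil
}

func main() {
	// While openstack-cell1-galera-0 was still joining the cluster it
	// reported "Joined", so the probe exited non-zero and the pod stayed
	// unready until the 07:05:28 "ready" transition later in this log.
	if err := checkSynced("Joined"); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}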
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:05:25 crc kubenswrapper[4982]: I0122 07:05:25.419874 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-4gk79"] Jan 22 07:05:25 crc kubenswrapper[4982]: I0122 07:05:25.421623 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-4gk79" Jan 22 07:05:25 crc kubenswrapper[4982]: I0122 07:05:25.423539 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret" Jan 22 07:05:25 crc kubenswrapper[4982]: I0122 07:05:25.446830 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-4gk79"] Jan 22 07:05:25 crc kubenswrapper[4982]: I0122 07:05:25.490783 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-qgc8l" Jan 22 07:05:25 crc kubenswrapper[4982]: I0122 07:05:25.490886 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-qgc8l" Jan 22 07:05:25 crc kubenswrapper[4982]: I0122 07:05:25.503918 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59pgd\" (UniqueName: \"kubernetes.io/projected/ddc8ea6b-7232-4410-b2ee-70b8f977c1b0-kube-api-access-59pgd\") pod \"root-account-create-update-4gk79\" (UID: \"ddc8ea6b-7232-4410-b2ee-70b8f977c1b0\") " pod="openstack/root-account-create-update-4gk79" Jan 22 07:05:25 crc kubenswrapper[4982]: I0122 07:05:25.504047 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ddc8ea6b-7232-4410-b2ee-70b8f977c1b0-operator-scripts\") pod \"root-account-create-update-4gk79\" (UID: \"ddc8ea6b-7232-4410-b2ee-70b8f977c1b0\") " pod="openstack/root-account-create-update-4gk79" Jan 22 07:05:25 crc kubenswrapper[4982]: I0122 07:05:25.532294 4982 generic.go:334] "Generic (PLEG): container finished" podID="fdfea17b-526b-43b7-b93b-bfb7342e4590" containerID="892482b97cdfabfccd52afa11003f1f971912c25a4f31653dc404213c7c03302" exitCode=0 Jan 22 07:05:25 crc kubenswrapper[4982]: I0122 07:05:25.532396 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"fdfea17b-526b-43b7-b93b-bfb7342e4590","Type":"ContainerDied","Data":"892482b97cdfabfccd52afa11003f1f971912c25a4f31653dc404213c7c03302"} Jan 22 07:05:25 crc kubenswrapper[4982]: I0122 07:05:25.534966 4982 generic.go:334] "Generic (PLEG): container finished" podID="5226590b-0c40-49b1-b368-54b9c4e1a741" containerID="07e2cfac5e3447b5830561db01e65afbfd9f2f5f38aa8c5d288cf488a5a9f55a" exitCode=0 Jan 22 07:05:25 crc kubenswrapper[4982]: I0122 07:05:25.535004 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"5226590b-0c40-49b1-b368-54b9c4e1a741","Type":"ContainerDied","Data":"07e2cfac5e3447b5830561db01e65afbfd9f2f5f38aa8c5d288cf488a5a9f55a"} Jan 22 07:05:25 crc kubenswrapper[4982]: I0122 07:05:25.605491 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/ddc8ea6b-7232-4410-b2ee-70b8f977c1b0-operator-scripts\") pod \"root-account-create-update-4gk79\" (UID: \"ddc8ea6b-7232-4410-b2ee-70b8f977c1b0\") " pod="openstack/root-account-create-update-4gk79" Jan 22 07:05:25 crc kubenswrapper[4982]: I0122 07:05:25.605632 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59pgd\" (UniqueName: \"kubernetes.io/projected/ddc8ea6b-7232-4410-b2ee-70b8f977c1b0-kube-api-access-59pgd\") pod \"root-account-create-update-4gk79\" (UID: \"ddc8ea6b-7232-4410-b2ee-70b8f977c1b0\") " pod="openstack/root-account-create-update-4gk79" Jan 22 07:05:25 crc kubenswrapper[4982]: I0122 07:05:25.609889 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ddc8ea6b-7232-4410-b2ee-70b8f977c1b0-operator-scripts\") pod \"root-account-create-update-4gk79\" (UID: \"ddc8ea6b-7232-4410-b2ee-70b8f977c1b0\") " pod="openstack/root-account-create-update-4gk79" Jan 22 07:05:25 crc kubenswrapper[4982]: I0122 07:05:25.627194 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59pgd\" (UniqueName: \"kubernetes.io/projected/ddc8ea6b-7232-4410-b2ee-70b8f977c1b0-kube-api-access-59pgd\") pod \"root-account-create-update-4gk79\" (UID: \"ddc8ea6b-7232-4410-b2ee-70b8f977c1b0\") " pod="openstack/root-account-create-update-4gk79" Jan 22 07:05:25 crc kubenswrapper[4982]: I0122 07:05:25.750303 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-4gk79" Jan 22 07:05:26 crc kubenswrapper[4982]: I0122 07:05:26.534063 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-qgc8l" podUID="672f7110-2b22-4d01-afe4-da981ef61d52" containerName="registry-server" probeResult="failure" output=< Jan 22 07:05:26 crc kubenswrapper[4982]: timeout: failed to connect service ":50051" within 1s Jan 22 07:05:26 crc kubenswrapper[4982]: > Jan 22 07:05:26 crc kubenswrapper[4982]: I0122 07:05:26.544158 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"5226590b-0c40-49b1-b368-54b9c4e1a741","Type":"ContainerStarted","Data":"02039c3a09b9be7d7884025ee211672bddcd565ebd87a89ff5e8998773c3ab46"} Jan 22 07:05:26 crc kubenswrapper[4982]: I0122 07:05:26.544487 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Jan 22 07:05:26 crc kubenswrapper[4982]: I0122 07:05:26.549253 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"fdfea17b-526b-43b7-b93b-bfb7342e4590","Type":"ContainerStarted","Data":"b2deb2e89cfebe5d8ca644c16c8ad8dd0490ce220f0e2815e686600dbe86caa9"} Jan 22 07:05:26 crc kubenswrapper[4982]: I0122 07:05:26.550008 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Jan 22 07:05:26 crc kubenswrapper[4982]: I0122 07:05:26.591116 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=42.591068731 podStartE2EDuration="42.591068731s" podCreationTimestamp="2026-01-22 07:04:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:05:26.583577399 +0000 UTC m=+4787.422215422" watchObservedRunningTime="2026-01-22 07:05:26.591068731 +0000 UTC m=+4787.429706734" 
Jan 22 07:05:26 crc kubenswrapper[4982]: I0122 07:05:26.544158 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"5226590b-0c40-49b1-b368-54b9c4e1a741","Type":"ContainerStarted","Data":"02039c3a09b9be7d7884025ee211672bddcd565ebd87a89ff5e8998773c3ab46"}
Jan 22 07:05:26 crc kubenswrapper[4982]: I0122 07:05:26.544487 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:05:26 crc kubenswrapper[4982]: I0122 07:05:26.549253 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"fdfea17b-526b-43b7-b93b-bfb7342e4590","Type":"ContainerStarted","Data":"b2deb2e89cfebe5d8ca644c16c8ad8dd0490ce220f0e2815e686600dbe86caa9"}
Jan 22 07:05:26 crc kubenswrapper[4982]: I0122 07:05:26.550008 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0"
Jan 22 07:05:26 crc kubenswrapper[4982]: I0122 07:05:26.591116 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=42.591068731 podStartE2EDuration="42.591068731s" podCreationTimestamp="2026-01-22 07:04:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:05:26.583577399 +0000 UTC m=+4787.422215422" watchObservedRunningTime="2026-01-22 07:05:26.591068731 +0000 UTC m=+4787.429706734"
Jan 22 07:05:26 crc kubenswrapper[4982]: I0122 07:05:26.606665 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=42.606644671 podStartE2EDuration="42.606644671s" podCreationTimestamp="2026-01-22 07:04:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:05:26.604260176 +0000 UTC m=+4787.442898179" watchObservedRunningTime="2026-01-22 07:05:26.606644671 +0000 UTC m=+4787.445282674"
Jan 22 07:05:26 crc kubenswrapper[4982]: I0122 07:05:26.632043 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-4gk79"]
Jan 22 07:05:26 crc kubenswrapper[4982]: W0122 07:05:26.634593 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podddc8ea6b_7232_4410_b2ee_70b8f977c1b0.slice/crio-c076c9561b818844767dde8009775cdabe5794a0bf5d1951b5c0a988c464b672 WatchSource:0}: Error finding container c076c9561b818844767dde8009775cdabe5794a0bf5d1951b5c0a988c464b672: Status 404 returned error can't find the container with id c076c9561b818844767dde8009775cdabe5794a0bf5d1951b5c0a988c464b672
Jan 22 07:05:27 crc kubenswrapper[4982]: I0122 07:05:27.561492 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-4gk79" event={"ID":"ddc8ea6b-7232-4410-b2ee-70b8f977c1b0","Type":"ContainerStarted","Data":"414e130d17050cae251992e48b4dbc1db06f79ef659a8750cf0ea3bd97886d46"}
Jan 22 07:05:27 crc kubenswrapper[4982]: I0122 07:05:27.562027 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-4gk79" event={"ID":"ddc8ea6b-7232-4410-b2ee-70b8f977c1b0","Type":"ContainerStarted","Data":"c076c9561b818844767dde8009775cdabe5794a0bf5d1951b5c0a988c464b672"}
Jan 22 07:05:28 crc kubenswrapper[4982]: I0122 07:05:28.569045 4982 generic.go:334] "Generic (PLEG): container finished" podID="ddc8ea6b-7232-4410-b2ee-70b8f977c1b0" containerID="414e130d17050cae251992e48b4dbc1db06f79ef659a8750cf0ea3bd97886d46" exitCode=0
Jan 22 07:05:28 crc kubenswrapper[4982]: I0122 07:05:28.569095 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-4gk79" event={"ID":"ddc8ea6b-7232-4410-b2ee-70b8f977c1b0","Type":"ContainerDied","Data":"414e130d17050cae251992e48b4dbc1db06f79ef659a8750cf0ea3bd97886d46"}
Jan 22 07:05:28 crc kubenswrapper[4982]: I0122 07:05:28.815481 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0"
Jan 22 07:05:29 crc kubenswrapper[4982]: I0122 07:05:29.919115 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-4gk79"
Jan 22 07:05:29 crc kubenswrapper[4982]: I0122 07:05:29.972757 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ddc8ea6b-7232-4410-b2ee-70b8f977c1b0-operator-scripts\") pod \"ddc8ea6b-7232-4410-b2ee-70b8f977c1b0\" (UID: \"ddc8ea6b-7232-4410-b2ee-70b8f977c1b0\") "
Jan 22 07:05:29 crc kubenswrapper[4982]: I0122 07:05:29.972917 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-59pgd\" (UniqueName: \"kubernetes.io/projected/ddc8ea6b-7232-4410-b2ee-70b8f977c1b0-kube-api-access-59pgd\") pod \"ddc8ea6b-7232-4410-b2ee-70b8f977c1b0\" (UID: \"ddc8ea6b-7232-4410-b2ee-70b8f977c1b0\") "
Jan 22 07:05:29 crc kubenswrapper[4982]: I0122 07:05:29.973552 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ddc8ea6b-7232-4410-b2ee-70b8f977c1b0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ddc8ea6b-7232-4410-b2ee-70b8f977c1b0" (UID: "ddc8ea6b-7232-4410-b2ee-70b8f977c1b0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 07:05:29 crc kubenswrapper[4982]: I0122 07:05:29.980410 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddc8ea6b-7232-4410-b2ee-70b8f977c1b0-kube-api-access-59pgd" (OuterVolumeSpecName: "kube-api-access-59pgd") pod "ddc8ea6b-7232-4410-b2ee-70b8f977c1b0" (UID: "ddc8ea6b-7232-4410-b2ee-70b8f977c1b0"). InnerVolumeSpecName "kube-api-access-59pgd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 07:05:30 crc kubenswrapper[4982]: I0122 07:05:30.075228 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ddc8ea6b-7232-4410-b2ee-70b8f977c1b0-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 07:05:30 crc kubenswrapper[4982]: I0122 07:05:30.075276 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-59pgd\" (UniqueName: \"kubernetes.io/projected/ddc8ea6b-7232-4410-b2ee-70b8f977c1b0-kube-api-access-59pgd\") on node \"crc\" DevicePath \"\""
Jan 22 07:05:30 crc kubenswrapper[4982]: I0122 07:05:30.583840 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-4gk79" event={"ID":"ddc8ea6b-7232-4410-b2ee-70b8f977c1b0","Type":"ContainerDied","Data":"c076c9561b818844767dde8009775cdabe5794a0bf5d1951b5c0a988c464b672"}
Jan 22 07:05:30 crc kubenswrapper[4982]: I0122 07:05:30.584167 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c076c9561b818844767dde8009775cdabe5794a0bf5d1951b5c0a988c464b672"
Jan 22 07:05:30 crc kubenswrapper[4982]: I0122 07:05:30.583914 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-4gk79"
Jan 22 07:05:35 crc kubenswrapper[4982]: I0122 07:05:35.530819 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-qgc8l"
Jan 22 07:05:35 crc kubenswrapper[4982]: I0122 07:05:35.574094 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-qgc8l"
Jan 22 07:05:35 crc kubenswrapper[4982]: I0122 07:05:35.719906 4982 scope.go:117] "RemoveContainer" containerID="800a63134ac3b413ea38ff457c1f2e665d54754e82057e18f9f0b05326428673"
Jan 22 07:05:35 crc kubenswrapper[4982]: E0122 07:05:35.720111 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 07:05:35 crc kubenswrapper[4982]: I0122 07:05:35.770175 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qgc8l"]
Jan 22 07:05:36 crc kubenswrapper[4982]: I0122 07:05:36.022093 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="5226590b-0c40-49b1-b368-54b9c4e1a741" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.241:5672: connect: connection refused"
Jan 22 07:05:36 crc kubenswrapper[4982]: I0122 07:05:36.624042 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-qgc8l" podUID="672f7110-2b22-4d01-afe4-da981ef61d52" containerName="registry-server" containerID="cri-o://c72cac8c5ed29875fe2e79f31aa13279812f456abb3f2ace13d0fd161e73ffd1" gracePeriod=2
Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.036522 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-4gk79"]
Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.048936 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-4gk79"]
Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.108663 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-dkh6j"]
Jan 22 07:05:37 crc kubenswrapper[4982]: E0122 07:05:37.109069 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddc8ea6b-7232-4410-b2ee-70b8f977c1b0" containerName="mariadb-account-create-update"
Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.109089 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddc8ea6b-7232-4410-b2ee-70b8f977c1b0" containerName="mariadb-account-create-update"
Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.109348 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddc8ea6b-7232-4410-b2ee-70b8f977c1b0" containerName="mariadb-account-create-update"
Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.110182 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-dkh6j"
Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.112823 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-mariadb-root-db-secret"
Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.124064 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-dkh6j"]
Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.279823 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/44f14004-87c3-4799-bd61-c491d7e177f8-operator-scripts\") pod \"root-account-create-update-dkh6j\" (UID: \"44f14004-87c3-4799-bd61-c491d7e177f8\") " pod="openstack/root-account-create-update-dkh6j"
Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.280166 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95444\" (UniqueName: \"kubernetes.io/projected/44f14004-87c3-4799-bd61-c491d7e177f8-kube-api-access-95444\") pod \"root-account-create-update-dkh6j\" (UID: \"44f14004-87c3-4799-bd61-c491d7e177f8\") " pod="openstack/root-account-create-update-dkh6j"
Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.381653 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/44f14004-87c3-4799-bd61-c491d7e177f8-operator-scripts\") pod \"root-account-create-update-dkh6j\" (UID: \"44f14004-87c3-4799-bd61-c491d7e177f8\") " pod="openstack/root-account-create-update-dkh6j"
Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.381752 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95444\" (UniqueName: \"kubernetes.io/projected/44f14004-87c3-4799-bd61-c491d7e177f8-kube-api-access-95444\") pod \"root-account-create-update-dkh6j\" (UID: \"44f14004-87c3-4799-bd61-c491d7e177f8\") " pod="openstack/root-account-create-update-dkh6j"
Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.383830 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/44f14004-87c3-4799-bd61-c491d7e177f8-operator-scripts\") pod \"root-account-create-update-dkh6j\" (UID: \"44f14004-87c3-4799-bd61-c491d7e177f8\") " pod="openstack/root-account-create-update-dkh6j"
Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.421423 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95444\" (UniqueName: \"kubernetes.io/projected/44f14004-87c3-4799-bd61-c491d7e177f8-kube-api-access-95444\") pod \"root-account-create-update-dkh6j\" (UID: \"44f14004-87c3-4799-bd61-c491d7e177f8\") " pod="openstack/root-account-create-update-dkh6j"
Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.452337 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-dkh6j"
Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.627935 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qgc8l"
Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.632675 4982 generic.go:334] "Generic (PLEG): container finished" podID="672f7110-2b22-4d01-afe4-da981ef61d52" containerID="c72cac8c5ed29875fe2e79f31aa13279812f456abb3f2ace13d0fd161e73ffd1" exitCode=0
Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.632714 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qgc8l" event={"ID":"672f7110-2b22-4d01-afe4-da981ef61d52","Type":"ContainerDied","Data":"c72cac8c5ed29875fe2e79f31aa13279812f456abb3f2ace13d0fd161e73ffd1"}
Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.632746 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qgc8l" event={"ID":"672f7110-2b22-4d01-afe4-da981ef61d52","Type":"ContainerDied","Data":"172d85907738cda77b5325a879256cb1061cb140d04e5e136649737a460d18be"}
Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.632767 4982 scope.go:117] "RemoveContainer" containerID="c72cac8c5ed29875fe2e79f31aa13279812f456abb3f2ace13d0fd161e73ffd1"
Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.632963 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qgc8l"
Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.670307 4982 scope.go:117] "RemoveContainer" containerID="0f96ed4ebdb7daf6c8766fb1a879ba719e9e9633ab07d9a265807c4ac3607f7b"
Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.708264 4982 scope.go:117] "RemoveContainer" containerID="342c597068cce0065daee9b5ea06674b29b01f91a9293ebfe0d9ad1473b47cba"
Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.732777 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ddc8ea6b-7232-4410-b2ee-70b8f977c1b0" path="/var/lib/kubelet/pods/ddc8ea6b-7232-4410-b2ee-70b8f977c1b0/volumes"
Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.739656 4982 scope.go:117] "RemoveContainer" containerID="c72cac8c5ed29875fe2e79f31aa13279812f456abb3f2ace13d0fd161e73ffd1"
Jan 22 07:05:37 crc kubenswrapper[4982]: E0122 07:05:37.740237 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c72cac8c5ed29875fe2e79f31aa13279812f456abb3f2ace13d0fd161e73ffd1\": container with ID starting with c72cac8c5ed29875fe2e79f31aa13279812f456abb3f2ace13d0fd161e73ffd1 not found: ID does not exist" containerID="c72cac8c5ed29875fe2e79f31aa13279812f456abb3f2ace13d0fd161e73ffd1"
Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.740269 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c72cac8c5ed29875fe2e79f31aa13279812f456abb3f2ace13d0fd161e73ffd1"} err="failed to get container status \"c72cac8c5ed29875fe2e79f31aa13279812f456abb3f2ace13d0fd161e73ffd1\": rpc error: code = NotFound desc = could not find container \"c72cac8c5ed29875fe2e79f31aa13279812f456abb3f2ace13d0fd161e73ffd1\": container with ID starting with c72cac8c5ed29875fe2e79f31aa13279812f456abb3f2ace13d0fd161e73ffd1 not found: ID does not exist"
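In the pair of lines above, RemoveContainer races with CRI-O's own cleanup: the container is already gone, the runtime returns gRPC NotFound, and kubelet downgrades the failure to an informational "DeleteContainer returned error" because the desired state (container absent) already holds. A sketch of that tolerate-NotFound pattern, using the standard gRPC status package rather than the actual kubelet call sites:

// Sketch only: idempotent container removal in the face of NotFound.
package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeContainer treats NotFound as success: deleting something that is
// already gone still leaves the system in the state we wanted.
func removeContainer(id string, runtimeErr error) error {
	if runtimeErr != nil && status.Code(runtimeErr) == codes.NotFound {
		fmt.Printf("failed to get container status %q: %v (already removed)\n", id, runtimeErr)
		return nil
	}
	return runtimeErr
}

func main() {
	notFound := status.Error(codes.NotFound, "could not find container")
	_ = removeContainer("c72cac8c5ed2...", notFound) // logged, then ignored
}

The same pattern repeats immediately below for the other two containers of the deleted redhat-operators-qgc8l pod.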
container \"0f96ed4ebdb7daf6c8766fb1a879ba719e9e9633ab07d9a265807c4ac3607f7b\": container with ID starting with 0f96ed4ebdb7daf6c8766fb1a879ba719e9e9633ab07d9a265807c4ac3607f7b not found: ID does not exist" containerID="0f96ed4ebdb7daf6c8766fb1a879ba719e9e9633ab07d9a265807c4ac3607f7b" Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.740924 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f96ed4ebdb7daf6c8766fb1a879ba719e9e9633ab07d9a265807c4ac3607f7b"} err="failed to get container status \"0f96ed4ebdb7daf6c8766fb1a879ba719e9e9633ab07d9a265807c4ac3607f7b\": rpc error: code = NotFound desc = could not find container \"0f96ed4ebdb7daf6c8766fb1a879ba719e9e9633ab07d9a265807c4ac3607f7b\": container with ID starting with 0f96ed4ebdb7daf6c8766fb1a879ba719e9e9633ab07d9a265807c4ac3607f7b not found: ID does not exist" Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.740941 4982 scope.go:117] "RemoveContainer" containerID="342c597068cce0065daee9b5ea06674b29b01f91a9293ebfe0d9ad1473b47cba" Jan 22 07:05:37 crc kubenswrapper[4982]: E0122 07:05:37.741194 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"342c597068cce0065daee9b5ea06674b29b01f91a9293ebfe0d9ad1473b47cba\": container with ID starting with 342c597068cce0065daee9b5ea06674b29b01f91a9293ebfe0d9ad1473b47cba not found: ID does not exist" containerID="342c597068cce0065daee9b5ea06674b29b01f91a9293ebfe0d9ad1473b47cba" Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.741227 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"342c597068cce0065daee9b5ea06674b29b01f91a9293ebfe0d9ad1473b47cba"} err="failed to get container status \"342c597068cce0065daee9b5ea06674b29b01f91a9293ebfe0d9ad1473b47cba\": rpc error: code = NotFound desc = could not find container \"342c597068cce0065daee9b5ea06674b29b01f91a9293ebfe0d9ad1473b47cba\": container with ID starting with 342c597068cce0065daee9b5ea06674b29b01f91a9293ebfe0d9ad1473b47cba not found: ID does not exist" Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.789595 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/672f7110-2b22-4d01-afe4-da981ef61d52-catalog-content\") pod \"672f7110-2b22-4d01-afe4-da981ef61d52\" (UID: \"672f7110-2b22-4d01-afe4-da981ef61d52\") " Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.789672 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7n9sw\" (UniqueName: \"kubernetes.io/projected/672f7110-2b22-4d01-afe4-da981ef61d52-kube-api-access-7n9sw\") pod \"672f7110-2b22-4d01-afe4-da981ef61d52\" (UID: \"672f7110-2b22-4d01-afe4-da981ef61d52\") " Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.789756 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/672f7110-2b22-4d01-afe4-da981ef61d52-utilities\") pod \"672f7110-2b22-4d01-afe4-da981ef61d52\" (UID: \"672f7110-2b22-4d01-afe4-da981ef61d52\") " Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.790703 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/672f7110-2b22-4d01-afe4-da981ef61d52-utilities" (OuterVolumeSpecName: "utilities") pod "672f7110-2b22-4d01-afe4-da981ef61d52" (UID: "672f7110-2b22-4d01-afe4-da981ef61d52"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.794834 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/672f7110-2b22-4d01-afe4-da981ef61d52-kube-api-access-7n9sw" (OuterVolumeSpecName: "kube-api-access-7n9sw") pod "672f7110-2b22-4d01-afe4-da981ef61d52" (UID: "672f7110-2b22-4d01-afe4-da981ef61d52"). InnerVolumeSpecName "kube-api-access-7n9sw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.891989 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7n9sw\" (UniqueName: \"kubernetes.io/projected/672f7110-2b22-4d01-afe4-da981ef61d52-kube-api-access-7n9sw\") on node \"crc\" DevicePath \"\"" Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.892338 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/672f7110-2b22-4d01-afe4-da981ef61d52-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.914800 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/672f7110-2b22-4d01-afe4-da981ef61d52-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "672f7110-2b22-4d01-afe4-da981ef61d52" (UID: "672f7110-2b22-4d01-afe4-da981ef61d52"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.917823 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-dkh6j"] Jan 22 07:05:37 crc kubenswrapper[4982]: I0122 07:05:37.994298 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/672f7110-2b22-4d01-afe4-da981ef61d52-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 07:05:38 crc kubenswrapper[4982]: I0122 07:05:38.030760 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qgc8l"] Jan 22 07:05:38 crc kubenswrapper[4982]: I0122 07:05:38.039739 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-qgc8l"] Jan 22 07:05:38 crc kubenswrapper[4982]: I0122 07:05:38.643329 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-dkh6j" event={"ID":"44f14004-87c3-4799-bd61-c491d7e177f8","Type":"ContainerStarted","Data":"2ecc1a5801f67760fed62cc01f11d6a0511eb3ed7d34a1793695fb97242d2ab7"} Jan 22 07:05:38 crc kubenswrapper[4982]: I0122 07:05:38.643383 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-dkh6j" event={"ID":"44f14004-87c3-4799-bd61-c491d7e177f8","Type":"ContainerStarted","Data":"4a92949a40640bd1058ba993b62f385604ad99dd73ebf8f5e96309cb99cd0371"} Jan 22 07:05:38 crc kubenswrapper[4982]: I0122 07:05:38.664655 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/root-account-create-update-dkh6j" podStartSLOduration=1.664632361 podStartE2EDuration="1.664632361s" podCreationTimestamp="2026-01-22 07:05:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:05:38.663248394 +0000 UTC m=+4799.501886397" watchObservedRunningTime="2026-01-22 07:05:38.664632361 +0000 UTC m=+4799.503270364" Jan 22 07:05:39 crc kubenswrapper[4982]: I0122 
07:05:39.735393 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="672f7110-2b22-4d01-afe4-da981ef61d52" path="/var/lib/kubelet/pods/672f7110-2b22-4d01-afe4-da981ef61d52/volumes" Jan 22 07:05:40 crc kubenswrapper[4982]: I0122 07:05:40.668306 4982 generic.go:334] "Generic (PLEG): container finished" podID="44f14004-87c3-4799-bd61-c491d7e177f8" containerID="2ecc1a5801f67760fed62cc01f11d6a0511eb3ed7d34a1793695fb97242d2ab7" exitCode=0 Jan 22 07:05:40 crc kubenswrapper[4982]: I0122 07:05:40.668814 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-dkh6j" event={"ID":"44f14004-87c3-4799-bd61-c491d7e177f8","Type":"ContainerDied","Data":"2ecc1a5801f67760fed62cc01f11d6a0511eb3ed7d34a1793695fb97242d2ab7"} Jan 22 07:05:42 crc kubenswrapper[4982]: I0122 07:05:42.083741 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-dkh6j" Jan 22 07:05:42 crc kubenswrapper[4982]: I0122 07:05:42.259168 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/44f14004-87c3-4799-bd61-c491d7e177f8-operator-scripts\") pod \"44f14004-87c3-4799-bd61-c491d7e177f8\" (UID: \"44f14004-87c3-4799-bd61-c491d7e177f8\") " Jan 22 07:05:42 crc kubenswrapper[4982]: I0122 07:05:42.259241 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-95444\" (UniqueName: \"kubernetes.io/projected/44f14004-87c3-4799-bd61-c491d7e177f8-kube-api-access-95444\") pod \"44f14004-87c3-4799-bd61-c491d7e177f8\" (UID: \"44f14004-87c3-4799-bd61-c491d7e177f8\") " Jan 22 07:05:42 crc kubenswrapper[4982]: I0122 07:05:42.260521 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44f14004-87c3-4799-bd61-c491d7e177f8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "44f14004-87c3-4799-bd61-c491d7e177f8" (UID: "44f14004-87c3-4799-bd61-c491d7e177f8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:05:42 crc kubenswrapper[4982]: I0122 07:05:42.266699 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44f14004-87c3-4799-bd61-c491d7e177f8-kube-api-access-95444" (OuterVolumeSpecName: "kube-api-access-95444") pod "44f14004-87c3-4799-bd61-c491d7e177f8" (UID: "44f14004-87c3-4799-bd61-c491d7e177f8"). InnerVolumeSpecName "kube-api-access-95444". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:05:42 crc kubenswrapper[4982]: I0122 07:05:42.362204 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/44f14004-87c3-4799-bd61-c491d7e177f8-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:05:42 crc kubenswrapper[4982]: I0122 07:05:42.362280 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-95444\" (UniqueName: \"kubernetes.io/projected/44f14004-87c3-4799-bd61-c491d7e177f8-kube-api-access-95444\") on node \"crc\" DevicePath \"\"" Jan 22 07:05:42 crc kubenswrapper[4982]: I0122 07:05:42.689901 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-dkh6j" event={"ID":"44f14004-87c3-4799-bd61-c491d7e177f8","Type":"ContainerDied","Data":"4a92949a40640bd1058ba993b62f385604ad99dd73ebf8f5e96309cb99cd0371"} Jan 22 07:05:42 crc kubenswrapper[4982]: I0122 07:05:42.689956 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4a92949a40640bd1058ba993b62f385604ad99dd73ebf8f5e96309cb99cd0371" Jan 22 07:05:42 crc kubenswrapper[4982]: I0122 07:05:42.689923 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-dkh6j" Jan 22 07:05:45 crc kubenswrapper[4982]: I0122 07:05:45.957107 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Jan 22 07:05:46 crc kubenswrapper[4982]: I0122 07:05:46.021665 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Jan 22 07:05:46 crc kubenswrapper[4982]: I0122 07:05:46.719120 4982 scope.go:117] "RemoveContainer" containerID="800a63134ac3b413ea38ff457c1f2e665d54754e82057e18f9f0b05326428673" Jan 22 07:05:46 crc kubenswrapper[4982]: E0122 07:05:46.719390 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:05:51 crc kubenswrapper[4982]: I0122 07:05:51.868708 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-699964fbc-hkgb4"] Jan 22 07:05:51 crc kubenswrapper[4982]: E0122 07:05:51.869747 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="672f7110-2b22-4d01-afe4-da981ef61d52" containerName="registry-server" Jan 22 07:05:51 crc kubenswrapper[4982]: I0122 07:05:51.869768 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="672f7110-2b22-4d01-afe4-da981ef61d52" containerName="registry-server" Jan 22 07:05:51 crc kubenswrapper[4982]: E0122 07:05:51.869824 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="672f7110-2b22-4d01-afe4-da981ef61d52" containerName="extract-utilities" Jan 22 07:05:51 crc kubenswrapper[4982]: I0122 07:05:51.869840 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="672f7110-2b22-4d01-afe4-da981ef61d52" containerName="extract-utilities" Jan 22 07:05:51 crc kubenswrapper[4982]: E0122 07:05:51.869886 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="672f7110-2b22-4d01-afe4-da981ef61d52" containerName="extract-content" Jan 22 07:05:51 crc 
kubenswrapper[4982]: I0122 07:05:51.869898 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="672f7110-2b22-4d01-afe4-da981ef61d52" containerName="extract-content" Jan 22 07:05:51 crc kubenswrapper[4982]: E0122 07:05:51.869920 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44f14004-87c3-4799-bd61-c491d7e177f8" containerName="mariadb-account-create-update" Jan 22 07:05:51 crc kubenswrapper[4982]: I0122 07:05:51.869934 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="44f14004-87c3-4799-bd61-c491d7e177f8" containerName="mariadb-account-create-update" Jan 22 07:05:51 crc kubenswrapper[4982]: I0122 07:05:51.870152 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="672f7110-2b22-4d01-afe4-da981ef61d52" containerName="registry-server" Jan 22 07:05:51 crc kubenswrapper[4982]: I0122 07:05:51.870186 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="44f14004-87c3-4799-bd61-c491d7e177f8" containerName="mariadb-account-create-update" Jan 22 07:05:51 crc kubenswrapper[4982]: I0122 07:05:51.871432 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-699964fbc-hkgb4" Jan 22 07:05:51 crc kubenswrapper[4982]: I0122 07:05:51.896384 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-699964fbc-hkgb4"] Jan 22 07:05:52 crc kubenswrapper[4982]: I0122 07:05:52.014336 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82cab6e1-6ef0-479b-a2f1-dc767439dbc1-config\") pod \"dnsmasq-dns-699964fbc-hkgb4\" (UID: \"82cab6e1-6ef0-479b-a2f1-dc767439dbc1\") " pod="openstack/dnsmasq-dns-699964fbc-hkgb4" Jan 22 07:05:52 crc kubenswrapper[4982]: I0122 07:05:52.014401 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2mbjm\" (UniqueName: \"kubernetes.io/projected/82cab6e1-6ef0-479b-a2f1-dc767439dbc1-kube-api-access-2mbjm\") pod \"dnsmasq-dns-699964fbc-hkgb4\" (UID: \"82cab6e1-6ef0-479b-a2f1-dc767439dbc1\") " pod="openstack/dnsmasq-dns-699964fbc-hkgb4" Jan 22 07:05:52 crc kubenswrapper[4982]: I0122 07:05:52.014431 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/82cab6e1-6ef0-479b-a2f1-dc767439dbc1-dns-svc\") pod \"dnsmasq-dns-699964fbc-hkgb4\" (UID: \"82cab6e1-6ef0-479b-a2f1-dc767439dbc1\") " pod="openstack/dnsmasq-dns-699964fbc-hkgb4" Jan 22 07:05:52 crc kubenswrapper[4982]: I0122 07:05:52.116059 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82cab6e1-6ef0-479b-a2f1-dc767439dbc1-config\") pod \"dnsmasq-dns-699964fbc-hkgb4\" (UID: \"82cab6e1-6ef0-479b-a2f1-dc767439dbc1\") " pod="openstack/dnsmasq-dns-699964fbc-hkgb4" Jan 22 07:05:52 crc kubenswrapper[4982]: I0122 07:05:52.116117 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2mbjm\" (UniqueName: \"kubernetes.io/projected/82cab6e1-6ef0-479b-a2f1-dc767439dbc1-kube-api-access-2mbjm\") pod \"dnsmasq-dns-699964fbc-hkgb4\" (UID: \"82cab6e1-6ef0-479b-a2f1-dc767439dbc1\") " pod="openstack/dnsmasq-dns-699964fbc-hkgb4" Jan 22 07:05:52 crc kubenswrapper[4982]: I0122 07:05:52.116154 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/82cab6e1-6ef0-479b-a2f1-dc767439dbc1-dns-svc\") pod \"dnsmasq-dns-699964fbc-hkgb4\" (UID: \"82cab6e1-6ef0-479b-a2f1-dc767439dbc1\") " pod="openstack/dnsmasq-dns-699964fbc-hkgb4" Jan 22 07:05:52 crc kubenswrapper[4982]: I0122 07:05:52.116922 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82cab6e1-6ef0-479b-a2f1-dc767439dbc1-config\") pod \"dnsmasq-dns-699964fbc-hkgb4\" (UID: \"82cab6e1-6ef0-479b-a2f1-dc767439dbc1\") " pod="openstack/dnsmasq-dns-699964fbc-hkgb4" Jan 22 07:05:52 crc kubenswrapper[4982]: I0122 07:05:52.117045 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/82cab6e1-6ef0-479b-a2f1-dc767439dbc1-dns-svc\") pod \"dnsmasq-dns-699964fbc-hkgb4\" (UID: \"82cab6e1-6ef0-479b-a2f1-dc767439dbc1\") " pod="openstack/dnsmasq-dns-699964fbc-hkgb4" Jan 22 07:05:52 crc kubenswrapper[4982]: I0122 07:05:52.136065 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2mbjm\" (UniqueName: \"kubernetes.io/projected/82cab6e1-6ef0-479b-a2f1-dc767439dbc1-kube-api-access-2mbjm\") pod \"dnsmasq-dns-699964fbc-hkgb4\" (UID: \"82cab6e1-6ef0-479b-a2f1-dc767439dbc1\") " pod="openstack/dnsmasq-dns-699964fbc-hkgb4" Jan 22 07:05:52 crc kubenswrapper[4982]: I0122 07:05:52.200577 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-699964fbc-hkgb4" Jan 22 07:05:52 crc kubenswrapper[4982]: I0122 07:05:52.561185 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 22 07:05:52 crc kubenswrapper[4982]: I0122 07:05:52.702585 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-699964fbc-hkgb4"] Jan 22 07:05:52 crc kubenswrapper[4982]: I0122 07:05:52.796580 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-699964fbc-hkgb4" event={"ID":"82cab6e1-6ef0-479b-a2f1-dc767439dbc1","Type":"ContainerStarted","Data":"1dbbe1f4154aed122560806bbeaa32cea06cf319601cd31dae430d7b7b8e3510"} Jan 22 07:05:53 crc kubenswrapper[4982]: I0122 07:05:53.231666 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 22 07:05:53 crc kubenswrapper[4982]: I0122 07:05:53.805587 4982 generic.go:334] "Generic (PLEG): container finished" podID="82cab6e1-6ef0-479b-a2f1-dc767439dbc1" containerID="f3643643bdfe1f401604e36359db714d6627d955edefbeaecc26c8077c27d153" exitCode=0 Jan 22 07:05:53 crc kubenswrapper[4982]: I0122 07:05:53.805657 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-699964fbc-hkgb4" event={"ID":"82cab6e1-6ef0-479b-a2f1-dc767439dbc1","Type":"ContainerDied","Data":"f3643643bdfe1f401604e36359db714d6627d955edefbeaecc26c8077c27d153"} Jan 22 07:05:54 crc kubenswrapper[4982]: I0122 07:05:54.460794 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="fdfea17b-526b-43b7-b93b-bfb7342e4590" containerName="rabbitmq" containerID="cri-o://b2deb2e89cfebe5d8ca644c16c8ad8dd0490ce220f0e2815e686600dbe86caa9" gracePeriod=604799 Jan 22 07:05:54 crc kubenswrapper[4982]: I0122 07:05:54.814465 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-699964fbc-hkgb4" event={"ID":"82cab6e1-6ef0-479b-a2f1-dc767439dbc1","Type":"ContainerStarted","Data":"c11c2fccf10eb86424599c0a3032cf869bc1543b03a8a1f69bdd20b3de6e881b"} Jan 22 07:05:54 crc 
kubenswrapper[4982]: I0122 07:05:54.814871 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-699964fbc-hkgb4" Jan 22 07:05:54 crc kubenswrapper[4982]: I0122 07:05:54.832800 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-699964fbc-hkgb4" podStartSLOduration=3.83278225 podStartE2EDuration="3.83278225s" podCreationTimestamp="2026-01-22 07:05:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:05:54.826711586 +0000 UTC m=+4815.665349589" watchObservedRunningTime="2026-01-22 07:05:54.83278225 +0000 UTC m=+4815.671420253" Jan 22 07:05:55 crc kubenswrapper[4982]: I0122 07:05:55.236214 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="5226590b-0c40-49b1-b368-54b9c4e1a741" containerName="rabbitmq" containerID="cri-o://02039c3a09b9be7d7884025ee211672bddcd565ebd87a89ff5e8998773c3ab46" gracePeriod=604798 Jan 22 07:05:55 crc kubenswrapper[4982]: I0122 07:05:55.955187 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="fdfea17b-526b-43b7-b93b-bfb7342e4590" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.240:5672: connect: connection refused" Jan 22 07:05:56 crc kubenswrapper[4982]: I0122 07:05:56.019994 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="5226590b-0c40-49b1-b368-54b9c4e1a741" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.241:5672: connect: connection refused" Jan 22 07:06:00 crc kubenswrapper[4982]: I0122 07:06:00.719027 4982 scope.go:117] "RemoveContainer" containerID="800a63134ac3b413ea38ff457c1f2e665d54754e82057e18f9f0b05326428673" Jan 22 07:06:00 crc kubenswrapper[4982]: E0122 07:06:00.720816 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.354098 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.396787 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/fdfea17b-526b-43b7-b93b-bfb7342e4590-rabbitmq-plugins\") pod \"fdfea17b-526b-43b7-b93b-bfb7342e4590\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.396834 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/fdfea17b-526b-43b7-b93b-bfb7342e4590-plugins-conf\") pod \"fdfea17b-526b-43b7-b93b-bfb7342e4590\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.396976 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-872922c5-1056-47e5-85fe-83377239f8f6\") pod \"fdfea17b-526b-43b7-b93b-bfb7342e4590\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.397025 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/fdfea17b-526b-43b7-b93b-bfb7342e4590-erlang-cookie-secret\") pod \"fdfea17b-526b-43b7-b93b-bfb7342e4590\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.397065 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/fdfea17b-526b-43b7-b93b-bfb7342e4590-rabbitmq-confd\") pod \"fdfea17b-526b-43b7-b93b-bfb7342e4590\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.397127 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/fdfea17b-526b-43b7-b93b-bfb7342e4590-server-conf\") pod \"fdfea17b-526b-43b7-b93b-bfb7342e4590\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.397170 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/fdfea17b-526b-43b7-b93b-bfb7342e4590-rabbitmq-erlang-cookie\") pod \"fdfea17b-526b-43b7-b93b-bfb7342e4590\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.397199 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g8m4l\" (UniqueName: \"kubernetes.io/projected/fdfea17b-526b-43b7-b93b-bfb7342e4590-kube-api-access-g8m4l\") pod \"fdfea17b-526b-43b7-b93b-bfb7342e4590\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.397226 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/fdfea17b-526b-43b7-b93b-bfb7342e4590-pod-info\") pod \"fdfea17b-526b-43b7-b93b-bfb7342e4590\" (UID: \"fdfea17b-526b-43b7-b93b-bfb7342e4590\") " Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.397919 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fdfea17b-526b-43b7-b93b-bfb7342e4590-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod 
"fdfea17b-526b-43b7-b93b-bfb7342e4590" (UID: "fdfea17b-526b-43b7-b93b-bfb7342e4590"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.398007 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fdfea17b-526b-43b7-b93b-bfb7342e4590-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "fdfea17b-526b-43b7-b93b-bfb7342e4590" (UID: "fdfea17b-526b-43b7-b93b-bfb7342e4590"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.398501 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fdfea17b-526b-43b7-b93b-bfb7342e4590-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "fdfea17b-526b-43b7-b93b-bfb7342e4590" (UID: "fdfea17b-526b-43b7-b93b-bfb7342e4590"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.404950 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fdfea17b-526b-43b7-b93b-bfb7342e4590-kube-api-access-g8m4l" (OuterVolumeSpecName: "kube-api-access-g8m4l") pod "fdfea17b-526b-43b7-b93b-bfb7342e4590" (UID: "fdfea17b-526b-43b7-b93b-bfb7342e4590"). InnerVolumeSpecName "kube-api-access-g8m4l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.410999 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/fdfea17b-526b-43b7-b93b-bfb7342e4590-pod-info" (OuterVolumeSpecName: "pod-info") pod "fdfea17b-526b-43b7-b93b-bfb7342e4590" (UID: "fdfea17b-526b-43b7-b93b-bfb7342e4590"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.412046 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdfea17b-526b-43b7-b93b-bfb7342e4590-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "fdfea17b-526b-43b7-b93b-bfb7342e4590" (UID: "fdfea17b-526b-43b7-b93b-bfb7342e4590"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.426442 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-872922c5-1056-47e5-85fe-83377239f8f6" (OuterVolumeSpecName: "persistence") pod "fdfea17b-526b-43b7-b93b-bfb7342e4590" (UID: "fdfea17b-526b-43b7-b93b-bfb7342e4590"). InnerVolumeSpecName "pvc-872922c5-1056-47e5-85fe-83377239f8f6". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.453276 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fdfea17b-526b-43b7-b93b-bfb7342e4590-server-conf" (OuterVolumeSpecName: "server-conf") pod "fdfea17b-526b-43b7-b93b-bfb7342e4590" (UID: "fdfea17b-526b-43b7-b93b-bfb7342e4590"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.497334 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fdfea17b-526b-43b7-b93b-bfb7342e4590-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "fdfea17b-526b-43b7-b93b-bfb7342e4590" (UID: "fdfea17b-526b-43b7-b93b-bfb7342e4590"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.499246 4982 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/fdfea17b-526b-43b7-b93b-bfb7342e4590-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.499282 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g8m4l\" (UniqueName: \"kubernetes.io/projected/fdfea17b-526b-43b7-b93b-bfb7342e4590-kube-api-access-g8m4l\") on node \"crc\" DevicePath \"\""
Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.499298 4982 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/fdfea17b-526b-43b7-b93b-bfb7342e4590-pod-info\") on node \"crc\" DevicePath \"\""
Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.499311 4982 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/fdfea17b-526b-43b7-b93b-bfb7342e4590-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.499322 4982 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/fdfea17b-526b-43b7-b93b-bfb7342e4590-plugins-conf\") on node \"crc\" DevicePath \"\""
Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.499360 4982 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-872922c5-1056-47e5-85fe-83377239f8f6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-872922c5-1056-47e5-85fe-83377239f8f6\") on node \"crc\" "
Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.499378 4982 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/fdfea17b-526b-43b7-b93b-bfb7342e4590-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.499391 4982 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/fdfea17b-526b-43b7-b93b-bfb7342e4590-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.499401 4982 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/fdfea17b-526b-43b7-b93b-bfb7342e4590-server-conf\") on node \"crc\" DevicePath \"\""
Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.515041 4982 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
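The attacher.UnmountDevice entry above records the kubelet's CSI attacher skipping the NodeUnstageVolume step: the kubevirt.io.hostpath-provisioner driver does not advertise the STAGE_UNSTAGE_VOLUME node capability, so there is no staged device to tear down and the volume goes straight from UnmountVolume.TearDown to "Volume detached". A minimal sketch of that capability probe against a CSI node service, using the published container-storage-interface Go bindings; the socket path and the helper name hasStageUnstage are illustrative assumptions, not kubelet code:

    package main

    import (
    	"context"
    	"fmt"
    	"log"
    	"time"

    	csi "github.com/container-storage-interface/spec/lib/go/csi"
    	"google.golang.org/grpc"
    	"google.golang.org/grpc/credentials/insecure"
    )

    // hasStageUnstage reports whether the CSI node service implements
    // NodeStageVolume/NodeUnstageVolume. When it does not, an attacher can
    // skip the MountDevice/UnmountDevice phase, which is what the log
    // entries around this note show.
    func hasStageUnstage(ctx context.Context, node csi.NodeClient) (bool, error) {
    	resp, err := node.NodeGetCapabilities(ctx, &csi.NodeGetCapabilitiesRequest{})
    	if err != nil {
    		return false, err
    	}
    	for _, c := range resp.GetCapabilities() {
    		if c.GetRpc().GetType() == csi.NodeServiceCapability_RPC_STAGE_UNSTAGE_VOLUME {
    			return true, nil
    		}
    	}
    	return false, nil
    }

    func main() {
    	// Socket path is an assumption; hostpath-style drivers typically
    	// register one under /var/lib/kubelet/plugins/<driver-name>/.
    	conn, err := grpc.Dial("unix:///var/lib/kubelet/plugins/kubevirt.io.hostpath-provisioner/csi.sock",
    		grpc.WithTransportCredentials(insecure.NewCredentials()))
    	if err != nil {
    		log.Fatal(err)
    	}
    	defer conn.Close()

    	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
    	defer cancel()
    	ok, err := hasStageUnstage(ctx, csi.NewNodeClient(conn))
    	fmt.Println("STAGE_UNSTAGE_VOLUME:", ok, err)
    }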
Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.515200 4982 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-872922c5-1056-47e5-85fe-83377239f8f6" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-872922c5-1056-47e5-85fe-83377239f8f6") on node "crc"
Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.601025 4982 reconciler_common.go:293] "Volume detached for volume \"pvc-872922c5-1056-47e5-85fe-83377239f8f6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-872922c5-1056-47e5-85fe-83377239f8f6\") on node \"crc\" DevicePath \"\""
Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.874301 4982 generic.go:334] "Generic (PLEG): container finished" podID="5226590b-0c40-49b1-b368-54b9c4e1a741" containerID="02039c3a09b9be7d7884025ee211672bddcd565ebd87a89ff5e8998773c3ab46" exitCode=0
Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.874396 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"5226590b-0c40-49b1-b368-54b9c4e1a741","Type":"ContainerDied","Data":"02039c3a09b9be7d7884025ee211672bddcd565ebd87a89ff5e8998773c3ab46"}
Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.877720 4982 generic.go:334] "Generic (PLEG): container finished" podID="fdfea17b-526b-43b7-b93b-bfb7342e4590" containerID="b2deb2e89cfebe5d8ca644c16c8ad8dd0490ce220f0e2815e686600dbe86caa9" exitCode=0
Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.877783 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"fdfea17b-526b-43b7-b93b-bfb7342e4590","Type":"ContainerDied","Data":"b2deb2e89cfebe5d8ca644c16c8ad8dd0490ce220f0e2815e686600dbe86caa9"}
Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.877815 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"fdfea17b-526b-43b7-b93b-bfb7342e4590","Type":"ContainerDied","Data":"ea091077002f76f8e26ab3e939d6717123bb09d400b02a439abf626b1aba8e19"}
Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.877846 4982 scope.go:117] "RemoveContainer" containerID="b2deb2e89cfebe5d8ca644c16c8ad8dd0490ce220f0e2815e686600dbe86caa9"
Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.877967 4982 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.908277 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.919640 4982 scope.go:117] "RemoveContainer" containerID="892482b97cdfabfccd52afa11003f1f971912c25a4f31653dc404213c7c03302" Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.921447 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.946874 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Jan 22 07:06:01 crc kubenswrapper[4982]: E0122 07:06:01.949084 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdfea17b-526b-43b7-b93b-bfb7342e4590" containerName="rabbitmq" Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.949113 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdfea17b-526b-43b7-b93b-bfb7342e4590" containerName="rabbitmq" Jan 22 07:06:01 crc kubenswrapper[4982]: E0122 07:06:01.949157 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdfea17b-526b-43b7-b93b-bfb7342e4590" containerName="setup-container" Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.949165 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdfea17b-526b-43b7-b93b-bfb7342e4590" containerName="setup-container" Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.949404 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdfea17b-526b-43b7-b93b-bfb7342e4590" containerName="rabbitmq" Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.950246 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.952931 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-8vklg" Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.953091 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.953201 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.953369 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.953923 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Jan 22 07:06:01 crc kubenswrapper[4982]: I0122 07:06:01.977299 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.007110 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r926q\" (UniqueName: \"kubernetes.io/projected/0e26bf89-f462-40be-8b10-8fb6e1507bf8-kube-api-access-r926q\") pod \"rabbitmq-server-0\" (UID: \"0e26bf89-f462-40be-8b10-8fb6e1507bf8\") " pod="openstack/rabbitmq-server-0" Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.007163 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0e26bf89-f462-40be-8b10-8fb6e1507bf8-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0e26bf89-f462-40be-8b10-8fb6e1507bf8\") " pod="openstack/rabbitmq-server-0" Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.007188 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0e26bf89-f462-40be-8b10-8fb6e1507bf8-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0e26bf89-f462-40be-8b10-8fb6e1507bf8\") " pod="openstack/rabbitmq-server-0" Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.007214 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0e26bf89-f462-40be-8b10-8fb6e1507bf8-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0e26bf89-f462-40be-8b10-8fb6e1507bf8\") " pod="openstack/rabbitmq-server-0" Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.007245 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-872922c5-1056-47e5-85fe-83377239f8f6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-872922c5-1056-47e5-85fe-83377239f8f6\") pod \"rabbitmq-server-0\" (UID: \"0e26bf89-f462-40be-8b10-8fb6e1507bf8\") " pod="openstack/rabbitmq-server-0" Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.007263 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0e26bf89-f462-40be-8b10-8fb6e1507bf8-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0e26bf89-f462-40be-8b10-8fb6e1507bf8\") " pod="openstack/rabbitmq-server-0" Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.007290 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0e26bf89-f462-40be-8b10-8fb6e1507bf8-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0e26bf89-f462-40be-8b10-8fb6e1507bf8\") " pod="openstack/rabbitmq-server-0" Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.007340 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0e26bf89-f462-40be-8b10-8fb6e1507bf8-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0e26bf89-f462-40be-8b10-8fb6e1507bf8\") " pod="openstack/rabbitmq-server-0" Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.007369 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0e26bf89-f462-40be-8b10-8fb6e1507bf8-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0e26bf89-f462-40be-8b10-8fb6e1507bf8\") " pod="openstack/rabbitmq-server-0" Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.108576 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0e26bf89-f462-40be-8b10-8fb6e1507bf8-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0e26bf89-f462-40be-8b10-8fb6e1507bf8\") " pod="openstack/rabbitmq-server-0" Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.108638 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r926q\" (UniqueName: \"kubernetes.io/projected/0e26bf89-f462-40be-8b10-8fb6e1507bf8-kube-api-access-r926q\") pod \"rabbitmq-server-0\" (UID: \"0e26bf89-f462-40be-8b10-8fb6e1507bf8\") " pod="openstack/rabbitmq-server-0" Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.108670 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0e26bf89-f462-40be-8b10-8fb6e1507bf8-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0e26bf89-f462-40be-8b10-8fb6e1507bf8\") " pod="openstack/rabbitmq-server-0" Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.108694 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0e26bf89-f462-40be-8b10-8fb6e1507bf8-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0e26bf89-f462-40be-8b10-8fb6e1507bf8\") " pod="openstack/rabbitmq-server-0" Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.108719 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0e26bf89-f462-40be-8b10-8fb6e1507bf8-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0e26bf89-f462-40be-8b10-8fb6e1507bf8\") " pod="openstack/rabbitmq-server-0" Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.108747 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-872922c5-1056-47e5-85fe-83377239f8f6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-872922c5-1056-47e5-85fe-83377239f8f6\") pod \"rabbitmq-server-0\" (UID: \"0e26bf89-f462-40be-8b10-8fb6e1507bf8\") " pod="openstack/rabbitmq-server-0" Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.108767 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0e26bf89-f462-40be-8b10-8fb6e1507bf8-server-conf\") 
pod \"rabbitmq-server-0\" (UID: \"0e26bf89-f462-40be-8b10-8fb6e1507bf8\") " pod="openstack/rabbitmq-server-0"
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.108790 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0e26bf89-f462-40be-8b10-8fb6e1507bf8-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0e26bf89-f462-40be-8b10-8fb6e1507bf8\") " pod="openstack/rabbitmq-server-0"
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.108820 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0e26bf89-f462-40be-8b10-8fb6e1507bf8-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0e26bf89-f462-40be-8b10-8fb6e1507bf8\") " pod="openstack/rabbitmq-server-0"
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.109350 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0e26bf89-f462-40be-8b10-8fb6e1507bf8-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0e26bf89-f462-40be-8b10-8fb6e1507bf8\") " pod="openstack/rabbitmq-server-0"
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.109376 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0e26bf89-f462-40be-8b10-8fb6e1507bf8-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0e26bf89-f462-40be-8b10-8fb6e1507bf8\") " pod="openstack/rabbitmq-server-0"
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.110921 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0e26bf89-f462-40be-8b10-8fb6e1507bf8-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0e26bf89-f462-40be-8b10-8fb6e1507bf8\") " pod="openstack/rabbitmq-server-0"
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.111460 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0e26bf89-f462-40be-8b10-8fb6e1507bf8-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0e26bf89-f462-40be-8b10-8fb6e1507bf8\") " pod="openstack/rabbitmq-server-0"
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.112558 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
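On the mount side the same capability gate fires in the opposite direction: MountDevice is skipped, the MountVolume.MountDevice entry that follows "succeeds" by merely recording the driver's globalmount path, and the actual work happens in MountVolume.SetUp, which for a CSI volume ends in a single NodePublishVolume call with no staging target. A rough sketch of that publish request, reusing the imports from the sketch above; the function name publishVolume and the target path (shaped like the kubelet's pods/<uid>/volumes/kubernetes.io~csi layout) are illustrative assumptions:

    // publishVolume bind-mounts the backing path into the pod's volume
    // directory. StagingTargetPath is deliberately omitted: with
    // STAGE_UNSTAGE_VOLUME unset, nothing was staged beforehand.
    func publishVolume(ctx context.Context, node csi.NodeClient) error {
    	_, err := node.NodePublishVolume(ctx, &csi.NodePublishVolumeRequest{
    		VolumeId:   "pvc-872922c5-1056-47e5-85fe-83377239f8f6",
    		TargetPath: "/var/lib/kubelet/pods/0e26bf89-f462-40be-8b10-8fb6e1507bf8/volumes/kubernetes.io~csi/persistence/mount",
    		VolumeCapability: &csi.VolumeCapability{
    			AccessType: &csi.VolumeCapability_Mount{Mount: &csi.VolumeCapability_MountVolume{}},
    			AccessMode: &csi.VolumeCapability_AccessMode{
    				Mode: csi.VolumeCapability_AccessMode_SINGLE_NODE_WRITER,
    			},
    		},
    	})
    	return err
    }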
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.112608 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-872922c5-1056-47e5-85fe-83377239f8f6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-872922c5-1056-47e5-85fe-83377239f8f6\") pod \"rabbitmq-server-0\" (UID: \"0e26bf89-f462-40be-8b10-8fb6e1507bf8\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/18ab1ae5760f5c867850af584c1e8c3673fb2dd50102762695be9bc9bd399b5e/globalmount\"" pod="openstack/rabbitmq-server-0"
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.144451 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0e26bf89-f462-40be-8b10-8fb6e1507bf8-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0e26bf89-f462-40be-8b10-8fb6e1507bf8\") " pod="openstack/rabbitmq-server-0"
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.146044 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r926q\" (UniqueName: \"kubernetes.io/projected/0e26bf89-f462-40be-8b10-8fb6e1507bf8-kube-api-access-r926q\") pod \"rabbitmq-server-0\" (UID: \"0e26bf89-f462-40be-8b10-8fb6e1507bf8\") " pod="openstack/rabbitmq-server-0"
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.151419 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0e26bf89-f462-40be-8b10-8fb6e1507bf8-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0e26bf89-f462-40be-8b10-8fb6e1507bf8\") " pod="openstack/rabbitmq-server-0"
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.152105 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0e26bf89-f462-40be-8b10-8fb6e1507bf8-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0e26bf89-f462-40be-8b10-8fb6e1507bf8\") " pod="openstack/rabbitmq-server-0"
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.162832 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-872922c5-1056-47e5-85fe-83377239f8f6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-872922c5-1056-47e5-85fe-83377239f8f6\") pod \"rabbitmq-server-0\" (UID: \"0e26bf89-f462-40be-8b10-8fb6e1507bf8\") " pod="openstack/rabbitmq-server-0"
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.203101 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-699964fbc-hkgb4"
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.238969 4982 scope.go:117] "RemoveContainer" containerID="b2deb2e89cfebe5d8ca644c16c8ad8dd0490ce220f0e2815e686600dbe86caa9"
Jan 22 07:06:02 crc kubenswrapper[4982]: E0122 07:06:02.239405 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2deb2e89cfebe5d8ca644c16c8ad8dd0490ce220f0e2815e686600dbe86caa9\": container with ID starting with b2deb2e89cfebe5d8ca644c16c8ad8dd0490ce220f0e2815e686600dbe86caa9 not found: ID does not exist" containerID="b2deb2e89cfebe5d8ca644c16c8ad8dd0490ce220f0e2815e686600dbe86caa9"
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.239464 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2deb2e89cfebe5d8ca644c16c8ad8dd0490ce220f0e2815e686600dbe86caa9"} err="failed to get container status
\"b2deb2e89cfebe5d8ca644c16c8ad8dd0490ce220f0e2815e686600dbe86caa9\": rpc error: code = NotFound desc = could not find container \"b2deb2e89cfebe5d8ca644c16c8ad8dd0490ce220f0e2815e686600dbe86caa9\": container with ID starting with b2deb2e89cfebe5d8ca644c16c8ad8dd0490ce220f0e2815e686600dbe86caa9 not found: ID does not exist" Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.239500 4982 scope.go:117] "RemoveContainer" containerID="892482b97cdfabfccd52afa11003f1f971912c25a4f31653dc404213c7c03302" Jan 22 07:06:02 crc kubenswrapper[4982]: E0122 07:06:02.239784 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"892482b97cdfabfccd52afa11003f1f971912c25a4f31653dc404213c7c03302\": container with ID starting with 892482b97cdfabfccd52afa11003f1f971912c25a4f31653dc404213c7c03302 not found: ID does not exist" containerID="892482b97cdfabfccd52afa11003f1f971912c25a4f31653dc404213c7c03302" Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.239821 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"892482b97cdfabfccd52afa11003f1f971912c25a4f31653dc404213c7c03302"} err="failed to get container status \"892482b97cdfabfccd52afa11003f1f971912c25a4f31653dc404213c7c03302\": rpc error: code = NotFound desc = could not find container \"892482b97cdfabfccd52afa11003f1f971912c25a4f31653dc404213c7c03302\": container with ID starting with 892482b97cdfabfccd52afa11003f1f971912c25a4f31653dc404213c7c03302 not found: ID does not exist" Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.276278 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d79f765b5-wh42f"] Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.276588 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5d79f765b5-wh42f" podUID="e62d2f00-b1ff-4158-991e-4203b5faeabe" containerName="dnsmasq-dns" containerID="cri-o://14802a996c4047fb4d680e6c1472d4d119f1eb5874e2279d3b0cd19c28eeb81a" gracePeriod=10 Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.297042 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.315220 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.428068 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5226590b-0c40-49b1-b368-54b9c4e1a741-server-conf\") pod \"5226590b-0c40-49b1-b368-54b9c4e1a741\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") "
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.428145 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5226590b-0c40-49b1-b368-54b9c4e1a741-rabbitmq-erlang-cookie\") pod \"5226590b-0c40-49b1-b368-54b9c4e1a741\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") "
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.428169 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5226590b-0c40-49b1-b368-54b9c4e1a741-plugins-conf\") pod \"5226590b-0c40-49b1-b368-54b9c4e1a741\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") "
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.428227 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5226590b-0c40-49b1-b368-54b9c4e1a741-rabbitmq-confd\") pod \"5226590b-0c40-49b1-b368-54b9c4e1a741\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") "
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.428368 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0769149-4631-4cc7-801a-7363911a5914\") pod \"5226590b-0c40-49b1-b368-54b9c4e1a741\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") "
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.428399 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5226590b-0c40-49b1-b368-54b9c4e1a741-rabbitmq-plugins\") pod \"5226590b-0c40-49b1-b368-54b9c4e1a741\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") "
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.428437 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5226590b-0c40-49b1-b368-54b9c4e1a741-erlang-cookie-secret\") pod \"5226590b-0c40-49b1-b368-54b9c4e1a741\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") "
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.428470 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5226590b-0c40-49b1-b368-54b9c4e1a741-pod-info\") pod \"5226590b-0c40-49b1-b368-54b9c4e1a741\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") "
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.428487 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qc8n4\" (UniqueName: \"kubernetes.io/projected/5226590b-0c40-49b1-b368-54b9c4e1a741-kube-api-access-qc8n4\") pod \"5226590b-0c40-49b1-b368-54b9c4e1a741\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") "
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.431530 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5226590b-0c40-49b1-b368-54b9c4e1a741-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "5226590b-0c40-49b1-b368-54b9c4e1a741" (UID: "5226590b-0c40-49b1-b368-54b9c4e1a741"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.433606 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5226590b-0c40-49b1-b368-54b9c4e1a741-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "5226590b-0c40-49b1-b368-54b9c4e1a741" (UID: "5226590b-0c40-49b1-b368-54b9c4e1a741"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.434078 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5226590b-0c40-49b1-b368-54b9c4e1a741-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "5226590b-0c40-49b1-b368-54b9c4e1a741" (UID: "5226590b-0c40-49b1-b368-54b9c4e1a741"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.438186 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5226590b-0c40-49b1-b368-54b9c4e1a741-kube-api-access-qc8n4" (OuterVolumeSpecName: "kube-api-access-qc8n4") pod "5226590b-0c40-49b1-b368-54b9c4e1a741" (UID: "5226590b-0c40-49b1-b368-54b9c4e1a741"). InnerVolumeSpecName "kube-api-access-qc8n4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.444332 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/5226590b-0c40-49b1-b368-54b9c4e1a741-pod-info" (OuterVolumeSpecName: "pod-info") pod "5226590b-0c40-49b1-b368-54b9c4e1a741" (UID: "5226590b-0c40-49b1-b368-54b9c4e1a741"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.444401 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5226590b-0c40-49b1-b368-54b9c4e1a741-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "5226590b-0c40-49b1-b368-54b9c4e1a741" (UID: "5226590b-0c40-49b1-b368-54b9c4e1a741"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.451711 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5226590b-0c40-49b1-b368-54b9c4e1a741-server-conf" (OuterVolumeSpecName: "server-conf") pod "5226590b-0c40-49b1-b368-54b9c4e1a741" (UID: "5226590b-0c40-49b1-b368-54b9c4e1a741"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.464462 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0769149-4631-4cc7-801a-7363911a5914" (OuterVolumeSpecName: "persistence") pod "5226590b-0c40-49b1-b368-54b9c4e1a741" (UID: "5226590b-0c40-49b1-b368-54b9c4e1a741"). InnerVolumeSpecName "pvc-e0769149-4631-4cc7-801a-7363911a5914". PluginName "kubernetes.io/csi", VolumeGidValue ""
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.529174 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5226590b-0c40-49b1-b368-54b9c4e1a741-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "5226590b-0c40-49b1-b368-54b9c4e1a741" (UID: "5226590b-0c40-49b1-b368-54b9c4e1a741"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.529582 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5226590b-0c40-49b1-b368-54b9c4e1a741-rabbitmq-confd\") pod \"5226590b-0c40-49b1-b368-54b9c4e1a741\" (UID: \"5226590b-0c40-49b1-b368-54b9c4e1a741\") "
Jan 22 07:06:02 crc kubenswrapper[4982]: W0122 07:06:02.529729 4982 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/5226590b-0c40-49b1-b368-54b9c4e1a741/volumes/kubernetes.io~projected/rabbitmq-confd
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.529796 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5226590b-0c40-49b1-b368-54b9c4e1a741-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "5226590b-0c40-49b1-b368-54b9c4e1a741" (UID: "5226590b-0c40-49b1-b368-54b9c4e1a741"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.530132 4982 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5226590b-0c40-49b1-b368-54b9c4e1a741-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.530162 4982 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-e0769149-4631-4cc7-801a-7363911a5914\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0769149-4631-4cc7-801a-7363911a5914\") on node \"crc\" "
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.530194 4982 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5226590b-0c40-49b1-b368-54b9c4e1a741-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.530206 4982 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5226590b-0c40-49b1-b368-54b9c4e1a741-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.530217 4982 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5226590b-0c40-49b1-b368-54b9c4e1a741-pod-info\") on node \"crc\" DevicePath \"\""
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.530228 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qc8n4\" (UniqueName: \"kubernetes.io/projected/5226590b-0c40-49b1-b368-54b9c4e1a741-kube-api-access-qc8n4\") on node \"crc\" DevicePath \"\""
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.530237 4982 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5226590b-0c40-49b1-b368-54b9c4e1a741-server-conf\") on node \"crc\" DevicePath \"\""
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.530245 4982 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5226590b-0c40-49b1-b368-54b9c4e1a741-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.530255 4982 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5226590b-0c40-49b1-b368-54b9c4e1a741-plugins-conf\") on node \"crc\" DevicePath \"\""
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.543777 4982 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.543918 4982 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-e0769149-4631-4cc7-801a-7363911a5914" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0769149-4631-4cc7-801a-7363911a5914") on node "crc"
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.631096 4982 reconciler_common.go:293] "Volume detached for volume \"pvc-e0769149-4631-4cc7-801a-7363911a5914\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0769149-4631-4cc7-801a-7363911a5914\") on node \"crc\" DevicePath \"\""
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.746023 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.886309 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0e26bf89-f462-40be-8b10-8fb6e1507bf8","Type":"ContainerStarted","Data":"7cf4467358ce0d59a835ec834abd271cc6ba01ac0a710da0d1bfddc581598f10"}
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.888509 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"5226590b-0c40-49b1-b368-54b9c4e1a741","Type":"ContainerDied","Data":"426823258cf3ef918242748c727af6830aad8820e723a15bec87f3103fad5d9e"}
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.888526 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.888570 4982 scope.go:117] "RemoveContainer" containerID="02039c3a09b9be7d7884025ee211672bddcd565ebd87a89ff5e8998773c3ab46"
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.890315 4982 generic.go:334] "Generic (PLEG): container finished" podID="e62d2f00-b1ff-4158-991e-4203b5faeabe" containerID="14802a996c4047fb4d680e6c1472d4d119f1eb5874e2279d3b0cd19c28eeb81a" exitCode=0
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.890356 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d79f765b5-wh42f" event={"ID":"e62d2f00-b1ff-4158-991e-4203b5faeabe","Type":"ContainerDied","Data":"14802a996c4047fb4d680e6c1472d4d119f1eb5874e2279d3b0cd19c28eeb81a"}
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.903954 4982 scope.go:117] "RemoveContainer" containerID="07e2cfac5e3447b5830561db01e65afbfd9f2f5f38aa8c5d288cf488a5a9f55a"
Jan 22 07:06:02 crc kubenswrapper[4982]: I0122 07:06:02.998077 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.004354 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.026442 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Jan 22 07:06:03 crc kubenswrapper[4982]: E0122 07:06:03.026890 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5226590b-0c40-49b1-b368-54b9c4e1a741" containerName="setup-container"
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.026919 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="5226590b-0c40-49b1-b368-54b9c4e1a741" containerName="setup-container"
Jan 22 07:06:03 crc kubenswrapper[4982]: E0122 07:06:03.026970 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5226590b-0c40-49b1-b368-54b9c4e1a741" containerName="rabbitmq"
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.026982 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="5226590b-0c40-49b1-b368-54b9c4e1a741" containerName="rabbitmq"
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.027238 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="5226590b-0c40-49b1-b368-54b9c4e1a741" containerName="rabbitmq"
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.028308 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.037230 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.038485 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.038555 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.041138 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-lxqc8"
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.041183 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.041144 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.139287 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7214d601-3cd6-4365-981f-2d254e99d620-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7214d601-3cd6-4365-981f-2d254e99d620\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.139354 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7214d601-3cd6-4365-981f-2d254e99d620-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"7214d601-3cd6-4365-981f-2d254e99d620\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.139380 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7214d601-3cd6-4365-981f-2d254e99d620-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7214d601-3cd6-4365-981f-2d254e99d620\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.139826 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7214d601-3cd6-4365-981f-2d254e99d620-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"7214d601-3cd6-4365-981f-2d254e99d620\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.139946 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtcs6\" (UniqueName: \"kubernetes.io/projected/7214d601-3cd6-4365-981f-2d254e99d620-kube-api-access-mtcs6\") pod \"rabbitmq-cell1-server-0\" (UID: \"7214d601-3cd6-4365-981f-2d254e99d620\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.140019 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7214d601-3cd6-4365-981f-2d254e99d620-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"7214d601-3cd6-4365-981f-2d254e99d620\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.140093 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7214d601-3cd6-4365-981f-2d254e99d620-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"7214d601-3cd6-4365-981f-2d254e99d620\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.140133 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7214d601-3cd6-4365-981f-2d254e99d620-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"7214d601-3cd6-4365-981f-2d254e99d620\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.140218 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-e0769149-4631-4cc7-801a-7363911a5914\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0769149-4631-4cc7-801a-7363911a5914\") pod \"rabbitmq-cell1-server-0\" (UID: \"7214d601-3cd6-4365-981f-2d254e99d620\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.241190 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7214d601-3cd6-4365-981f-2d254e99d620-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"7214d601-3cd6-4365-981f-2d254e99d620\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.241246 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtcs6\" (UniqueName: \"kubernetes.io/projected/7214d601-3cd6-4365-981f-2d254e99d620-kube-api-access-mtcs6\") pod \"rabbitmq-cell1-server-0\" (UID: \"7214d601-3cd6-4365-981f-2d254e99d620\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.241277 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7214d601-3cd6-4365-981f-2d254e99d620-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"7214d601-3cd6-4365-981f-2d254e99d620\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.241293 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7214d601-3cd6-4365-981f-2d254e99d620-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"7214d601-3cd6-4365-981f-2d254e99d620\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.241312 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7214d601-3cd6-4365-981f-2d254e99d620-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"7214d601-3cd6-4365-981f-2d254e99d620\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.241338 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-e0769149-4631-4cc7-801a-7363911a5914\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0769149-4631-4cc7-801a-7363911a5914\") pod \"rabbitmq-cell1-server-0\" (UID: \"7214d601-3cd6-4365-981f-2d254e99d620\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.241367 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7214d601-3cd6-4365-981f-2d254e99d620-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7214d601-3cd6-4365-981f-2d254e99d620\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.241391 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7214d601-3cd6-4365-981f-2d254e99d620-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"7214d601-3cd6-4365-981f-2d254e99d620\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.241411 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7214d601-3cd6-4365-981f-2d254e99d620-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7214d601-3cd6-4365-981f-2d254e99d620\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.242099 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7214d601-3cd6-4365-981f-2d254e99d620-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"7214d601-3cd6-4365-981f-2d254e99d620\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.242385 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7214d601-3cd6-4365-981f-2d254e99d620-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7214d601-3cd6-4365-981f-2d254e99d620\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.242421 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7214d601-3cd6-4365-981f-2d254e99d620-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"7214d601-3cd6-4365-981f-2d254e99d620\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.242695 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7214d601-3cd6-4365-981f-2d254e99d620-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7214d601-3cd6-4365-981f-2d254e99d620\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.244188 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.244222 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-e0769149-4631-4cc7-801a-7363911a5914\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0769149-4631-4cc7-801a-7363911a5914\") pod \"rabbitmq-cell1-server-0\" (UID: \"7214d601-3cd6-4365-981f-2d254e99d620\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/479035c1ce5a5ab45fd87d58580fd034545ddc1ab761c2f7aacdb8ec650b38c0/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.544827 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7214d601-3cd6-4365-981f-2d254e99d620-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"7214d601-3cd6-4365-981f-2d254e99d620\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.545456 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtcs6\" (UniqueName: \"kubernetes.io/projected/7214d601-3cd6-4365-981f-2d254e99d620-kube-api-access-mtcs6\") pod \"rabbitmq-cell1-server-0\" (UID: \"7214d601-3cd6-4365-981f-2d254e99d620\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.552245 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7214d601-3cd6-4365-981f-2d254e99d620-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"7214d601-3cd6-4365-981f-2d254e99d620\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.552578 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7214d601-3cd6-4365-981f-2d254e99d620-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"7214d601-3cd6-4365-981f-2d254e99d620\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.718082 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d79f765b5-wh42f" Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.733923 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5226590b-0c40-49b1-b368-54b9c4e1a741" path="/var/lib/kubelet/pods/5226590b-0c40-49b1-b368-54b9c4e1a741/volumes" Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.735457 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fdfea17b-526b-43b7-b93b-bfb7342e4590" path="/var/lib/kubelet/pods/fdfea17b-526b-43b7-b93b-bfb7342e4590/volumes" Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.772129 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-e0769149-4631-4cc7-801a-7363911a5914\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0769149-4631-4cc7-801a-7363911a5914\") pod \"rabbitmq-cell1-server-0\" (UID: \"7214d601-3cd6-4365-981f-2d254e99d620\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.849998 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e62d2f00-b1ff-4158-991e-4203b5faeabe-config\") pod \"e62d2f00-b1ff-4158-991e-4203b5faeabe\" (UID: \"e62d2f00-b1ff-4158-991e-4203b5faeabe\") " Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.850194 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bll2c\" (UniqueName: \"kubernetes.io/projected/e62d2f00-b1ff-4158-991e-4203b5faeabe-kube-api-access-bll2c\") pod \"e62d2f00-b1ff-4158-991e-4203b5faeabe\" (UID: \"e62d2f00-b1ff-4158-991e-4203b5faeabe\") " Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.850251 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e62d2f00-b1ff-4158-991e-4203b5faeabe-dns-svc\") pod \"e62d2f00-b1ff-4158-991e-4203b5faeabe\" (UID: \"e62d2f00-b1ff-4158-991e-4203b5faeabe\") " Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.854462 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e62d2f00-b1ff-4158-991e-4203b5faeabe-kube-api-access-bll2c" (OuterVolumeSpecName: "kube-api-access-bll2c") pod "e62d2f00-b1ff-4158-991e-4203b5faeabe" (UID: "e62d2f00-b1ff-4158-991e-4203b5faeabe"). InnerVolumeSpecName "kube-api-access-bll2c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:06:03 crc kubenswrapper[4982]: E0122 07:06:03.888261 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e62d2f00-b1ff-4158-991e-4203b5faeabe-dns-svc podName:e62d2f00-b1ff-4158-991e-4203b5faeabe nodeName:}" failed. No retries permitted until 2026-01-22 07:06:04.388238023 +0000 UTC m=+4825.226876036 (durationBeforeRetry 500ms). 
Error: error cleaning subPath mounts for volume "dns-svc" (UniqueName: "kubernetes.io/configmap/e62d2f00-b1ff-4158-991e-4203b5faeabe-dns-svc") pod "e62d2f00-b1ff-4158-991e-4203b5faeabe" (UID: "e62d2f00-b1ff-4158-991e-4203b5faeabe") : error deleting /var/lib/kubelet/pods/e62d2f00-b1ff-4158-991e-4203b5faeabe/volume-subpaths: remove /var/lib/kubelet/pods/e62d2f00-b1ff-4158-991e-4203b5faeabe/volume-subpaths: no such file or directory Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.889105 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e62d2f00-b1ff-4158-991e-4203b5faeabe-config" (OuterVolumeSpecName: "config") pod "e62d2f00-b1ff-4158-991e-4203b5faeabe" (UID: "e62d2f00-b1ff-4158-991e-4203b5faeabe"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.905019 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d79f765b5-wh42f" event={"ID":"e62d2f00-b1ff-4158-991e-4203b5faeabe","Type":"ContainerDied","Data":"21ddeb1f158fe380c9413e0f3460dcc9aa082ca07d205f1673430b5726359059"} Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.905097 4982 scope.go:117] "RemoveContainer" containerID="14802a996c4047fb4d680e6c1472d4d119f1eb5874e2279d3b0cd19c28eeb81a" Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.905121 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d79f765b5-wh42f" Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.952332 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bll2c\" (UniqueName: \"kubernetes.io/projected/e62d2f00-b1ff-4158-991e-4203b5faeabe-kube-api-access-bll2c\") on node \"crc\" DevicePath \"\"" Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.952382 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e62d2f00-b1ff-4158-991e-4203b5faeabe-config\") on node \"crc\" DevicePath \"\"" Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.955323 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 22 07:06:03 crc kubenswrapper[4982]: I0122 07:06:03.971127 4982 scope.go:117] "RemoveContainer" containerID="7c0e6228c913ab4ed1d72bad2e3187ebbcbfd42261644416cce8aa1d64ee0a98" Jan 22 07:06:04 crc kubenswrapper[4982]: I0122 07:06:04.476938 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e62d2f00-b1ff-4158-991e-4203b5faeabe-dns-svc\") pod \"e62d2f00-b1ff-4158-991e-4203b5faeabe\" (UID: \"e62d2f00-b1ff-4158-991e-4203b5faeabe\") " Jan 22 07:06:04 crc kubenswrapper[4982]: I0122 07:06:04.477474 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e62d2f00-b1ff-4158-991e-4203b5faeabe-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e62d2f00-b1ff-4158-991e-4203b5faeabe" (UID: "e62d2f00-b1ff-4158-991e-4203b5faeabe"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:06:04 crc kubenswrapper[4982]: I0122 07:06:04.539918 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d79f765b5-wh42f"] Jan 22 07:06:04 crc kubenswrapper[4982]: I0122 07:06:04.548827 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5d79f765b5-wh42f"] Jan 22 07:06:04 crc kubenswrapper[4982]: I0122 07:06:04.564208 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 22 07:06:04 crc kubenswrapper[4982]: I0122 07:06:04.578508 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e62d2f00-b1ff-4158-991e-4203b5faeabe-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 07:06:04 crc kubenswrapper[4982]: I0122 07:06:04.919875 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7214d601-3cd6-4365-981f-2d254e99d620","Type":"ContainerStarted","Data":"5600229931b28cdec97d5170d7911c4b57cf828d4dbf57212be587c744d7c12f"} Jan 22 07:06:04 crc kubenswrapper[4982]: I0122 07:06:04.926526 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0e26bf89-f462-40be-8b10-8fb6e1507bf8","Type":"ContainerStarted","Data":"5dd3c2062103565610aaff2e463098aae5294eefb63e7dd2f5007c50f25a1009"} Jan 22 07:06:05 crc kubenswrapper[4982]: I0122 07:06:05.736164 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e62d2f00-b1ff-4158-991e-4203b5faeabe" path="/var/lib/kubelet/pods/e62d2f00-b1ff-4158-991e-4203b5faeabe/volumes" Jan 22 07:06:05 crc kubenswrapper[4982]: I0122 07:06:05.937314 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7214d601-3cd6-4365-981f-2d254e99d620","Type":"ContainerStarted","Data":"83987c89554b3e28053a96ace8981d5000e431f197d5d2d9a8bacab6a15ac359"} Jan 22 07:06:11 crc kubenswrapper[4982]: I0122 07:06:11.358914 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-9dz95"] Jan 22 07:06:11 crc kubenswrapper[4982]: E0122 07:06:11.360029 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e62d2f00-b1ff-4158-991e-4203b5faeabe" containerName="init" Jan 22 07:06:11 crc kubenswrapper[4982]: I0122 07:06:11.360051 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e62d2f00-b1ff-4158-991e-4203b5faeabe" containerName="init" Jan 22 07:06:11 crc kubenswrapper[4982]: E0122 07:06:11.360084 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e62d2f00-b1ff-4158-991e-4203b5faeabe" containerName="dnsmasq-dns" Jan 22 07:06:11 crc kubenswrapper[4982]: I0122 07:06:11.360095 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e62d2f00-b1ff-4158-991e-4203b5faeabe" containerName="dnsmasq-dns" Jan 22 07:06:11 crc kubenswrapper[4982]: I0122 07:06:11.360349 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="e62d2f00-b1ff-4158-991e-4203b5faeabe" containerName="dnsmasq-dns" Jan 22 07:06:11 crc kubenswrapper[4982]: I0122 07:06:11.361825 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9dz95" Jan 22 07:06:11 crc kubenswrapper[4982]: I0122 07:06:11.379732 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9dz95"] Jan 22 07:06:11 crc kubenswrapper[4982]: I0122 07:06:11.493760 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgjt4\" (UniqueName: \"kubernetes.io/projected/4c0ef41d-af92-40c8-b170-e2c0c76755ca-kube-api-access-lgjt4\") pod \"redhat-marketplace-9dz95\" (UID: \"4c0ef41d-af92-40c8-b170-e2c0c76755ca\") " pod="openshift-marketplace/redhat-marketplace-9dz95" Jan 22 07:06:11 crc kubenswrapper[4982]: I0122 07:06:11.493813 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c0ef41d-af92-40c8-b170-e2c0c76755ca-catalog-content\") pod \"redhat-marketplace-9dz95\" (UID: \"4c0ef41d-af92-40c8-b170-e2c0c76755ca\") " pod="openshift-marketplace/redhat-marketplace-9dz95" Jan 22 07:06:11 crc kubenswrapper[4982]: I0122 07:06:11.493979 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c0ef41d-af92-40c8-b170-e2c0c76755ca-utilities\") pod \"redhat-marketplace-9dz95\" (UID: \"4c0ef41d-af92-40c8-b170-e2c0c76755ca\") " pod="openshift-marketplace/redhat-marketplace-9dz95" Jan 22 07:06:11 crc kubenswrapper[4982]: I0122 07:06:11.595651 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c0ef41d-af92-40c8-b170-e2c0c76755ca-utilities\") pod \"redhat-marketplace-9dz95\" (UID: \"4c0ef41d-af92-40c8-b170-e2c0c76755ca\") " pod="openshift-marketplace/redhat-marketplace-9dz95" Jan 22 07:06:11 crc kubenswrapper[4982]: I0122 07:06:11.595800 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgjt4\" (UniqueName: \"kubernetes.io/projected/4c0ef41d-af92-40c8-b170-e2c0c76755ca-kube-api-access-lgjt4\") pod \"redhat-marketplace-9dz95\" (UID: \"4c0ef41d-af92-40c8-b170-e2c0c76755ca\") " pod="openshift-marketplace/redhat-marketplace-9dz95" Jan 22 07:06:11 crc kubenswrapper[4982]: I0122 07:06:11.595866 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c0ef41d-af92-40c8-b170-e2c0c76755ca-catalog-content\") pod \"redhat-marketplace-9dz95\" (UID: \"4c0ef41d-af92-40c8-b170-e2c0c76755ca\") " pod="openshift-marketplace/redhat-marketplace-9dz95" Jan 22 07:06:11 crc kubenswrapper[4982]: I0122 07:06:11.596382 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c0ef41d-af92-40c8-b170-e2c0c76755ca-utilities\") pod \"redhat-marketplace-9dz95\" (UID: \"4c0ef41d-af92-40c8-b170-e2c0c76755ca\") " pod="openshift-marketplace/redhat-marketplace-9dz95" Jan 22 07:06:11 crc kubenswrapper[4982]: I0122 07:06:11.596418 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c0ef41d-af92-40c8-b170-e2c0c76755ca-catalog-content\") pod \"redhat-marketplace-9dz95\" (UID: \"4c0ef41d-af92-40c8-b170-e2c0c76755ca\") " pod="openshift-marketplace/redhat-marketplace-9dz95" Jan 22 07:06:11 crc kubenswrapper[4982]: I0122 07:06:11.615425 4982 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-lgjt4\" (UniqueName: \"kubernetes.io/projected/4c0ef41d-af92-40c8-b170-e2c0c76755ca-kube-api-access-lgjt4\") pod \"redhat-marketplace-9dz95\" (UID: \"4c0ef41d-af92-40c8-b170-e2c0c76755ca\") " pod="openshift-marketplace/redhat-marketplace-9dz95" Jan 22 07:06:11 crc kubenswrapper[4982]: I0122 07:06:11.700286 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9dz95" Jan 22 07:06:12 crc kubenswrapper[4982]: I0122 07:06:12.006836 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9dz95"] Jan 22 07:06:12 crc kubenswrapper[4982]: W0122 07:06:12.009874 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4c0ef41d_af92_40c8_b170_e2c0c76755ca.slice/crio-7f6d2a481408f28ca23fd9d62d3edd3d76aeca90a67852c867ef3d745bca0f63 WatchSource:0}: Error finding container 7f6d2a481408f28ca23fd9d62d3edd3d76aeca90a67852c867ef3d745bca0f63: Status 404 returned error can't find the container with id 7f6d2a481408f28ca23fd9d62d3edd3d76aeca90a67852c867ef3d745bca0f63 Jan 22 07:06:13 crc kubenswrapper[4982]: I0122 07:06:13.002540 4982 generic.go:334] "Generic (PLEG): container finished" podID="4c0ef41d-af92-40c8-b170-e2c0c76755ca" containerID="bb8e0f527e74f1ef0c421e101517f82fc10ceeceb99af09ef492ebcf4bce98c9" exitCode=0 Jan 22 07:06:13 crc kubenswrapper[4982]: I0122 07:06:13.006592 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9dz95" event={"ID":"4c0ef41d-af92-40c8-b170-e2c0c76755ca","Type":"ContainerDied","Data":"bb8e0f527e74f1ef0c421e101517f82fc10ceeceb99af09ef492ebcf4bce98c9"} Jan 22 07:06:13 crc kubenswrapper[4982]: I0122 07:06:13.006904 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9dz95" event={"ID":"4c0ef41d-af92-40c8-b170-e2c0c76755ca","Type":"ContainerStarted","Data":"7f6d2a481408f28ca23fd9d62d3edd3d76aeca90a67852c867ef3d745bca0f63"} Jan 22 07:06:15 crc kubenswrapper[4982]: I0122 07:06:15.019403 4982 generic.go:334] "Generic (PLEG): container finished" podID="4c0ef41d-af92-40c8-b170-e2c0c76755ca" containerID="cec68363c3f2db154e569c39e085a78173bed7b2afc58dbbd20d3feb71a6d20f" exitCode=0 Jan 22 07:06:15 crc kubenswrapper[4982]: I0122 07:06:15.019516 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9dz95" event={"ID":"4c0ef41d-af92-40c8-b170-e2c0c76755ca","Type":"ContainerDied","Data":"cec68363c3f2db154e569c39e085a78173bed7b2afc58dbbd20d3feb71a6d20f"} Jan 22 07:06:15 crc kubenswrapper[4982]: I0122 07:06:15.719250 4982 scope.go:117] "RemoveContainer" containerID="800a63134ac3b413ea38ff457c1f2e665d54754e82057e18f9f0b05326428673" Jan 22 07:06:15 crc kubenswrapper[4982]: E0122 07:06:15.719547 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:06:17 crc kubenswrapper[4982]: I0122 07:06:17.040044 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9dz95" 
event={"ID":"4c0ef41d-af92-40c8-b170-e2c0c76755ca","Type":"ContainerStarted","Data":"acca4d916d8d364a5f09160a2bae6cdfd20b4a561ef6eab078ec1eca48667e46"} Jan 22 07:06:17 crc kubenswrapper[4982]: I0122 07:06:17.065966 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-9dz95" podStartSLOduration=2.838598539 podStartE2EDuration="6.065943003s" podCreationTimestamp="2026-01-22 07:06:11 +0000 UTC" firstStartedPulling="2026-01-22 07:06:13.00948661 +0000 UTC m=+4833.848124603" lastFinishedPulling="2026-01-22 07:06:16.236831044 +0000 UTC m=+4837.075469067" observedRunningTime="2026-01-22 07:06:17.064831563 +0000 UTC m=+4837.903469576" watchObservedRunningTime="2026-01-22 07:06:17.065943003 +0000 UTC m=+4837.904581006" Jan 22 07:06:21 crc kubenswrapper[4982]: I0122 07:06:21.700749 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-9dz95" Jan 22 07:06:21 crc kubenswrapper[4982]: I0122 07:06:21.702454 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-9dz95" Jan 22 07:06:21 crc kubenswrapper[4982]: I0122 07:06:21.751480 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-9dz95" Jan 22 07:06:22 crc kubenswrapper[4982]: I0122 07:06:22.139134 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-9dz95" Jan 22 07:06:22 crc kubenswrapper[4982]: I0122 07:06:22.193546 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9dz95"] Jan 22 07:06:24 crc kubenswrapper[4982]: I0122 07:06:24.101438 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-9dz95" podUID="4c0ef41d-af92-40c8-b170-e2c0c76755ca" containerName="registry-server" containerID="cri-o://acca4d916d8d364a5f09160a2bae6cdfd20b4a561ef6eab078ec1eca48667e46" gracePeriod=2 Jan 22 07:06:25 crc kubenswrapper[4982]: I0122 07:06:25.148956 4982 generic.go:334] "Generic (PLEG): container finished" podID="4c0ef41d-af92-40c8-b170-e2c0c76755ca" containerID="acca4d916d8d364a5f09160a2bae6cdfd20b4a561ef6eab078ec1eca48667e46" exitCode=0 Jan 22 07:06:25 crc kubenswrapper[4982]: I0122 07:06:25.149207 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9dz95" event={"ID":"4c0ef41d-af92-40c8-b170-e2c0c76755ca","Type":"ContainerDied","Data":"acca4d916d8d364a5f09160a2bae6cdfd20b4a561ef6eab078ec1eca48667e46"} Jan 22 07:06:26 crc kubenswrapper[4982]: I0122 07:06:26.882938 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9dz95" Jan 22 07:06:26 crc kubenswrapper[4982]: I0122 07:06:26.934446 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lgjt4\" (UniqueName: \"kubernetes.io/projected/4c0ef41d-af92-40c8-b170-e2c0c76755ca-kube-api-access-lgjt4\") pod \"4c0ef41d-af92-40c8-b170-e2c0c76755ca\" (UID: \"4c0ef41d-af92-40c8-b170-e2c0c76755ca\") " Jan 22 07:06:26 crc kubenswrapper[4982]: I0122 07:06:26.934536 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c0ef41d-af92-40c8-b170-e2c0c76755ca-utilities\") pod \"4c0ef41d-af92-40c8-b170-e2c0c76755ca\" (UID: \"4c0ef41d-af92-40c8-b170-e2c0c76755ca\") " Jan 22 07:06:26 crc kubenswrapper[4982]: I0122 07:06:26.934637 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c0ef41d-af92-40c8-b170-e2c0c76755ca-catalog-content\") pod \"4c0ef41d-af92-40c8-b170-e2c0c76755ca\" (UID: \"4c0ef41d-af92-40c8-b170-e2c0c76755ca\") " Jan 22 07:06:26 crc kubenswrapper[4982]: I0122 07:06:26.937221 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c0ef41d-af92-40c8-b170-e2c0c76755ca-utilities" (OuterVolumeSpecName: "utilities") pod "4c0ef41d-af92-40c8-b170-e2c0c76755ca" (UID: "4c0ef41d-af92-40c8-b170-e2c0c76755ca"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:06:26 crc kubenswrapper[4982]: I0122 07:06:26.943131 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c0ef41d-af92-40c8-b170-e2c0c76755ca-kube-api-access-lgjt4" (OuterVolumeSpecName: "kube-api-access-lgjt4") pod "4c0ef41d-af92-40c8-b170-e2c0c76755ca" (UID: "4c0ef41d-af92-40c8-b170-e2c0c76755ca"). InnerVolumeSpecName "kube-api-access-lgjt4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:06:26 crc kubenswrapper[4982]: I0122 07:06:26.961498 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c0ef41d-af92-40c8-b170-e2c0c76755ca-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4c0ef41d-af92-40c8-b170-e2c0c76755ca" (UID: "4c0ef41d-af92-40c8-b170-e2c0c76755ca"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:06:27 crc kubenswrapper[4982]: I0122 07:06:27.035988 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c0ef41d-af92-40c8-b170-e2c0c76755ca-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 07:06:27 crc kubenswrapper[4982]: I0122 07:06:27.036030 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c0ef41d-af92-40c8-b170-e2c0c76755ca-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 07:06:27 crc kubenswrapper[4982]: I0122 07:06:27.036041 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lgjt4\" (UniqueName: \"kubernetes.io/projected/4c0ef41d-af92-40c8-b170-e2c0c76755ca-kube-api-access-lgjt4\") on node \"crc\" DevicePath \"\"" Jan 22 07:06:27 crc kubenswrapper[4982]: I0122 07:06:27.173145 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9dz95" event={"ID":"4c0ef41d-af92-40c8-b170-e2c0c76755ca","Type":"ContainerDied","Data":"7f6d2a481408f28ca23fd9d62d3edd3d76aeca90a67852c867ef3d745bca0f63"} Jan 22 07:06:27 crc kubenswrapper[4982]: I0122 07:06:27.173225 4982 scope.go:117] "RemoveContainer" containerID="acca4d916d8d364a5f09160a2bae6cdfd20b4a561ef6eab078ec1eca48667e46" Jan 22 07:06:27 crc kubenswrapper[4982]: I0122 07:06:27.173229 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9dz95" Jan 22 07:06:27 crc kubenswrapper[4982]: I0122 07:06:27.224273 4982 scope.go:117] "RemoveContainer" containerID="cec68363c3f2db154e569c39e085a78173bed7b2afc58dbbd20d3feb71a6d20f" Jan 22 07:06:27 crc kubenswrapper[4982]: I0122 07:06:27.230265 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9dz95"] Jan 22 07:06:27 crc kubenswrapper[4982]: I0122 07:06:27.240082 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-9dz95"] Jan 22 07:06:27 crc kubenswrapper[4982]: I0122 07:06:27.254580 4982 scope.go:117] "RemoveContainer" containerID="bb8e0f527e74f1ef0c421e101517f82fc10ceeceb99af09ef492ebcf4bce98c9" Jan 22 07:06:27 crc kubenswrapper[4982]: I0122 07:06:27.719281 4982 scope.go:117] "RemoveContainer" containerID="800a63134ac3b413ea38ff457c1f2e665d54754e82057e18f9f0b05326428673" Jan 22 07:06:27 crc kubenswrapper[4982]: E0122 07:06:27.721383 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:06:27 crc kubenswrapper[4982]: I0122 07:06:27.731558 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c0ef41d-af92-40c8-b170-e2c0c76755ca" path="/var/lib/kubelet/pods/4c0ef41d-af92-40c8-b170-e2c0c76755ca/volumes" Jan 22 07:06:37 crc kubenswrapper[4982]: I0122 07:06:37.251731 4982 generic.go:334] "Generic (PLEG): container finished" podID="0e26bf89-f462-40be-8b10-8fb6e1507bf8" containerID="5dd3c2062103565610aaff2e463098aae5294eefb63e7dd2f5007c50f25a1009" exitCode=0 Jan 22 07:06:37 crc kubenswrapper[4982]: I0122 07:06:37.251821 4982 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0e26bf89-f462-40be-8b10-8fb6e1507bf8","Type":"ContainerDied","Data":"5dd3c2062103565610aaff2e463098aae5294eefb63e7dd2f5007c50f25a1009"} Jan 22 07:06:38 crc kubenswrapper[4982]: I0122 07:06:38.263003 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0e26bf89-f462-40be-8b10-8fb6e1507bf8","Type":"ContainerStarted","Data":"ac788db2fd26d4da0addf5dfc61d73ab301f18779a5d73dc959502fcdb72cdf6"} Jan 22 07:06:38 crc kubenswrapper[4982]: I0122 07:06:38.263575 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Jan 22 07:06:38 crc kubenswrapper[4982]: I0122 07:06:38.310361 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.310348164 podStartE2EDuration="37.310348164s" podCreationTimestamp="2026-01-22 07:06:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:06:38.306922261 +0000 UTC m=+4859.145560264" watchObservedRunningTime="2026-01-22 07:06:38.310348164 +0000 UTC m=+4859.148986167" Jan 22 07:06:39 crc kubenswrapper[4982]: I0122 07:06:39.277151 4982 generic.go:334] "Generic (PLEG): container finished" podID="7214d601-3cd6-4365-981f-2d254e99d620" containerID="83987c89554b3e28053a96ace8981d5000e431f197d5d2d9a8bacab6a15ac359" exitCode=0 Jan 22 07:06:39 crc kubenswrapper[4982]: I0122 07:06:39.277261 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7214d601-3cd6-4365-981f-2d254e99d620","Type":"ContainerDied","Data":"83987c89554b3e28053a96ace8981d5000e431f197d5d2d9a8bacab6a15ac359"} Jan 22 07:06:40 crc kubenswrapper[4982]: I0122 07:06:40.285285 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7214d601-3cd6-4365-981f-2d254e99d620","Type":"ContainerStarted","Data":"82938ae0325732cd4f636c27d1a8ad962724ab7ff95c7bd55417db7738b67d2a"} Jan 22 07:06:40 crc kubenswrapper[4982]: I0122 07:06:40.285880 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Jan 22 07:06:40 crc kubenswrapper[4982]: I0122 07:06:40.314368 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=38.314345802 podStartE2EDuration="38.314345802s" podCreationTimestamp="2026-01-22 07:06:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:06:40.308478574 +0000 UTC m=+4861.147116567" watchObservedRunningTime="2026-01-22 07:06:40.314345802 +0000 UTC m=+4861.152983845" Jan 22 07:06:41 crc kubenswrapper[4982]: I0122 07:06:41.719380 4982 scope.go:117] "RemoveContainer" containerID="800a63134ac3b413ea38ff457c1f2e665d54754e82057e18f9f0b05326428673" Jan 22 07:06:41 crc kubenswrapper[4982]: E0122 07:06:41.719636 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:06:52 crc kubenswrapper[4982]: I0122 
07:06:52.301013 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Jan 22 07:06:52 crc kubenswrapper[4982]: I0122 07:06:52.719198 4982 scope.go:117] "RemoveContainer" containerID="800a63134ac3b413ea38ff457c1f2e665d54754e82057e18f9f0b05326428673" Jan 22 07:06:53 crc kubenswrapper[4982]: I0122 07:06:53.411484 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"ed00d36a55987051b22728819c2b04fda1364b352be83e13ac9968d0431cf25b"} Jan 22 07:06:53 crc kubenswrapper[4982]: I0122 07:06:53.960142 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Jan 22 07:07:04 crc kubenswrapper[4982]: I0122 07:07:04.501296 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Jan 22 07:07:04 crc kubenswrapper[4982]: E0122 07:07:04.502782 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c0ef41d-af92-40c8-b170-e2c0c76755ca" containerName="extract-content" Jan 22 07:07:04 crc kubenswrapper[4982]: I0122 07:07:04.502813 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c0ef41d-af92-40c8-b170-e2c0c76755ca" containerName="extract-content" Jan 22 07:07:04 crc kubenswrapper[4982]: E0122 07:07:04.502833 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c0ef41d-af92-40c8-b170-e2c0c76755ca" containerName="registry-server" Jan 22 07:07:04 crc kubenswrapper[4982]: I0122 07:07:04.502846 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c0ef41d-af92-40c8-b170-e2c0c76755ca" containerName="registry-server" Jan 22 07:07:04 crc kubenswrapper[4982]: E0122 07:07:04.502914 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c0ef41d-af92-40c8-b170-e2c0c76755ca" containerName="extract-utilities" Jan 22 07:07:04 crc kubenswrapper[4982]: I0122 07:07:04.502928 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c0ef41d-af92-40c8-b170-e2c0c76755ca" containerName="extract-utilities" Jan 22 07:07:04 crc kubenswrapper[4982]: I0122 07:07:04.503241 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c0ef41d-af92-40c8-b170-e2c0c76755ca" containerName="registry-server" Jan 22 07:07:04 crc kubenswrapper[4982]: I0122 07:07:04.504163 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Jan 22 07:07:04 crc kubenswrapper[4982]: I0122 07:07:04.506672 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-mfrrw" Jan 22 07:07:04 crc kubenswrapper[4982]: I0122 07:07:04.520049 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Jan 22 07:07:04 crc kubenswrapper[4982]: I0122 07:07:04.538314 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5p7wt\" (UniqueName: \"kubernetes.io/projected/f8ed5220-ba91-4512-a05a-e9e67e205cef-kube-api-access-5p7wt\") pod \"mariadb-client\" (UID: \"f8ed5220-ba91-4512-a05a-e9e67e205cef\") " pod="openstack/mariadb-client" Jan 22 07:07:04 crc kubenswrapper[4982]: I0122 07:07:04.639612 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5p7wt\" (UniqueName: \"kubernetes.io/projected/f8ed5220-ba91-4512-a05a-e9e67e205cef-kube-api-access-5p7wt\") pod \"mariadb-client\" (UID: \"f8ed5220-ba91-4512-a05a-e9e67e205cef\") " pod="openstack/mariadb-client" Jan 22 07:07:04 crc kubenswrapper[4982]: I0122 07:07:04.665835 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5p7wt\" (UniqueName: \"kubernetes.io/projected/f8ed5220-ba91-4512-a05a-e9e67e205cef-kube-api-access-5p7wt\") pod \"mariadb-client\" (UID: \"f8ed5220-ba91-4512-a05a-e9e67e205cef\") " pod="openstack/mariadb-client" Jan 22 07:07:04 crc kubenswrapper[4982]: I0122 07:07:04.838277 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Jan 22 07:07:05 crc kubenswrapper[4982]: I0122 07:07:05.438170 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Jan 22 07:07:05 crc kubenswrapper[4982]: W0122 07:07:05.439312 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf8ed5220_ba91_4512_a05a_e9e67e205cef.slice/crio-ebbdc5751318d0032729be1c5460b785b56d85e44eb41e372de95db35489b3e8 WatchSource:0}: Error finding container ebbdc5751318d0032729be1c5460b785b56d85e44eb41e372de95db35489b3e8: Status 404 returned error can't find the container with id ebbdc5751318d0032729be1c5460b785b56d85e44eb41e372de95db35489b3e8 Jan 22 07:07:05 crc kubenswrapper[4982]: I0122 07:07:05.525636 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"f8ed5220-ba91-4512-a05a-e9e67e205cef","Type":"ContainerStarted","Data":"ebbdc5751318d0032729be1c5460b785b56d85e44eb41e372de95db35489b3e8"} Jan 22 07:07:06 crc kubenswrapper[4982]: I0122 07:07:06.538069 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"f8ed5220-ba91-4512-a05a-e9e67e205cef","Type":"ContainerStarted","Data":"d6b28182fe863ad55d20af447694442de40760fe140a45128c649e6fc5fa5207"} Jan 22 07:07:06 crc kubenswrapper[4982]: I0122 07:07:06.562037 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-client" podStartSLOduration=1.903438424 podStartE2EDuration="2.562011896s" podCreationTimestamp="2026-01-22 07:07:04 +0000 UTC" firstStartedPulling="2026-01-22 07:07:05.441231345 +0000 UTC m=+4886.279869348" lastFinishedPulling="2026-01-22 07:07:06.099804827 +0000 UTC m=+4886.938442820" observedRunningTime="2026-01-22 07:07:06.557376022 +0000 UTC m=+4887.396014105" watchObservedRunningTime="2026-01-22 
07:07:06.562011896 +0000 UTC m=+4887.400649909" Jan 22 07:07:16 crc kubenswrapper[4982]: E0122 07:07:16.426375 4982 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.158:43544->38.102.83.158:45495: write tcp 38.102.83.158:43544->38.102.83.158:45495: write: broken pipe Jan 22 07:07:19 crc kubenswrapper[4982]: I0122 07:07:19.633389 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Jan 22 07:07:19 crc kubenswrapper[4982]: I0122 07:07:19.633936 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/mariadb-client" podUID="f8ed5220-ba91-4512-a05a-e9e67e205cef" containerName="mariadb-client" containerID="cri-o://d6b28182fe863ad55d20af447694442de40760fe140a45128c649e6fc5fa5207" gracePeriod=30 Jan 22 07:07:20 crc kubenswrapper[4982]: I0122 07:07:20.418701 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Jan 22 07:07:20 crc kubenswrapper[4982]: I0122 07:07:20.609910 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5p7wt\" (UniqueName: \"kubernetes.io/projected/f8ed5220-ba91-4512-a05a-e9e67e205cef-kube-api-access-5p7wt\") pod \"f8ed5220-ba91-4512-a05a-e9e67e205cef\" (UID: \"f8ed5220-ba91-4512-a05a-e9e67e205cef\") " Jan 22 07:07:20 crc kubenswrapper[4982]: I0122 07:07:20.620544 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8ed5220-ba91-4512-a05a-e9e67e205cef-kube-api-access-5p7wt" (OuterVolumeSpecName: "kube-api-access-5p7wt") pod "f8ed5220-ba91-4512-a05a-e9e67e205cef" (UID: "f8ed5220-ba91-4512-a05a-e9e67e205cef"). InnerVolumeSpecName "kube-api-access-5p7wt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:07:20 crc kubenswrapper[4982]: I0122 07:07:20.656385 4982 generic.go:334] "Generic (PLEG): container finished" podID="f8ed5220-ba91-4512-a05a-e9e67e205cef" containerID="d6b28182fe863ad55d20af447694442de40760fe140a45128c649e6fc5fa5207" exitCode=143 Jan 22 07:07:20 crc kubenswrapper[4982]: I0122 07:07:20.656454 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"f8ed5220-ba91-4512-a05a-e9e67e205cef","Type":"ContainerDied","Data":"d6b28182fe863ad55d20af447694442de40760fe140a45128c649e6fc5fa5207"} Jan 22 07:07:20 crc kubenswrapper[4982]: I0122 07:07:20.656486 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"f8ed5220-ba91-4512-a05a-e9e67e205cef","Type":"ContainerDied","Data":"ebbdc5751318d0032729be1c5460b785b56d85e44eb41e372de95db35489b3e8"} Jan 22 07:07:20 crc kubenswrapper[4982]: I0122 07:07:20.656488 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Jan 22 07:07:20 crc kubenswrapper[4982]: I0122 07:07:20.656528 4982 scope.go:117] "RemoveContainer" containerID="d6b28182fe863ad55d20af447694442de40760fe140a45128c649e6fc5fa5207" Jan 22 07:07:20 crc kubenswrapper[4982]: I0122 07:07:20.678646 4982 scope.go:117] "RemoveContainer" containerID="d6b28182fe863ad55d20af447694442de40760fe140a45128c649e6fc5fa5207" Jan 22 07:07:20 crc kubenswrapper[4982]: E0122 07:07:20.679188 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d6b28182fe863ad55d20af447694442de40760fe140a45128c649e6fc5fa5207\": container with ID starting with d6b28182fe863ad55d20af447694442de40760fe140a45128c649e6fc5fa5207 not found: ID does not exist" containerID="d6b28182fe863ad55d20af447694442de40760fe140a45128c649e6fc5fa5207" Jan 22 07:07:20 crc kubenswrapper[4982]: I0122 07:07:20.679277 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d6b28182fe863ad55d20af447694442de40760fe140a45128c649e6fc5fa5207"} err="failed to get container status \"d6b28182fe863ad55d20af447694442de40760fe140a45128c649e6fc5fa5207\": rpc error: code = NotFound desc = could not find container \"d6b28182fe863ad55d20af447694442de40760fe140a45128c649e6fc5fa5207\": container with ID starting with d6b28182fe863ad55d20af447694442de40760fe140a45128c649e6fc5fa5207 not found: ID does not exist" Jan 22 07:07:20 crc kubenswrapper[4982]: I0122 07:07:20.696114 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Jan 22 07:07:20 crc kubenswrapper[4982]: I0122 07:07:20.703017 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Jan 22 07:07:20 crc kubenswrapper[4982]: I0122 07:07:20.719040 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5p7wt\" (UniqueName: \"kubernetes.io/projected/f8ed5220-ba91-4512-a05a-e9e67e205cef-kube-api-access-5p7wt\") on node \"crc\" DevicePath \"\"" Jan 22 07:07:21 crc kubenswrapper[4982]: I0122 07:07:21.735958 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8ed5220-ba91-4512-a05a-e9e67e205cef" path="/var/lib/kubelet/pods/f8ed5220-ba91-4512-a05a-e9e67e205cef/volumes" Jan 22 07:08:02 crc kubenswrapper[4982]: I0122 07:08:02.618843 4982 scope.go:117] "RemoveContainer" containerID="c383fad269ae361d00654f91f53a07be2b18e16463bd1bd68dae25ad216c209c" Jan 22 07:09:18 crc kubenswrapper[4982]: I0122 07:09:18.973677 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 07:09:18 crc kubenswrapper[4982]: I0122 07:09:18.975152 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 07:09:48 crc kubenswrapper[4982]: I0122 07:09:48.973763 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: 
connection refused" start-of-body= Jan 22 07:09:48 crc kubenswrapper[4982]: I0122 07:09:48.974445 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 07:10:06 crc kubenswrapper[4982]: I0122 07:10:06.907780 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-spksj"] Jan 22 07:10:06 crc kubenswrapper[4982]: E0122 07:10:06.908966 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8ed5220-ba91-4512-a05a-e9e67e205cef" containerName="mariadb-client" Jan 22 07:10:06 crc kubenswrapper[4982]: I0122 07:10:06.908987 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8ed5220-ba91-4512-a05a-e9e67e205cef" containerName="mariadb-client" Jan 22 07:10:06 crc kubenswrapper[4982]: I0122 07:10:06.909228 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8ed5220-ba91-4512-a05a-e9e67e205cef" containerName="mariadb-client" Jan 22 07:10:06 crc kubenswrapper[4982]: I0122 07:10:06.910894 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-spksj" Jan 22 07:10:06 crc kubenswrapper[4982]: I0122 07:10:06.922350 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-spksj"] Jan 22 07:10:07 crc kubenswrapper[4982]: I0122 07:10:07.048102 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/acbb5e0d-ca22-45a0-b587-624b7e98f680-catalog-content\") pod \"certified-operators-spksj\" (UID: \"acbb5e0d-ca22-45a0-b587-624b7e98f680\") " pod="openshift-marketplace/certified-operators-spksj" Jan 22 07:10:07 crc kubenswrapper[4982]: I0122 07:10:07.048200 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfq9c\" (UniqueName: \"kubernetes.io/projected/acbb5e0d-ca22-45a0-b587-624b7e98f680-kube-api-access-vfq9c\") pod \"certified-operators-spksj\" (UID: \"acbb5e0d-ca22-45a0-b587-624b7e98f680\") " pod="openshift-marketplace/certified-operators-spksj" Jan 22 07:10:07 crc kubenswrapper[4982]: I0122 07:10:07.048241 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/acbb5e0d-ca22-45a0-b587-624b7e98f680-utilities\") pod \"certified-operators-spksj\" (UID: \"acbb5e0d-ca22-45a0-b587-624b7e98f680\") " pod="openshift-marketplace/certified-operators-spksj" Jan 22 07:10:07 crc kubenswrapper[4982]: I0122 07:10:07.149546 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/acbb5e0d-ca22-45a0-b587-624b7e98f680-catalog-content\") pod \"certified-operators-spksj\" (UID: \"acbb5e0d-ca22-45a0-b587-624b7e98f680\") " pod="openshift-marketplace/certified-operators-spksj" Jan 22 07:10:07 crc kubenswrapper[4982]: I0122 07:10:07.149620 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vfq9c\" (UniqueName: \"kubernetes.io/projected/acbb5e0d-ca22-45a0-b587-624b7e98f680-kube-api-access-vfq9c\") pod \"certified-operators-spksj\" (UID: 
\"acbb5e0d-ca22-45a0-b587-624b7e98f680\") " pod="openshift-marketplace/certified-operators-spksj" Jan 22 07:10:07 crc kubenswrapper[4982]: I0122 07:10:07.149647 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/acbb5e0d-ca22-45a0-b587-624b7e98f680-utilities\") pod \"certified-operators-spksj\" (UID: \"acbb5e0d-ca22-45a0-b587-624b7e98f680\") " pod="openshift-marketplace/certified-operators-spksj" Jan 22 07:10:07 crc kubenswrapper[4982]: I0122 07:10:07.150137 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/acbb5e0d-ca22-45a0-b587-624b7e98f680-utilities\") pod \"certified-operators-spksj\" (UID: \"acbb5e0d-ca22-45a0-b587-624b7e98f680\") " pod="openshift-marketplace/certified-operators-spksj" Jan 22 07:10:07 crc kubenswrapper[4982]: I0122 07:10:07.150149 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/acbb5e0d-ca22-45a0-b587-624b7e98f680-catalog-content\") pod \"certified-operators-spksj\" (UID: \"acbb5e0d-ca22-45a0-b587-624b7e98f680\") " pod="openshift-marketplace/certified-operators-spksj" Jan 22 07:10:07 crc kubenswrapper[4982]: I0122 07:10:07.170256 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vfq9c\" (UniqueName: \"kubernetes.io/projected/acbb5e0d-ca22-45a0-b587-624b7e98f680-kube-api-access-vfq9c\") pod \"certified-operators-spksj\" (UID: \"acbb5e0d-ca22-45a0-b587-624b7e98f680\") " pod="openshift-marketplace/certified-operators-spksj" Jan 22 07:10:07 crc kubenswrapper[4982]: I0122 07:10:07.238927 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-spksj" Jan 22 07:10:07 crc kubenswrapper[4982]: I0122 07:10:07.513084 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-spksj"] Jan 22 07:10:08 crc kubenswrapper[4982]: I0122 07:10:08.145095 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-spksj" event={"ID":"acbb5e0d-ca22-45a0-b587-624b7e98f680","Type":"ContainerStarted","Data":"dacd63d0102d4496a2848fefe6a44479b5b4a0b4fa6d079c090ca164a0ddfac0"} Jan 22 07:10:09 crc kubenswrapper[4982]: I0122 07:10:09.153086 4982 generic.go:334] "Generic (PLEG): container finished" podID="acbb5e0d-ca22-45a0-b587-624b7e98f680" containerID="fb5529b07711007903e3127fd2daf0890f5512d642b54bb8291b9e7bba94d4a1" exitCode=0 Jan 22 07:10:09 crc kubenswrapper[4982]: I0122 07:10:09.153143 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-spksj" event={"ID":"acbb5e0d-ca22-45a0-b587-624b7e98f680","Type":"ContainerDied","Data":"fb5529b07711007903e3127fd2daf0890f5512d642b54bb8291b9e7bba94d4a1"} Jan 22 07:10:10 crc kubenswrapper[4982]: I0122 07:10:10.162956 4982 generic.go:334] "Generic (PLEG): container finished" podID="acbb5e0d-ca22-45a0-b587-624b7e98f680" containerID="69222af0ed124e346a3447d28c8d4fea6d233c47fa08fda42f729015b9f382df" exitCode=0 Jan 22 07:10:10 crc kubenswrapper[4982]: I0122 07:10:10.163071 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-spksj" event={"ID":"acbb5e0d-ca22-45a0-b587-624b7e98f680","Type":"ContainerDied","Data":"69222af0ed124e346a3447d28c8d4fea6d233c47fa08fda42f729015b9f382df"} Jan 22 07:10:11 crc kubenswrapper[4982]: I0122 
07:10:11.171251 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-spksj" event={"ID":"acbb5e0d-ca22-45a0-b587-624b7e98f680","Type":"ContainerStarted","Data":"4fdadd8fc52dbae4c1944e29da7f7e9a051c37f88dfea7f132f9c8a6c3082d2e"} Jan 22 07:10:11 crc kubenswrapper[4982]: I0122 07:10:11.196806 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-spksj" podStartSLOduration=3.705164354 podStartE2EDuration="5.196788971s" podCreationTimestamp="2026-01-22 07:10:06 +0000 UTC" firstStartedPulling="2026-01-22 07:10:09.15597425 +0000 UTC m=+5069.994612253" lastFinishedPulling="2026-01-22 07:10:10.647598867 +0000 UTC m=+5071.486236870" observedRunningTime="2026-01-22 07:10:11.191686273 +0000 UTC m=+5072.030324276" watchObservedRunningTime="2026-01-22 07:10:11.196788971 +0000 UTC m=+5072.035426974" Jan 22 07:10:17 crc kubenswrapper[4982]: I0122 07:10:17.239651 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-spksj" Jan 22 07:10:17 crc kubenswrapper[4982]: I0122 07:10:17.240332 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-spksj" Jan 22 07:10:17 crc kubenswrapper[4982]: I0122 07:10:17.279473 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-spksj" Jan 22 07:10:18 crc kubenswrapper[4982]: I0122 07:10:18.257274 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-spksj" Jan 22 07:10:18 crc kubenswrapper[4982]: I0122 07:10:18.298359 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-spksj"] Jan 22 07:10:18 crc kubenswrapper[4982]: I0122 07:10:18.973612 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 07:10:18 crc kubenswrapper[4982]: I0122 07:10:18.974031 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 07:10:18 crc kubenswrapper[4982]: I0122 07:10:18.974088 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 07:10:18 crc kubenswrapper[4982]: I0122 07:10:18.974699 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ed00d36a55987051b22728819c2b04fda1364b352be83e13ac9968d0431cf25b"} pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 07:10:18 crc kubenswrapper[4982]: I0122 07:10:18.974791 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" 
containerID="cri-o://ed00d36a55987051b22728819c2b04fda1364b352be83e13ac9968d0431cf25b" gracePeriod=600 Jan 22 07:10:20 crc kubenswrapper[4982]: I0122 07:10:20.262944 4982 generic.go:334] "Generic (PLEG): container finished" podID="2829369e-72ba-4637-853b-88f5cf242a0e" containerID="ed00d36a55987051b22728819c2b04fda1364b352be83e13ac9968d0431cf25b" exitCode=0 Jan 22 07:10:20 crc kubenswrapper[4982]: I0122 07:10:20.263685 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-spksj" podUID="acbb5e0d-ca22-45a0-b587-624b7e98f680" containerName="registry-server" containerID="cri-o://4fdadd8fc52dbae4c1944e29da7f7e9a051c37f88dfea7f132f9c8a6c3082d2e" gracePeriod=2 Jan 22 07:10:20 crc kubenswrapper[4982]: I0122 07:10:20.264069 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerDied","Data":"ed00d36a55987051b22728819c2b04fda1364b352be83e13ac9968d0431cf25b"} Jan 22 07:10:20 crc kubenswrapper[4982]: I0122 07:10:20.264127 4982 scope.go:117] "RemoveContainer" containerID="800a63134ac3b413ea38ff457c1f2e665d54754e82057e18f9f0b05326428673" Jan 22 07:10:20 crc kubenswrapper[4982]: I0122 07:10:20.699401 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-spksj" Jan 22 07:10:20 crc kubenswrapper[4982]: I0122 07:10:20.867339 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/acbb5e0d-ca22-45a0-b587-624b7e98f680-utilities\") pod \"acbb5e0d-ca22-45a0-b587-624b7e98f680\" (UID: \"acbb5e0d-ca22-45a0-b587-624b7e98f680\") " Jan 22 07:10:20 crc kubenswrapper[4982]: I0122 07:10:20.867392 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/acbb5e0d-ca22-45a0-b587-624b7e98f680-catalog-content\") pod \"acbb5e0d-ca22-45a0-b587-624b7e98f680\" (UID: \"acbb5e0d-ca22-45a0-b587-624b7e98f680\") " Jan 22 07:10:20 crc kubenswrapper[4982]: I0122 07:10:20.867427 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vfq9c\" (UniqueName: \"kubernetes.io/projected/acbb5e0d-ca22-45a0-b587-624b7e98f680-kube-api-access-vfq9c\") pod \"acbb5e0d-ca22-45a0-b587-624b7e98f680\" (UID: \"acbb5e0d-ca22-45a0-b587-624b7e98f680\") " Jan 22 07:10:20 crc kubenswrapper[4982]: I0122 07:10:20.868345 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/acbb5e0d-ca22-45a0-b587-624b7e98f680-utilities" (OuterVolumeSpecName: "utilities") pod "acbb5e0d-ca22-45a0-b587-624b7e98f680" (UID: "acbb5e0d-ca22-45a0-b587-624b7e98f680"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:10:20 crc kubenswrapper[4982]: I0122 07:10:20.873021 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/acbb5e0d-ca22-45a0-b587-624b7e98f680-kube-api-access-vfq9c" (OuterVolumeSpecName: "kube-api-access-vfq9c") pod "acbb5e0d-ca22-45a0-b587-624b7e98f680" (UID: "acbb5e0d-ca22-45a0-b587-624b7e98f680"). InnerVolumeSpecName "kube-api-access-vfq9c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:10:20 crc kubenswrapper[4982]: I0122 07:10:20.912085 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/acbb5e0d-ca22-45a0-b587-624b7e98f680-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "acbb5e0d-ca22-45a0-b587-624b7e98f680" (UID: "acbb5e0d-ca22-45a0-b587-624b7e98f680"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:10:20 crc kubenswrapper[4982]: I0122 07:10:20.969153 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/acbb5e0d-ca22-45a0-b587-624b7e98f680-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 07:10:20 crc kubenswrapper[4982]: I0122 07:10:20.969199 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/acbb5e0d-ca22-45a0-b587-624b7e98f680-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 07:10:20 crc kubenswrapper[4982]: I0122 07:10:20.969211 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vfq9c\" (UniqueName: \"kubernetes.io/projected/acbb5e0d-ca22-45a0-b587-624b7e98f680-kube-api-access-vfq9c\") on node \"crc\" DevicePath \"\"" Jan 22 07:10:21 crc kubenswrapper[4982]: I0122 07:10:21.277976 4982 generic.go:334] "Generic (PLEG): container finished" podID="acbb5e0d-ca22-45a0-b587-624b7e98f680" containerID="4fdadd8fc52dbae4c1944e29da7f7e9a051c37f88dfea7f132f9c8a6c3082d2e" exitCode=0 Jan 22 07:10:21 crc kubenswrapper[4982]: I0122 07:10:21.278054 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-spksj" Jan 22 07:10:21 crc kubenswrapper[4982]: I0122 07:10:21.278115 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-spksj" event={"ID":"acbb5e0d-ca22-45a0-b587-624b7e98f680","Type":"ContainerDied","Data":"4fdadd8fc52dbae4c1944e29da7f7e9a051c37f88dfea7f132f9c8a6c3082d2e"} Jan 22 07:10:21 crc kubenswrapper[4982]: I0122 07:10:21.278194 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-spksj" event={"ID":"acbb5e0d-ca22-45a0-b587-624b7e98f680","Type":"ContainerDied","Data":"dacd63d0102d4496a2848fefe6a44479b5b4a0b4fa6d079c090ca164a0ddfac0"} Jan 22 07:10:21 crc kubenswrapper[4982]: I0122 07:10:21.278235 4982 scope.go:117] "RemoveContainer" containerID="4fdadd8fc52dbae4c1944e29da7f7e9a051c37f88dfea7f132f9c8a6c3082d2e" Jan 22 07:10:21 crc kubenswrapper[4982]: I0122 07:10:21.281403 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"dd1b1f68267d9d0fc8f31fc896f788451cccffbbd47d3265a31a9c82c393becc"} Jan 22 07:10:21 crc kubenswrapper[4982]: I0122 07:10:21.305802 4982 scope.go:117] "RemoveContainer" containerID="69222af0ed124e346a3447d28c8d4fea6d233c47fa08fda42f729015b9f382df" Jan 22 07:10:21 crc kubenswrapper[4982]: I0122 07:10:21.330776 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-spksj"] Jan 22 07:10:21 crc kubenswrapper[4982]: I0122 07:10:21.340965 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-spksj"] Jan 22 07:10:21 crc kubenswrapper[4982]: I0122 07:10:21.347561 4982 scope.go:117] "RemoveContainer" 
containerID="fb5529b07711007903e3127fd2daf0890f5512d642b54bb8291b9e7bba94d4a1" Jan 22 07:10:21 crc kubenswrapper[4982]: I0122 07:10:21.370704 4982 scope.go:117] "RemoveContainer" containerID="4fdadd8fc52dbae4c1944e29da7f7e9a051c37f88dfea7f132f9c8a6c3082d2e" Jan 22 07:10:21 crc kubenswrapper[4982]: E0122 07:10:21.371067 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4fdadd8fc52dbae4c1944e29da7f7e9a051c37f88dfea7f132f9c8a6c3082d2e\": container with ID starting with 4fdadd8fc52dbae4c1944e29da7f7e9a051c37f88dfea7f132f9c8a6c3082d2e not found: ID does not exist" containerID="4fdadd8fc52dbae4c1944e29da7f7e9a051c37f88dfea7f132f9c8a6c3082d2e" Jan 22 07:10:21 crc kubenswrapper[4982]: I0122 07:10:21.371116 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fdadd8fc52dbae4c1944e29da7f7e9a051c37f88dfea7f132f9c8a6c3082d2e"} err="failed to get container status \"4fdadd8fc52dbae4c1944e29da7f7e9a051c37f88dfea7f132f9c8a6c3082d2e\": rpc error: code = NotFound desc = could not find container \"4fdadd8fc52dbae4c1944e29da7f7e9a051c37f88dfea7f132f9c8a6c3082d2e\": container with ID starting with 4fdadd8fc52dbae4c1944e29da7f7e9a051c37f88dfea7f132f9c8a6c3082d2e not found: ID does not exist" Jan 22 07:10:21 crc kubenswrapper[4982]: I0122 07:10:21.371144 4982 scope.go:117] "RemoveContainer" containerID="69222af0ed124e346a3447d28c8d4fea6d233c47fa08fda42f729015b9f382df" Jan 22 07:10:21 crc kubenswrapper[4982]: E0122 07:10:21.371368 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69222af0ed124e346a3447d28c8d4fea6d233c47fa08fda42f729015b9f382df\": container with ID starting with 69222af0ed124e346a3447d28c8d4fea6d233c47fa08fda42f729015b9f382df not found: ID does not exist" containerID="69222af0ed124e346a3447d28c8d4fea6d233c47fa08fda42f729015b9f382df" Jan 22 07:10:21 crc kubenswrapper[4982]: I0122 07:10:21.371396 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69222af0ed124e346a3447d28c8d4fea6d233c47fa08fda42f729015b9f382df"} err="failed to get container status \"69222af0ed124e346a3447d28c8d4fea6d233c47fa08fda42f729015b9f382df\": rpc error: code = NotFound desc = could not find container \"69222af0ed124e346a3447d28c8d4fea6d233c47fa08fda42f729015b9f382df\": container with ID starting with 69222af0ed124e346a3447d28c8d4fea6d233c47fa08fda42f729015b9f382df not found: ID does not exist" Jan 22 07:10:21 crc kubenswrapper[4982]: I0122 07:10:21.371414 4982 scope.go:117] "RemoveContainer" containerID="fb5529b07711007903e3127fd2daf0890f5512d642b54bb8291b9e7bba94d4a1" Jan 22 07:10:21 crc kubenswrapper[4982]: E0122 07:10:21.372026 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb5529b07711007903e3127fd2daf0890f5512d642b54bb8291b9e7bba94d4a1\": container with ID starting with fb5529b07711007903e3127fd2daf0890f5512d642b54bb8291b9e7bba94d4a1 not found: ID does not exist" containerID="fb5529b07711007903e3127fd2daf0890f5512d642b54bb8291b9e7bba94d4a1" Jan 22 07:10:21 crc kubenswrapper[4982]: I0122 07:10:21.372105 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb5529b07711007903e3127fd2daf0890f5512d642b54bb8291b9e7bba94d4a1"} err="failed to get container status \"fb5529b07711007903e3127fd2daf0890f5512d642b54bb8291b9e7bba94d4a1\": rpc error: code = 
NotFound desc = could not find container \"fb5529b07711007903e3127fd2daf0890f5512d642b54bb8291b9e7bba94d4a1\": container with ID starting with fb5529b07711007903e3127fd2daf0890f5512d642b54bb8291b9e7bba94d4a1 not found: ID does not exist" Jan 22 07:10:21 crc kubenswrapper[4982]: I0122 07:10:21.729128 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="acbb5e0d-ca22-45a0-b587-624b7e98f680" path="/var/lib/kubelet/pods/acbb5e0d-ca22-45a0-b587-624b7e98f680/volumes" Jan 22 07:11:02 crc kubenswrapper[4982]: I0122 07:11:02.712542 4982 scope.go:117] "RemoveContainer" containerID="2cd25ece81a7e80c92619a17f6bec429a2b0c3ba6b5aa5a3afe0ca9dbe70bb76" Jan 22 07:11:02 crc kubenswrapper[4982]: I0122 07:11:02.735078 4982 scope.go:117] "RemoveContainer" containerID="1263e2dfd7d4e5f7e54cf90110e0f79b3cd3924b7d332b901e535cb9a369b62a" Jan 22 07:11:50 crc kubenswrapper[4982]: I0122 07:11:50.365829 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-copy-data"] Jan 22 07:11:50 crc kubenswrapper[4982]: E0122 07:11:50.366669 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acbb5e0d-ca22-45a0-b587-624b7e98f680" containerName="extract-utilities" Jan 22 07:11:50 crc kubenswrapper[4982]: I0122 07:11:50.366683 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="acbb5e0d-ca22-45a0-b587-624b7e98f680" containerName="extract-utilities" Jan 22 07:11:50 crc kubenswrapper[4982]: E0122 07:11:50.366703 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acbb5e0d-ca22-45a0-b587-624b7e98f680" containerName="registry-server" Jan 22 07:11:50 crc kubenswrapper[4982]: I0122 07:11:50.366710 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="acbb5e0d-ca22-45a0-b587-624b7e98f680" containerName="registry-server" Jan 22 07:11:50 crc kubenswrapper[4982]: E0122 07:11:50.366727 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acbb5e0d-ca22-45a0-b587-624b7e98f680" containerName="extract-content" Jan 22 07:11:50 crc kubenswrapper[4982]: I0122 07:11:50.366733 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="acbb5e0d-ca22-45a0-b587-624b7e98f680" containerName="extract-content" Jan 22 07:11:50 crc kubenswrapper[4982]: I0122 07:11:50.366890 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="acbb5e0d-ca22-45a0-b587-624b7e98f680" containerName="registry-server" Jan 22 07:11:50 crc kubenswrapper[4982]: I0122 07:11:50.367352 4982 util.go:30] "No sandbox for pod can be found. 
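The RemoveContainer / "ContainerStatus from runtime service failed" pairs above are a benign race: the container is already gone by the time the deletor re-queries it, so the runtime answers gRPC NotFound and kubelet merely logs "DeleteContainer returned error". A sketch of how a CRI client can treat that case as already-done, assuming the generated k8s.io/cri-api client; illustrative, not kubelet's actual code:

package main

import (
	"context"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
	runtimeapi "k8s.io/cri-api/pkg/apis/runtime/v1"
)

// containerStatusOrGone returns (nil, nil) when the runtime no longer knows
// the container, mirroring the NotFound handling visible in the log above.
func containerStatusOrGone(ctx context.Context, rt runtimeapi.RuntimeServiceClient, id string) (*runtimeapi.ContainerStatus, error) {
	resp, err := rt.ContainerStatus(ctx, &runtimeapi.ContainerStatusRequest{ContainerId: id})
	if status.Code(err) == codes.NotFound {
		return nil, nil // already removed: nothing left to delete
	}
	if err != nil {
		return nil, err
	}
	return resp.GetStatus(), nil
}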
Need to start a new one" pod="openstack/mariadb-copy-data" Jan 22 07:11:50 crc kubenswrapper[4982]: I0122 07:11:50.369404 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-mfrrw" Jan 22 07:11:50 crc kubenswrapper[4982]: I0122 07:11:50.383968 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"] Jan 22 07:11:50 crc kubenswrapper[4982]: I0122 07:11:50.487111 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c42c1d22-0bc2-4561-a965-8fab31e1dfd7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c42c1d22-0bc2-4561-a965-8fab31e1dfd7\") pod \"mariadb-copy-data\" (UID: \"7b376af2-ced4-4234-a949-f3c81e220a11\") " pod="openstack/mariadb-copy-data" Jan 22 07:11:50 crc kubenswrapper[4982]: I0122 07:11:50.487206 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdlvk\" (UniqueName: \"kubernetes.io/projected/7b376af2-ced4-4234-a949-f3c81e220a11-kube-api-access-wdlvk\") pod \"mariadb-copy-data\" (UID: \"7b376af2-ced4-4234-a949-f3c81e220a11\") " pod="openstack/mariadb-copy-data" Jan 22 07:11:50 crc kubenswrapper[4982]: I0122 07:11:50.589549 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdlvk\" (UniqueName: \"kubernetes.io/projected/7b376af2-ced4-4234-a949-f3c81e220a11-kube-api-access-wdlvk\") pod \"mariadb-copy-data\" (UID: \"7b376af2-ced4-4234-a949-f3c81e220a11\") " pod="openstack/mariadb-copy-data" Jan 22 07:11:50 crc kubenswrapper[4982]: I0122 07:11:50.589660 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c42c1d22-0bc2-4561-a965-8fab31e1dfd7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c42c1d22-0bc2-4561-a965-8fab31e1dfd7\") pod \"mariadb-copy-data\" (UID: \"7b376af2-ced4-4234-a949-f3c81e220a11\") " pod="openstack/mariadb-copy-data" Jan 22 07:11:50 crc kubenswrapper[4982]: I0122 07:11:50.591726 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 22 07:11:50 crc kubenswrapper[4982]: I0122 07:11:50.591761 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c42c1d22-0bc2-4561-a965-8fab31e1dfd7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c42c1d22-0bc2-4561-a965-8fab31e1dfd7\") pod \"mariadb-copy-data\" (UID: \"7b376af2-ced4-4234-a949-f3c81e220a11\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/6f68157a13f48c76574b9daa092529eb09035b5f29cdc6b05d0e1cb0a40871a0/globalmount\"" pod="openstack/mariadb-copy-data"
Jan 22 07:11:50 crc kubenswrapper[4982]: I0122 07:11:50.614890 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wdlvk\" (UniqueName: \"kubernetes.io/projected/7b376af2-ced4-4234-a949-f3c81e220a11-kube-api-access-wdlvk\") pod \"mariadb-copy-data\" (UID: \"7b376af2-ced4-4234-a949-f3c81e220a11\") " pod="openstack/mariadb-copy-data"
Jan 22 07:11:50 crc kubenswrapper[4982]: I0122 07:11:50.618301 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c42c1d22-0bc2-4561-a965-8fab31e1dfd7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c42c1d22-0bc2-4561-a965-8fab31e1dfd7\") pod \"mariadb-copy-data\" (UID: \"7b376af2-ced4-4234-a949-f3c81e220a11\") " pod="openstack/mariadb-copy-data"
Jan 22 07:11:50 crc kubenswrapper[4982]: I0122 07:11:50.693989 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data"
Jan 22 07:11:53 crc kubenswrapper[4982]: I0122 07:11:53.404423 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"]
Jan 22 07:11:54 crc kubenswrapper[4982]: I0122 07:11:54.004474 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"7b376af2-ced4-4234-a949-f3c81e220a11","Type":"ContainerStarted","Data":"98e4509e558c001e4eb0db164441b2c52e3cc959e25e707f263dec66ca75f5b9"}
Jan 22 07:11:54 crc kubenswrapper[4982]: I0122 07:11:54.005177 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"7b376af2-ced4-4234-a949-f3c81e220a11","Type":"ContainerStarted","Data":"2eb93e2f669bdabb17d27280118942889a4983c408600e0afb36767a388759fb"}
Jan 22 07:11:54 crc kubenswrapper[4982]: I0122 07:11:54.022564 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-copy-data" podStartSLOduration=5.022496519 podStartE2EDuration="5.022496519s" podCreationTimestamp="2026-01-22 07:11:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:11:54.017605607 +0000 UTC m=+5174.856243630" watchObservedRunningTime="2026-01-22 07:11:54.022496519 +0000 UTC m=+5174.861134562"
Jan 22 07:11:57 crc kubenswrapper[4982]: I0122 07:11:57.015135 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"]
Jan 22 07:11:57 crc kubenswrapper[4982]: I0122 07:11:57.016956 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Jan 22 07:11:57 crc kubenswrapper[4982]: I0122 07:11:57.023216 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Jan 22 07:11:57 crc kubenswrapper[4982]: I0122 07:11:57.084769 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4sbb\" (UniqueName: \"kubernetes.io/projected/9f3a575f-ac96-4abd-bb10-c8e64bbeadf6-kube-api-access-t4sbb\") pod \"mariadb-client\" (UID: \"9f3a575f-ac96-4abd-bb10-c8e64bbeadf6\") " pod="openstack/mariadb-client"
Jan 22 07:11:57 crc kubenswrapper[4982]: I0122 07:11:57.186671 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4sbb\" (UniqueName: \"kubernetes.io/projected/9f3a575f-ac96-4abd-bb10-c8e64bbeadf6-kube-api-access-t4sbb\") pod \"mariadb-client\" (UID: \"9f3a575f-ac96-4abd-bb10-c8e64bbeadf6\") " pod="openstack/mariadb-client"
Jan 22 07:11:57 crc kubenswrapper[4982]: I0122 07:11:57.206709 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4sbb\" (UniqueName: \"kubernetes.io/projected/9f3a575f-ac96-4abd-bb10-c8e64bbeadf6-kube-api-access-t4sbb\") pod \"mariadb-client\" (UID: \"9f3a575f-ac96-4abd-bb10-c8e64bbeadf6\") " pod="openstack/mariadb-client"
Jan 22 07:11:57 crc kubenswrapper[4982]: I0122 07:11:57.345007 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Jan 22 07:11:57 crc kubenswrapper[4982]: I0122 07:11:57.857743 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Jan 22 07:11:58 crc kubenswrapper[4982]: I0122 07:11:58.035690 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"9f3a575f-ac96-4abd-bb10-c8e64bbeadf6","Type":"ContainerStarted","Data":"d22931cc28b66bc73befd02f058e125783f151317e02595e45f9654c5dde27d0"}
Jan 22 07:11:59 crc kubenswrapper[4982]: I0122 07:11:59.048064 4982 generic.go:334] "Generic (PLEG): container finished" podID="9f3a575f-ac96-4abd-bb10-c8e64bbeadf6" containerID="989fad8b473b28c5abf128dd8119b0899546a4abd95f77a0b72ee8c974624505" exitCode=0
Jan 22 07:11:59 crc kubenswrapper[4982]: I0122 07:11:59.048105 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"9f3a575f-ac96-4abd-bb10-c8e64bbeadf6","Type":"ContainerDied","Data":"989fad8b473b28c5abf128dd8119b0899546a4abd95f77a0b72ee8c974624505"}
Jan 22 07:12:00 crc kubenswrapper[4982]: I0122 07:12:00.886178 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Need to start a new one" pod="openstack/mariadb-client" Jan 22 07:12:00 crc kubenswrapper[4982]: I0122 07:12:00.907087 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_9f3a575f-ac96-4abd-bb10-c8e64bbeadf6/mariadb-client/0.log" Jan 22 07:12:00 crc kubenswrapper[4982]: I0122 07:12:00.934322 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Jan 22 07:12:00 crc kubenswrapper[4982]: I0122 07:12:00.940450 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Jan 22 07:12:00 crc kubenswrapper[4982]: I0122 07:12:00.964481 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t4sbb\" (UniqueName: \"kubernetes.io/projected/9f3a575f-ac96-4abd-bb10-c8e64bbeadf6-kube-api-access-t4sbb\") pod \"9f3a575f-ac96-4abd-bb10-c8e64bbeadf6\" (UID: \"9f3a575f-ac96-4abd-bb10-c8e64bbeadf6\") " Jan 22 07:12:00 crc kubenswrapper[4982]: I0122 07:12:00.970371 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f3a575f-ac96-4abd-bb10-c8e64bbeadf6-kube-api-access-t4sbb" (OuterVolumeSpecName: "kube-api-access-t4sbb") pod "9f3a575f-ac96-4abd-bb10-c8e64bbeadf6" (UID: "9f3a575f-ac96-4abd-bb10-c8e64bbeadf6"). InnerVolumeSpecName "kube-api-access-t4sbb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:12:01 crc kubenswrapper[4982]: I0122 07:12:01.076761 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d22931cc28b66bc73befd02f058e125783f151317e02595e45f9654c5dde27d0" Jan 22 07:12:01 crc kubenswrapper[4982]: I0122 07:12:01.077740 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Jan 22 07:12:01 crc kubenswrapper[4982]: I0122 07:12:01.078509 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t4sbb\" (UniqueName: \"kubernetes.io/projected/9f3a575f-ac96-4abd-bb10-c8e64bbeadf6-kube-api-access-t4sbb\") on node \"crc\" DevicePath \"\"" Jan 22 07:12:01 crc kubenswrapper[4982]: I0122 07:12:01.089903 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Jan 22 07:12:01 crc kubenswrapper[4982]: E0122 07:12:01.090339 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f3a575f-ac96-4abd-bb10-c8e64bbeadf6" containerName="mariadb-client" Jan 22 07:12:01 crc kubenswrapper[4982]: I0122 07:12:01.090386 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f3a575f-ac96-4abd-bb10-c8e64bbeadf6" containerName="mariadb-client" Jan 22 07:12:01 crc kubenswrapper[4982]: I0122 07:12:01.090810 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f3a575f-ac96-4abd-bb10-c8e64bbeadf6" containerName="mariadb-client" Jan 22 07:12:01 crc kubenswrapper[4982]: I0122 07:12:01.091596 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Jan 22 07:12:01 crc kubenswrapper[4982]: I0122 07:12:01.098580 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Jan 22 07:12:01 crc kubenswrapper[4982]: I0122 07:12:01.106445 4982 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/mariadb-client" oldPodUID="9f3a575f-ac96-4abd-bb10-c8e64bbeadf6" podUID="5b56c7cf-6c9a-460a-b003-d848b327e10c" Jan 22 07:12:01 crc kubenswrapper[4982]: I0122 07:12:01.180350 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h796k\" (UniqueName: \"kubernetes.io/projected/5b56c7cf-6c9a-460a-b003-d848b327e10c-kube-api-access-h796k\") pod \"mariadb-client\" (UID: \"5b56c7cf-6c9a-460a-b003-d848b327e10c\") " pod="openstack/mariadb-client" Jan 22 07:12:01 crc kubenswrapper[4982]: I0122 07:12:01.281772 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h796k\" (UniqueName: \"kubernetes.io/projected/5b56c7cf-6c9a-460a-b003-d848b327e10c-kube-api-access-h796k\") pod \"mariadb-client\" (UID: \"5b56c7cf-6c9a-460a-b003-d848b327e10c\") " pod="openstack/mariadb-client" Jan 22 07:12:01 crc kubenswrapper[4982]: I0122 07:12:01.299118 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h796k\" (UniqueName: \"kubernetes.io/projected/5b56c7cf-6c9a-460a-b003-d848b327e10c-kube-api-access-h796k\") pod \"mariadb-client\" (UID: \"5b56c7cf-6c9a-460a-b003-d848b327e10c\") " pod="openstack/mariadb-client" Jan 22 07:12:01 crc kubenswrapper[4982]: I0122 07:12:01.426206 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Jan 22 07:12:01 crc kubenswrapper[4982]: I0122 07:12:01.729123 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f3a575f-ac96-4abd-bb10-c8e64bbeadf6" path="/var/lib/kubelet/pods/9f3a575f-ac96-4abd-bb10-c8e64bbeadf6/volumes" Jan 22 07:12:01 crc kubenswrapper[4982]: I0122 07:12:01.933673 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Jan 22 07:12:01 crc kubenswrapper[4982]: W0122 07:12:01.943776 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5b56c7cf_6c9a_460a_b003_d848b327e10c.slice/crio-b3ed324dfbb73f40a56cc2bc183f7c56bf24c99a8377ff785e2cfcf413e2a69c WatchSource:0}: Error finding container b3ed324dfbb73f40a56cc2bc183f7c56bf24c99a8377ff785e2cfcf413e2a69c: Status 404 returned error can't find the container with id b3ed324dfbb73f40a56cc2bc183f7c56bf24c99a8377ff785e2cfcf413e2a69c Jan 22 07:12:02 crc kubenswrapper[4982]: I0122 07:12:02.086083 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"5b56c7cf-6c9a-460a-b003-d848b327e10c","Type":"ContainerStarted","Data":"b3ed324dfbb73f40a56cc2bc183f7c56bf24c99a8377ff785e2cfcf413e2a69c"} Jan 22 07:12:02 crc kubenswrapper[4982]: I0122 07:12:02.823434 4982 scope.go:117] "RemoveContainer" containerID="414e130d17050cae251992e48b4dbc1db06f79ef659a8750cf0ea3bd97886d46" Jan 22 07:12:03 crc kubenswrapper[4982]: I0122 07:12:03.097285 4982 generic.go:334] "Generic (PLEG): container finished" podID="5b56c7cf-6c9a-460a-b003-d848b327e10c" containerID="45340d75c2c31c6f499c6b02f0c6bd9d3447cdbdcff688cd951806d16f19dae8" exitCode=0 Jan 22 07:12:03 crc kubenswrapper[4982]: I0122 07:12:03.097353 4982 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"5b56c7cf-6c9a-460a-b003-d848b327e10c","Type":"ContainerDied","Data":"45340d75c2c31c6f499c6b02f0c6bd9d3447cdbdcff688cd951806d16f19dae8"} Jan 22 07:12:04 crc kubenswrapper[4982]: I0122 07:12:04.447070 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Jan 22 07:12:04 crc kubenswrapper[4982]: I0122 07:12:04.464526 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_5b56c7cf-6c9a-460a-b003-d848b327e10c/mariadb-client/0.log" Jan 22 07:12:04 crc kubenswrapper[4982]: I0122 07:12:04.502375 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Jan 22 07:12:04 crc kubenswrapper[4982]: I0122 07:12:04.509698 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Jan 22 07:12:04 crc kubenswrapper[4982]: I0122 07:12:04.631958 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h796k\" (UniqueName: \"kubernetes.io/projected/5b56c7cf-6c9a-460a-b003-d848b327e10c-kube-api-access-h796k\") pod \"5b56c7cf-6c9a-460a-b003-d848b327e10c\" (UID: \"5b56c7cf-6c9a-460a-b003-d848b327e10c\") " Jan 22 07:12:04 crc kubenswrapper[4982]: I0122 07:12:04.640237 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b56c7cf-6c9a-460a-b003-d848b327e10c-kube-api-access-h796k" (OuterVolumeSpecName: "kube-api-access-h796k") pod "5b56c7cf-6c9a-460a-b003-d848b327e10c" (UID: "5b56c7cf-6c9a-460a-b003-d848b327e10c"). InnerVolumeSpecName "kube-api-access-h796k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:12:04 crc kubenswrapper[4982]: I0122 07:12:04.736860 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h796k\" (UniqueName: \"kubernetes.io/projected/5b56c7cf-6c9a-460a-b003-d848b327e10c-kube-api-access-h796k\") on node \"crc\" DevicePath \"\"" Jan 22 07:12:05 crc kubenswrapper[4982]: I0122 07:12:05.115653 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b3ed324dfbb73f40a56cc2bc183f7c56bf24c99a8377ff785e2cfcf413e2a69c" Jan 22 07:12:05 crc kubenswrapper[4982]: I0122 07:12:05.115701 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Jan 22 07:12:05 crc kubenswrapper[4982]: I0122 07:12:05.749005 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b56c7cf-6c9a-460a-b003-d848b327e10c" path="/var/lib/kubelet/pods/5b56c7cf-6c9a-460a-b003-d848b327e10c/volumes" Jan 22 07:12:48 crc kubenswrapper[4982]: I0122 07:12:48.973649 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 07:12:48 crc kubenswrapper[4982]: I0122 07:12:48.974148 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.174290 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 22 07:13:18 crc kubenswrapper[4982]: E0122 07:13:18.176392 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b56c7cf-6c9a-460a-b003-d848b327e10c" containerName="mariadb-client" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.176428 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b56c7cf-6c9a-460a-b003-d848b327e10c" containerName="mariadb-client" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.176746 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b56c7cf-6c9a-460a-b003-d848b327e10c" containerName="mariadb-client" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.178537 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.182652 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.182696 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-8fx7z" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.185162 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.193265 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.195314 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-2"] Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.197330 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.203655 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-1"] Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.205534 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.230585 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"] Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.255996 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"] Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.330475 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5\") " pod="openstack/ovsdbserver-nb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.330514 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/002ade31-2880-47c6-9ed7-5bb12d2bcda9-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"002ade31-2880-47c6-9ed7-5bb12d2bcda9\") " pod="openstack/ovsdbserver-nb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.330537 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bded9234-3e82-419e-82b4-a43809301833-config\") pod \"ovsdbserver-nb-2\" (UID: \"bded9234-3e82-419e-82b4-a43809301833\") " pod="openstack/ovsdbserver-nb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.330556 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bded9234-3e82-419e-82b4-a43809301833-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"bded9234-3e82-419e-82b4-a43809301833\") " pod="openstack/ovsdbserver-nb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.330576 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/002ade31-2880-47c6-9ed7-5bb12d2bcda9-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"002ade31-2880-47c6-9ed7-5bb12d2bcda9\") " pod="openstack/ovsdbserver-nb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.330591 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5\") " pod="openstack/ovsdbserver-nb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.330691 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bded9234-3e82-419e-82b4-a43809301833-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"bded9234-3e82-419e-82b4-a43809301833\") " pod="openstack/ovsdbserver-nb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.330722 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4lsf5\" (UniqueName: \"kubernetes.io/projected/002ade31-2880-47c6-9ed7-5bb12d2bcda9-kube-api-access-4lsf5\") pod \"ovsdbserver-nb-0\" (UID: \"002ade31-2880-47c6-9ed7-5bb12d2bcda9\") " pod="openstack/ovsdbserver-nb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.330750 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"pvc-a9ad67d7-f4ef-4f12-90b0-db12778e94bd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a9ad67d7-f4ef-4f12-90b0-db12778e94bd\") pod \"ovsdbserver-nb-1\" (UID: \"79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5\") " pod="openstack/ovsdbserver-nb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.330774 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-0c8af5d5-9cf8-4e79-8af0-095275a7a731\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0c8af5d5-9cf8-4e79-8af0-095275a7a731\") pod \"ovsdbserver-nb-2\" (UID: \"bded9234-3e82-419e-82b4-a43809301833\") " pod="openstack/ovsdbserver-nb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.330793 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5-config\") pod \"ovsdbserver-nb-1\" (UID: \"79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5\") " pod="openstack/ovsdbserver-nb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.330810 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c927a30b-ef12-44c4-a178-9a8e82cc5262\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c927a30b-ef12-44c4-a178-9a8e82cc5262\") pod \"ovsdbserver-nb-0\" (UID: \"002ade31-2880-47c6-9ed7-5bb12d2bcda9\") " pod="openstack/ovsdbserver-nb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.330825 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdsdj\" (UniqueName: \"kubernetes.io/projected/79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5-kube-api-access-hdsdj\") pod \"ovsdbserver-nb-1\" (UID: \"79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5\") " pod="openstack/ovsdbserver-nb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.330897 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/002ade31-2880-47c6-9ed7-5bb12d2bcda9-config\") pod \"ovsdbserver-nb-0\" (UID: \"002ade31-2880-47c6-9ed7-5bb12d2bcda9\") " pod="openstack/ovsdbserver-nb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.330960 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5\") " pod="openstack/ovsdbserver-nb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.330988 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8sr6\" (UniqueName: \"kubernetes.io/projected/bded9234-3e82-419e-82b4-a43809301833-kube-api-access-s8sr6\") pod \"ovsdbserver-nb-2\" (UID: \"bded9234-3e82-419e-82b4-a43809301833\") " pod="openstack/ovsdbserver-nb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.331007 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/002ade31-2880-47c6-9ed7-5bb12d2bcda9-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"002ade31-2880-47c6-9ed7-5bb12d2bcda9\") " pod="openstack/ovsdbserver-nb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.331033 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bded9234-3e82-419e-82b4-a43809301833-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"bded9234-3e82-419e-82b4-a43809301833\") " pod="openstack/ovsdbserver-nb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.367088 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.369294 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.372470 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.374298 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.374434 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-4qppf" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.379818 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.399018 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-2"] Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.401234 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.406322 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-1"] Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.407789 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.410910 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.415682 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"] Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.432247 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c927a30b-ef12-44c4-a178-9a8e82cc5262\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c927a30b-ef12-44c4-a178-9a8e82cc5262\") pod \"ovsdbserver-nb-0\" (UID: \"002ade31-2880-47c6-9ed7-5bb12d2bcda9\") " pod="openstack/ovsdbserver-nb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.432449 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5-config\") pod \"ovsdbserver-nb-1\" (UID: \"79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5\") " pod="openstack/ovsdbserver-nb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.432543 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdsdj\" (UniqueName: \"kubernetes.io/projected/79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5-kube-api-access-hdsdj\") pod \"ovsdbserver-nb-1\" (UID: \"79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5\") " pod="openstack/ovsdbserver-nb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.432628 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d077ea28-6c68-4940-b841-7d113bcd8394-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"d077ea28-6c68-4940-b841-7d113bcd8394\") " pod="openstack/ovsdbserver-sb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.432719 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/002ade31-2880-47c6-9ed7-5bb12d2bcda9-config\") pod \"ovsdbserver-nb-0\" (UID: \"002ade31-2880-47c6-9ed7-5bb12d2bcda9\") " pod="openstack/ovsdbserver-nb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.432802 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5\") " pod="openstack/ovsdbserver-nb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.432908 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d077ea28-6c68-4940-b841-7d113bcd8394-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"d077ea28-6c68-4940-b841-7d113bcd8394\") " pod="openstack/ovsdbserver-sb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.433018 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8sr6\" (UniqueName: \"kubernetes.io/projected/bded9234-3e82-419e-82b4-a43809301833-kube-api-access-s8sr6\") pod \"ovsdbserver-nb-2\" (UID: \"bded9234-3e82-419e-82b4-a43809301833\") " pod="openstack/ovsdbserver-nb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.433118 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/002ade31-2880-47c6-9ed7-5bb12d2bcda9-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"002ade31-2880-47c6-9ed7-5bb12d2bcda9\") " pod="openstack/ovsdbserver-nb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.433265 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bded9234-3e82-419e-82b4-a43809301833-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"bded9234-3e82-419e-82b4-a43809301833\") " pod="openstack/ovsdbserver-nb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.433378 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5\") " pod="openstack/ovsdbserver-nb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.433469 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/002ade31-2880-47c6-9ed7-5bb12d2bcda9-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"002ade31-2880-47c6-9ed7-5bb12d2bcda9\") " pod="openstack/ovsdbserver-nb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.433565 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bded9234-3e82-419e-82b4-a43809301833-config\") pod \"ovsdbserver-nb-2\" (UID: \"bded9234-3e82-419e-82b4-a43809301833\") " pod="openstack/ovsdbserver-nb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.433656 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bded9234-3e82-419e-82b4-a43809301833-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"bded9234-3e82-419e-82b4-a43809301833\") " pod="openstack/ovsdbserver-nb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.433755 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d077ea28-6c68-4940-b841-7d113bcd8394-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"d077ea28-6c68-4940-b841-7d113bcd8394\") " pod="openstack/ovsdbserver-sb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.433873 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/002ade31-2880-47c6-9ed7-5bb12d2bcda9-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"002ade31-2880-47c6-9ed7-5bb12d2bcda9\") " pod="openstack/ovsdbserver-nb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.433985 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5\") " pod="openstack/ovsdbserver-nb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.434108 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bded9234-3e82-419e-82b4-a43809301833-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"bded9234-3e82-419e-82b4-a43809301833\") " pod="openstack/ovsdbserver-nb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.434218 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-ed9b7f42-2d3f-4129-aba2-ce2bcc34a0a6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ed9b7f42-2d3f-4129-aba2-ce2bcc34a0a6\") pod \"ovsdbserver-sb-0\" (UID: \"d077ea28-6c68-4940-b841-7d113bcd8394\") " pod="openstack/ovsdbserver-sb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.434331 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d077ea28-6c68-4940-b841-7d113bcd8394-config\") pod \"ovsdbserver-sb-0\" (UID: \"d077ea28-6c68-4940-b841-7d113bcd8394\") " pod="openstack/ovsdbserver-sb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.434430 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4lsf5\" (UniqueName: \"kubernetes.io/projected/002ade31-2880-47c6-9ed7-5bb12d2bcda9-kube-api-access-4lsf5\") pod \"ovsdbserver-nb-0\" (UID: \"002ade31-2880-47c6-9ed7-5bb12d2bcda9\") " pod="openstack/ovsdbserver-nb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.434542 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ql8gn\" (UniqueName: \"kubernetes.io/projected/d077ea28-6c68-4940-b841-7d113bcd8394-kube-api-access-ql8gn\") pod \"ovsdbserver-sb-0\" (UID: \"d077ea28-6c68-4940-b841-7d113bcd8394\") " pod="openstack/ovsdbserver-sb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.434917 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-a9ad67d7-f4ef-4f12-90b0-db12778e94bd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a9ad67d7-f4ef-4f12-90b0-db12778e94bd\") pod \"ovsdbserver-nb-1\" (UID: \"79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5\") " pod="openstack/ovsdbserver-nb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.435021 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-0c8af5d5-9cf8-4e79-8af0-095275a7a731\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0c8af5d5-9cf8-4e79-8af0-095275a7a731\") pod \"ovsdbserver-nb-2\" (UID: \"bded9234-3e82-419e-82b4-a43809301833\") " pod="openstack/ovsdbserver-nb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.434033 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/002ade31-2880-47c6-9ed7-5bb12d2bcda9-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"002ade31-2880-47c6-9ed7-5bb12d2bcda9\") " pod="openstack/ovsdbserver-nb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.435520 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.435649 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c927a30b-ef12-44c4-a178-9a8e82cc5262\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c927a30b-ef12-44c4-a178-9a8e82cc5262\") pod \"ovsdbserver-nb-0\" (UID: \"002ade31-2880-47c6-9ed7-5bb12d2bcda9\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/523b5e417fdaae551252acfbecb5411dc9597851e91ea4900ec506fde08595bc/globalmount\"" pod="openstack/ovsdbserver-nb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.435909 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bded9234-3e82-419e-82b4-a43809301833-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"bded9234-3e82-419e-82b4-a43809301833\") " pod="openstack/ovsdbserver-nb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.435985 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/002ade31-2880-47c6-9ed7-5bb12d2bcda9-config\") pod \"ovsdbserver-nb-0\" (UID: \"002ade31-2880-47c6-9ed7-5bb12d2bcda9\") " pod="openstack/ovsdbserver-nb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.434761 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bded9234-3e82-419e-82b4-a43809301833-config\") pod \"ovsdbserver-nb-2\" (UID: \"bded9234-3e82-419e-82b4-a43809301833\") " pod="openstack/ovsdbserver-nb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.436307 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/002ade31-2880-47c6-9ed7-5bb12d2bcda9-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"002ade31-2880-47c6-9ed7-5bb12d2bcda9\") " pod="openstack/ovsdbserver-nb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.436374 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5\") " pod="openstack/ovsdbserver-nb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.436486 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5\") " pod="openstack/ovsdbserver-nb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.437007 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5-config\") pod \"ovsdbserver-nb-1\" (UID: \"79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5\") " pod="openstack/ovsdbserver-nb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.440714 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bded9234-3e82-419e-82b4-a43809301833-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"bded9234-3e82-419e-82b4-a43809301833\") " pod="openstack/ovsdbserver-nb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.442676 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/bded9234-3e82-419e-82b4-a43809301833-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"bded9234-3e82-419e-82b4-a43809301833\") " pod="openstack/ovsdbserver-nb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.443094 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/002ade31-2880-47c6-9ed7-5bb12d2bcda9-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"002ade31-2880-47c6-9ed7-5bb12d2bcda9\") " pod="openstack/ovsdbserver-nb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.444507 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.444539 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-a9ad67d7-f4ef-4f12-90b0-db12778e94bd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a9ad67d7-f4ef-4f12-90b0-db12778e94bd\") pod \"ovsdbserver-nb-1\" (UID: \"79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/ed03c68b5711227396a97f2fe8d337cc3a23f266142534ec4d6d50a5172fd633/globalmount\"" pod="openstack/ovsdbserver-nb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.444589 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.444620 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-0c8af5d5-9cf8-4e79-8af0-095275a7a731\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0c8af5d5-9cf8-4e79-8af0-095275a7a731\") pod \"ovsdbserver-nb-2\" (UID: \"bded9234-3e82-419e-82b4-a43809301833\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/3e6b1dea7b0d5e166202f56702f374f11980a9cd2f16b47cb2fc3159839794d7/globalmount\"" pod="openstack/ovsdbserver-nb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.453051 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdsdj\" (UniqueName: \"kubernetes.io/projected/79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5-kube-api-access-hdsdj\") pod \"ovsdbserver-nb-1\" (UID: \"79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5\") " pod="openstack/ovsdbserver-nb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.454533 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5\") " pod="openstack/ovsdbserver-nb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.455928 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4lsf5\" (UniqueName: \"kubernetes.io/projected/002ade31-2880-47c6-9ed7-5bb12d2bcda9-kube-api-access-4lsf5\") pod \"ovsdbserver-nb-0\" (UID: \"002ade31-2880-47c6-9ed7-5bb12d2bcda9\") " pod="openstack/ovsdbserver-nb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.458696 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8sr6\" (UniqueName: \"kubernetes.io/projected/bded9234-3e82-419e-82b4-a43809301833-kube-api-access-s8sr6\") pod \"ovsdbserver-nb-2\" (UID: \"bded9234-3e82-419e-82b4-a43809301833\") " 
pod="openstack/ovsdbserver-nb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.488881 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-a9ad67d7-f4ef-4f12-90b0-db12778e94bd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a9ad67d7-f4ef-4f12-90b0-db12778e94bd\") pod \"ovsdbserver-nb-1\" (UID: \"79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5\") " pod="openstack/ovsdbserver-nb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.495251 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c927a30b-ef12-44c4-a178-9a8e82cc5262\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c927a30b-ef12-44c4-a178-9a8e82cc5262\") pod \"ovsdbserver-nb-0\" (UID: \"002ade31-2880-47c6-9ed7-5bb12d2bcda9\") " pod="openstack/ovsdbserver-nb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.511843 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-0c8af5d5-9cf8-4e79-8af0-095275a7a731\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0c8af5d5-9cf8-4e79-8af0-095275a7a731\") pod \"ovsdbserver-nb-2\" (UID: \"bded9234-3e82-419e-82b4-a43809301833\") " pod="openstack/ovsdbserver-nb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.513133 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.536104 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.536514 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d077ea28-6c68-4940-b841-7d113bcd8394-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"d077ea28-6c68-4940-b841-7d113bcd8394\") " pod="openstack/ovsdbserver-sb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.536551 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ef8a87c1-3a5f-4159-af3f-f854b1b16ef6-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"ef8a87c1-3a5f-4159-af3f-f854b1b16ef6\") " pod="openstack/ovsdbserver-sb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.536570 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/895d6b93-d214-4982-9246-43d77890c2f3-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"895d6b93-d214-4982-9246-43d77890c2f3\") " pod="openstack/ovsdbserver-sb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.536590 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/895d6b93-d214-4982-9246-43d77890c2f3-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"895d6b93-d214-4982-9246-43d77890c2f3\") " pod="openstack/ovsdbserver-sb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.536609 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-153a69f0-512b-4b57-a752-96215d85288a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-153a69f0-512b-4b57-a752-96215d85288a\") pod \"ovsdbserver-sb-1\" (UID: \"895d6b93-d214-4982-9246-43d77890c2f3\") " pod="openstack/ovsdbserver-sb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.536655 4982 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d077ea28-6c68-4940-b841-7d113bcd8394-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"d077ea28-6c68-4940-b841-7d113bcd8394\") " pod="openstack/ovsdbserver-sb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.536680 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/895d6b93-d214-4982-9246-43d77890c2f3-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"895d6b93-d214-4982-9246-43d77890c2f3\") " pod="openstack/ovsdbserver-sb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.536706 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khbsn\" (UniqueName: \"kubernetes.io/projected/895d6b93-d214-4982-9246-43d77890c2f3-kube-api-access-khbsn\") pod \"ovsdbserver-sb-1\" (UID: \"895d6b93-d214-4982-9246-43d77890c2f3\") " pod="openstack/ovsdbserver-sb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.536732 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-ed9b7f42-2d3f-4129-aba2-ce2bcc34a0a6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ed9b7f42-2d3f-4129-aba2-ce2bcc34a0a6\") pod \"ovsdbserver-sb-0\" (UID: \"d077ea28-6c68-4940-b841-7d113bcd8394\") " pod="openstack/ovsdbserver-sb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.536746 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d077ea28-6c68-4940-b841-7d113bcd8394-config\") pod \"ovsdbserver-sb-0\" (UID: \"d077ea28-6c68-4940-b841-7d113bcd8394\") " pod="openstack/ovsdbserver-sb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.536779 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ql8gn\" (UniqueName: \"kubernetes.io/projected/d077ea28-6c68-4940-b841-7d113bcd8394-kube-api-access-ql8gn\") pod \"ovsdbserver-sb-0\" (UID: \"d077ea28-6c68-4940-b841-7d113bcd8394\") " pod="openstack/ovsdbserver-sb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.536812 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ef8a87c1-3a5f-4159-af3f-f854b1b16ef6-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"ef8a87c1-3a5f-4159-af3f-f854b1b16ef6\") " pod="openstack/ovsdbserver-sb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.536830 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef8a87c1-3a5f-4159-af3f-f854b1b16ef6-config\") pod \"ovsdbserver-sb-2\" (UID: \"ef8a87c1-3a5f-4159-af3f-f854b1b16ef6\") " pod="openstack/ovsdbserver-sb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.536845 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/895d6b93-d214-4982-9246-43d77890c2f3-config\") pod \"ovsdbserver-sb-1\" (UID: \"895d6b93-d214-4982-9246-43d77890c2f3\") " pod="openstack/ovsdbserver-sb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.536880 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/d077ea28-6c68-4940-b841-7d113bcd8394-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"d077ea28-6c68-4940-b841-7d113bcd8394\") " pod="openstack/ovsdbserver-sb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.536894 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef8a87c1-3a5f-4159-af3f-f854b1b16ef6-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"ef8a87c1-3a5f-4159-af3f-f854b1b16ef6\") " pod="openstack/ovsdbserver-sb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.536912 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzb8q\" (UniqueName: \"kubernetes.io/projected/ef8a87c1-3a5f-4159-af3f-f854b1b16ef6-kube-api-access-hzb8q\") pod \"ovsdbserver-sb-2\" (UID: \"ef8a87c1-3a5f-4159-af3f-f854b1b16ef6\") " pod="openstack/ovsdbserver-sb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.536932 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-f35e22df-3e50-4d20-92d8-ccb50695c61e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f35e22df-3e50-4d20-92d8-ccb50695c61e\") pod \"ovsdbserver-sb-2\" (UID: \"ef8a87c1-3a5f-4159-af3f-f854b1b16ef6\") " pod="openstack/ovsdbserver-sb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.537389 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d077ea28-6c68-4940-b841-7d113bcd8394-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"d077ea28-6c68-4940-b841-7d113bcd8394\") " pod="openstack/ovsdbserver-sb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.537491 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d077ea28-6c68-4940-b841-7d113bcd8394-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"d077ea28-6c68-4940-b841-7d113bcd8394\") " pod="openstack/ovsdbserver-sb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.538501 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d077ea28-6c68-4940-b841-7d113bcd8394-config\") pod \"ovsdbserver-sb-0\" (UID: \"d077ea28-6c68-4940-b841-7d113bcd8394\") " pod="openstack/ovsdbserver-sb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.539920 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.539968 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-ed9b7f42-2d3f-4129-aba2-ce2bcc34a0a6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ed9b7f42-2d3f-4129-aba2-ce2bcc34a0a6\") pod \"ovsdbserver-sb-0\" (UID: \"d077ea28-6c68-4940-b841-7d113bcd8394\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/4a7406e99d5a6c4a8db34b4ea0753e7d87dd7abc001056e40534552eb951c48f/globalmount\"" pod="openstack/ovsdbserver-sb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.541299 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d077ea28-6c68-4940-b841-7d113bcd8394-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"d077ea28-6c68-4940-b841-7d113bcd8394\") " pod="openstack/ovsdbserver-sb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.547154 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.556793 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ql8gn\" (UniqueName: \"kubernetes.io/projected/d077ea28-6c68-4940-b841-7d113bcd8394-kube-api-access-ql8gn\") pod \"ovsdbserver-sb-0\" (UID: \"d077ea28-6c68-4940-b841-7d113bcd8394\") " pod="openstack/ovsdbserver-sb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.572349 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-ed9b7f42-2d3f-4129-aba2-ce2bcc34a0a6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ed9b7f42-2d3f-4129-aba2-ce2bcc34a0a6\") pod \"ovsdbserver-sb-0\" (UID: \"d077ea28-6c68-4940-b841-7d113bcd8394\") " pod="openstack/ovsdbserver-sb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.638380 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/895d6b93-d214-4982-9246-43d77890c2f3-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"895d6b93-d214-4982-9246-43d77890c2f3\") " pod="openstack/ovsdbserver-sb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.638447 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khbsn\" (UniqueName: \"kubernetes.io/projected/895d6b93-d214-4982-9246-43d77890c2f3-kube-api-access-khbsn\") pod \"ovsdbserver-sb-1\" (UID: \"895d6b93-d214-4982-9246-43d77890c2f3\") " pod="openstack/ovsdbserver-sb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.638510 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ef8a87c1-3a5f-4159-af3f-f854b1b16ef6-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"ef8a87c1-3a5f-4159-af3f-f854b1b16ef6\") " pod="openstack/ovsdbserver-sb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.638533 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef8a87c1-3a5f-4159-af3f-f854b1b16ef6-config\") pod \"ovsdbserver-sb-2\" (UID: \"ef8a87c1-3a5f-4159-af3f-f854b1b16ef6\") " pod="openstack/ovsdbserver-sb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.638554 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/895d6b93-d214-4982-9246-43d77890c2f3-config\") pod \"ovsdbserver-sb-1\" (UID: \"895d6b93-d214-4982-9246-43d77890c2f3\") " pod="openstack/ovsdbserver-sb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.638574 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef8a87c1-3a5f-4159-af3f-f854b1b16ef6-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"ef8a87c1-3a5f-4159-af3f-f854b1b16ef6\") " pod="openstack/ovsdbserver-sb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.638598 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzb8q\" (UniqueName: \"kubernetes.io/projected/ef8a87c1-3a5f-4159-af3f-f854b1b16ef6-kube-api-access-hzb8q\") pod \"ovsdbserver-sb-2\" (UID: \"ef8a87c1-3a5f-4159-af3f-f854b1b16ef6\") " pod="openstack/ovsdbserver-sb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.638636 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-f35e22df-3e50-4d20-92d8-ccb50695c61e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f35e22df-3e50-4d20-92d8-ccb50695c61e\") pod \"ovsdbserver-sb-2\" (UID: \"ef8a87c1-3a5f-4159-af3f-f854b1b16ef6\") " pod="openstack/ovsdbserver-sb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.638667 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ef8a87c1-3a5f-4159-af3f-f854b1b16ef6-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"ef8a87c1-3a5f-4159-af3f-f854b1b16ef6\") " pod="openstack/ovsdbserver-sb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.638686 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/895d6b93-d214-4982-9246-43d77890c2f3-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"895d6b93-d214-4982-9246-43d77890c2f3\") " pod="openstack/ovsdbserver-sb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.638711 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/895d6b93-d214-4982-9246-43d77890c2f3-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"895d6b93-d214-4982-9246-43d77890c2f3\") " pod="openstack/ovsdbserver-sb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.638733 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-153a69f0-512b-4b57-a752-96215d85288a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-153a69f0-512b-4b57-a752-96215d85288a\") pod \"ovsdbserver-sb-1\" (UID: \"895d6b93-d214-4982-9246-43d77890c2f3\") " pod="openstack/ovsdbserver-sb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.639767 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/895d6b93-d214-4982-9246-43d77890c2f3-config\") pod \"ovsdbserver-sb-1\" (UID: \"895d6b93-d214-4982-9246-43d77890c2f3\") " pod="openstack/ovsdbserver-sb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.640049 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef8a87c1-3a5f-4159-af3f-f854b1b16ef6-config\") pod \"ovsdbserver-sb-2\" (UID: \"ef8a87c1-3a5f-4159-af3f-f854b1b16ef6\") " pod="openstack/ovsdbserver-sb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 
07:13:18.640270 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/895d6b93-d214-4982-9246-43d77890c2f3-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"895d6b93-d214-4982-9246-43d77890c2f3\") " pod="openstack/ovsdbserver-sb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.640368 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/895d6b93-d214-4982-9246-43d77890c2f3-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"895d6b93-d214-4982-9246-43d77890c2f3\") " pod="openstack/ovsdbserver-sb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.640678 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ef8a87c1-3a5f-4159-af3f-f854b1b16ef6-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"ef8a87c1-3a5f-4159-af3f-f854b1b16ef6\") " pod="openstack/ovsdbserver-sb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.642165 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ef8a87c1-3a5f-4159-af3f-f854b1b16ef6-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"ef8a87c1-3a5f-4159-af3f-f854b1b16ef6\") " pod="openstack/ovsdbserver-sb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.648813 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.648873 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-f35e22df-3e50-4d20-92d8-ccb50695c61e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f35e22df-3e50-4d20-92d8-ccb50695c61e\") pod \"ovsdbserver-sb-2\" (UID: \"ef8a87c1-3a5f-4159-af3f-f854b1b16ef6\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/01c3989db581e26e750bd9ef19e9f1cd4e53498b1186aba23ad0e42f32755314/globalmount\"" pod="openstack/ovsdbserver-sb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.649037 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.649061 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-153a69f0-512b-4b57-a752-96215d85288a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-153a69f0-512b-4b57-a752-96215d85288a\") pod \"ovsdbserver-sb-1\" (UID: \"895d6b93-d214-4982-9246-43d77890c2f3\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/816832fb2ab5b577139595c7b50dbde50fdf75615b6d5ebdae1024ac61ec57a1/globalmount\"" pod="openstack/ovsdbserver-sb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.656416 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/895d6b93-d214-4982-9246-43d77890c2f3-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"895d6b93-d214-4982-9246-43d77890c2f3\") " pod="openstack/ovsdbserver-sb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.660123 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef8a87c1-3a5f-4159-af3f-f854b1b16ef6-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"ef8a87c1-3a5f-4159-af3f-f854b1b16ef6\") " pod="openstack/ovsdbserver-sb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.660902 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-khbsn\" (UniqueName: \"kubernetes.io/projected/895d6b93-d214-4982-9246-43d77890c2f3-kube-api-access-khbsn\") pod \"ovsdbserver-sb-1\" (UID: \"895d6b93-d214-4982-9246-43d77890c2f3\") " pod="openstack/ovsdbserver-sb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.665725 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzb8q\" (UniqueName: \"kubernetes.io/projected/ef8a87c1-3a5f-4159-af3f-f854b1b16ef6-kube-api-access-hzb8q\") pod \"ovsdbserver-sb-2\" (UID: \"ef8a87c1-3a5f-4159-af3f-f854b1b16ef6\") " pod="openstack/ovsdbserver-sb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.687287 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-f35e22df-3e50-4d20-92d8-ccb50695c61e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f35e22df-3e50-4d20-92d8-ccb50695c61e\") pod \"ovsdbserver-sb-2\" (UID: \"ef8a87c1-3a5f-4159-af3f-f854b1b16ef6\") " pod="openstack/ovsdbserver-sb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.688735 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-153a69f0-512b-4b57-a752-96215d85288a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-153a69f0-512b-4b57-a752-96215d85288a\") pod \"ovsdbserver-sb-1\" (UID: \"895d6b93-d214-4982-9246-43d77890c2f3\") " pod="openstack/ovsdbserver-sb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.694205 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.721182 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.733228 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-1" Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.974177 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 07:13:18 crc kubenswrapper[4982]: I0122 07:13:18.974232 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 07:13:19 crc kubenswrapper[4982]: I0122 07:13:19.063587 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 22 07:13:19 crc kubenswrapper[4982]: W0122 07:13:19.167015 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod79ec12c1_c68b_4ca4_8ac1_3a3ef740eda5.slice/crio-59275a4efcc70bc7327d84e6395e741c9744b894c931bcbd0a47705249252e50 WatchSource:0}: Error finding container 59275a4efcc70bc7327d84e6395e741c9744b894c931bcbd0a47705249252e50: Status 404 returned error can't find the container with id 59275a4efcc70bc7327d84e6395e741c9744b894c931bcbd0a47705249252e50 Jan 22 07:13:19 crc kubenswrapper[4982]: I0122 07:13:19.168086 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"] Jan 22 07:13:19 crc kubenswrapper[4982]: I0122 07:13:19.309673 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Jan 22 07:13:19 crc kubenswrapper[4982]: I0122 07:13:19.770604 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5","Type":"ContainerStarted","Data":"0c407fff3b3c7dffc5326c4219009c1674e1f6ed3825e484c5cd4ceabf0910b9"} Jan 22 07:13:19 crc kubenswrapper[4982]: I0122 07:13:19.770659 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5","Type":"ContainerStarted","Data":"082345522aa13e7e93a1ec35ab0b597bf5dbe75b9c6533a75c552173aa9ce6af"} Jan 22 07:13:19 crc kubenswrapper[4982]: I0122 07:13:19.770680 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5","Type":"ContainerStarted","Data":"59275a4efcc70bc7327d84e6395e741c9744b894c931bcbd0a47705249252e50"} Jan 22 07:13:19 crc kubenswrapper[4982]: I0122 07:13:19.774468 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"895d6b93-d214-4982-9246-43d77890c2f3","Type":"ContainerStarted","Data":"4bf163a8f34f19a31c1c2e895b8fca0fc1218bbb3e23a36e5277638dc26e5f03"} Jan 22 07:13:19 crc kubenswrapper[4982]: I0122 07:13:19.774517 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"895d6b93-d214-4982-9246-43d77890c2f3","Type":"ContainerStarted","Data":"a3e36ecb47ef0700c86afa2abffb88f1e463c0bc6df3b99054c917819e188bde"} Jan 22 07:13:19 crc kubenswrapper[4982]: I0122 07:13:19.774531 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" 
event={"ID":"895d6b93-d214-4982-9246-43d77890c2f3","Type":"ContainerStarted","Data":"bf2c8c01f641dc8ea150cd93bafbd6bfc8fd148e8c3f6b5654672fd64ee479aa"} Jan 22 07:13:19 crc kubenswrapper[4982]: I0122 07:13:19.776695 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"002ade31-2880-47c6-9ed7-5bb12d2bcda9","Type":"ContainerStarted","Data":"07f6795cb2450021db02f8fa0e35253e54b24a7de14457db69d7506093bbdf56"} Jan 22 07:13:19 crc kubenswrapper[4982]: I0122 07:13:19.776729 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"002ade31-2880-47c6-9ed7-5bb12d2bcda9","Type":"ContainerStarted","Data":"9dfcab48251a53d9182860ffc8fa4c856c41e6fa16d2fcc23bd612def4a8d027"} Jan 22 07:13:19 crc kubenswrapper[4982]: I0122 07:13:19.776744 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"002ade31-2880-47c6-9ed7-5bb12d2bcda9","Type":"ContainerStarted","Data":"ed5b46695ed3472336d9409e747fda8f13bf131db79de7ae13ab082d2ab1d560"} Jan 22 07:13:19 crc kubenswrapper[4982]: I0122 07:13:19.808466 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-1" podStartSLOduration=2.808443422 podStartE2EDuration="2.808443422s" podCreationTimestamp="2026-01-22 07:13:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:13:19.801056262 +0000 UTC m=+5260.639694265" watchObservedRunningTime="2026-01-22 07:13:19.808443422 +0000 UTC m=+5260.647081435" Jan 22 07:13:19 crc kubenswrapper[4982]: I0122 07:13:19.820945 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"] Jan 22 07:13:19 crc kubenswrapper[4982]: W0122 07:13:19.822040 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbded9234_3e82_419e_82b4_a43809301833.slice/crio-fb0343b0748a0290fe124d0e365bf81b0bd396e4cb314bef46e214b2a2150972 WatchSource:0}: Error finding container fb0343b0748a0290fe124d0e365bf81b0bd396e4cb314bef46e214b2a2150972: Status 404 returned error can't find the container with id fb0343b0748a0290fe124d0e365bf81b0bd396e4cb314bef46e214b2a2150972 Jan 22 07:13:19 crc kubenswrapper[4982]: I0122 07:13:19.823430 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-1" podStartSLOduration=2.823407695 podStartE2EDuration="2.823407695s" podCreationTimestamp="2026-01-22 07:13:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:13:19.822416949 +0000 UTC m=+5260.661054962" watchObservedRunningTime="2026-01-22 07:13:19.823407695 +0000 UTC m=+5260.662045718" Jan 22 07:13:19 crc kubenswrapper[4982]: I0122 07:13:19.852697 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=2.852678584 podStartE2EDuration="2.852678584s" podCreationTimestamp="2026-01-22 07:13:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:13:19.84175663 +0000 UTC m=+5260.680394633" watchObservedRunningTime="2026-01-22 07:13:19.852678584 +0000 UTC m=+5260.691316587" Jan 22 07:13:19 crc kubenswrapper[4982]: I0122 07:13:19.943044 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/ovsdbserver-sb-2"] Jan 22 07:13:19 crc kubenswrapper[4982]: W0122 07:13:19.954992 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podef8a87c1_3a5f_4159_af3f_f854b1b16ef6.slice/crio-1ae899e6aa22dd1c8e054b07b98f16fbf16b0c2bc79322ff86b20513376ba379 WatchSource:0}: Error finding container 1ae899e6aa22dd1c8e054b07b98f16fbf16b0c2bc79322ff86b20513376ba379: Status 404 returned error can't find the container with id 1ae899e6aa22dd1c8e054b07b98f16fbf16b0c2bc79322ff86b20513376ba379 Jan 22 07:13:20 crc kubenswrapper[4982]: I0122 07:13:20.420388 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 22 07:13:20 crc kubenswrapper[4982]: W0122 07:13:20.421964 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd077ea28_6c68_4940_b841_7d113bcd8394.slice/crio-ea45ded8e7b1d91b1e078c2712532aed4b010a78760b63179bec815237165b6d WatchSource:0}: Error finding container ea45ded8e7b1d91b1e078c2712532aed4b010a78760b63179bec815237165b6d: Status 404 returned error can't find the container with id ea45ded8e7b1d91b1e078c2712532aed4b010a78760b63179bec815237165b6d Jan 22 07:13:20 crc kubenswrapper[4982]: I0122 07:13:20.787942 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"bded9234-3e82-419e-82b4-a43809301833","Type":"ContainerStarted","Data":"8de1fd0c819e0d7557e9c7b6ae9cec5ae183935056ad04380602f7dcb96a149f"} Jan 22 07:13:20 crc kubenswrapper[4982]: I0122 07:13:20.788322 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"bded9234-3e82-419e-82b4-a43809301833","Type":"ContainerStarted","Data":"bbae77251bea474aa53f92c3ef82422958e4b13e5dcf4b6d33b434808f9434d2"} Jan 22 07:13:20 crc kubenswrapper[4982]: I0122 07:13:20.788446 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"bded9234-3e82-419e-82b4-a43809301833","Type":"ContainerStarted","Data":"fb0343b0748a0290fe124d0e365bf81b0bd396e4cb314bef46e214b2a2150972"} Jan 22 07:13:20 crc kubenswrapper[4982]: I0122 07:13:20.790113 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"ef8a87c1-3a5f-4159-af3f-f854b1b16ef6","Type":"ContainerStarted","Data":"e849e6afaed6b4a10676ac4f0d6fad03e0896caa640ed9ce67b7c5114be44068"} Jan 22 07:13:20 crc kubenswrapper[4982]: I0122 07:13:20.790140 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"ef8a87c1-3a5f-4159-af3f-f854b1b16ef6","Type":"ContainerStarted","Data":"4f492d405fb9fcbcfd581e0eadafff08cfbf70233dd6e77892f5f09405d442b7"} Jan 22 07:13:20 crc kubenswrapper[4982]: I0122 07:13:20.790151 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"ef8a87c1-3a5f-4159-af3f-f854b1b16ef6","Type":"ContainerStarted","Data":"1ae899e6aa22dd1c8e054b07b98f16fbf16b0c2bc79322ff86b20513376ba379"} Jan 22 07:13:20 crc kubenswrapper[4982]: I0122 07:13:20.793005 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"d077ea28-6c68-4940-b841-7d113bcd8394","Type":"ContainerStarted","Data":"9a9dac53bba07f907db63951883588be020c4263f843075c1756dba414157564"} Jan 22 07:13:20 crc kubenswrapper[4982]: I0122 07:13:20.793036 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" 
event={"ID":"d077ea28-6c68-4940-b841-7d113bcd8394","Type":"ContainerStarted","Data":"1842cecc90f8573e4e44ff77ee4b22f6f2d67e88f1138f496c92019b904ded97"} Jan 22 07:13:20 crc kubenswrapper[4982]: I0122 07:13:20.793049 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"d077ea28-6c68-4940-b841-7d113bcd8394","Type":"ContainerStarted","Data":"ea45ded8e7b1d91b1e078c2712532aed4b010a78760b63179bec815237165b6d"} Jan 22 07:13:20 crc kubenswrapper[4982]: I0122 07:13:20.843595 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-2" podStartSLOduration=3.843569624 podStartE2EDuration="3.843569624s" podCreationTimestamp="2026-01-22 07:13:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:13:20.83230261 +0000 UTC m=+5261.670940633" watchObservedRunningTime="2026-01-22 07:13:20.843569624 +0000 UTC m=+5261.682207627" Jan 22 07:13:20 crc kubenswrapper[4982]: I0122 07:13:20.844870 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-2" podStartSLOduration=3.844841508 podStartE2EDuration="3.844841508s" podCreationTimestamp="2026-01-22 07:13:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:13:20.816882705 +0000 UTC m=+5261.655520718" watchObservedRunningTime="2026-01-22 07:13:20.844841508 +0000 UTC m=+5261.683479511" Jan 22 07:13:20 crc kubenswrapper[4982]: I0122 07:13:20.856406 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=3.856384579 podStartE2EDuration="3.856384579s" podCreationTimestamp="2026-01-22 07:13:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:13:20.852260428 +0000 UTC m=+5261.690898441" watchObservedRunningTime="2026-01-22 07:13:20.856384579 +0000 UTC m=+5261.695022592" Jan 22 07:13:21 crc kubenswrapper[4982]: I0122 07:13:21.514385 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Jan 22 07:13:21 crc kubenswrapper[4982]: I0122 07:13:21.537008 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-2" Jan 22 07:13:21 crc kubenswrapper[4982]: I0122 07:13:21.548260 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-1" Jan 22 07:13:21 crc kubenswrapper[4982]: I0122 07:13:21.694957 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Jan 22 07:13:21 crc kubenswrapper[4982]: I0122 07:13:21.733649 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-2" Jan 22 07:13:21 crc kubenswrapper[4982]: I0122 07:13:21.734114 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-1" Jan 22 07:13:23 crc kubenswrapper[4982]: I0122 07:13:23.513846 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Jan 22 07:13:23 crc kubenswrapper[4982]: I0122 07:13:23.537100 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-2" Jan 22 07:13:23 crc kubenswrapper[4982]: I0122 07:13:23.548313 
4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-1" Jan 22 07:13:23 crc kubenswrapper[4982]: I0122 07:13:23.694346 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Jan 22 07:13:23 crc kubenswrapper[4982]: I0122 07:13:23.737933 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-2" Jan 22 07:13:23 crc kubenswrapper[4982]: I0122 07:13:23.737989 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-1" Jan 22 07:13:24 crc kubenswrapper[4982]: I0122 07:13:24.561374 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Jan 22 07:13:24 crc kubenswrapper[4982]: I0122 07:13:24.591046 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-2" Jan 22 07:13:24 crc kubenswrapper[4982]: I0122 07:13:24.596165 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-1" Jan 22 07:13:24 crc kubenswrapper[4982]: I0122 07:13:24.601402 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Jan 22 07:13:24 crc kubenswrapper[4982]: I0122 07:13:24.637952 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-1" Jan 22 07:13:24 crc kubenswrapper[4982]: I0122 07:13:24.779096 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Jan 22 07:13:24 crc kubenswrapper[4982]: I0122 07:13:24.780632 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-2" Jan 22 07:13:24 crc kubenswrapper[4982]: I0122 07:13:24.786214 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-1" Jan 22 07:13:24 crc kubenswrapper[4982]: I0122 07:13:24.841251 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-1" Jan 22 07:13:24 crc kubenswrapper[4982]: I0122 07:13:24.847912 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5d75cc5ff9-bqrw7"] Jan 22 07:13:24 crc kubenswrapper[4982]: I0122 07:13:24.880176 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d75cc5ff9-bqrw7"] Jan 22 07:13:24 crc kubenswrapper[4982]: I0122 07:13:24.880286 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d75cc5ff9-bqrw7"
Jan 22 07:13:24 crc kubenswrapper[4982]: I0122 07:13:24.885303 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb"
Jan 22 07:13:24 crc kubenswrapper[4982]: I0122 07:13:24.943770 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f84f07aa-c943-42d7-b1ca-9c762d62a06d-config\") pod \"dnsmasq-dns-5d75cc5ff9-bqrw7\" (UID: \"f84f07aa-c943-42d7-b1ca-9c762d62a06d\") " pod="openstack/dnsmasq-dns-5d75cc5ff9-bqrw7"
Jan 22 07:13:24 crc kubenswrapper[4982]: I0122 07:13:24.943836 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f84f07aa-c943-42d7-b1ca-9c762d62a06d-dns-svc\") pod \"dnsmasq-dns-5d75cc5ff9-bqrw7\" (UID: \"f84f07aa-c943-42d7-b1ca-9c762d62a06d\") " pod="openstack/dnsmasq-dns-5d75cc5ff9-bqrw7"
Jan 22 07:13:24 crc kubenswrapper[4982]: I0122 07:13:24.944363 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f84f07aa-c943-42d7-b1ca-9c762d62a06d-ovsdbserver-nb\") pod \"dnsmasq-dns-5d75cc5ff9-bqrw7\" (UID: \"f84f07aa-c943-42d7-b1ca-9c762d62a06d\") " pod="openstack/dnsmasq-dns-5d75cc5ff9-bqrw7"
Jan 22 07:13:24 crc kubenswrapper[4982]: I0122 07:13:24.944992 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqx6h\" (UniqueName: \"kubernetes.io/projected/f84f07aa-c943-42d7-b1ca-9c762d62a06d-kube-api-access-cqx6h\") pod \"dnsmasq-dns-5d75cc5ff9-bqrw7\" (UID: \"f84f07aa-c943-42d7-b1ca-9c762d62a06d\") " pod="openstack/dnsmasq-dns-5d75cc5ff9-bqrw7"
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.046671 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqx6h\" (UniqueName: \"kubernetes.io/projected/f84f07aa-c943-42d7-b1ca-9c762d62a06d-kube-api-access-cqx6h\") pod \"dnsmasq-dns-5d75cc5ff9-bqrw7\" (UID: \"f84f07aa-c943-42d7-b1ca-9c762d62a06d\") " pod="openstack/dnsmasq-dns-5d75cc5ff9-bqrw7"
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.046735 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f84f07aa-c943-42d7-b1ca-9c762d62a06d-config\") pod \"dnsmasq-dns-5d75cc5ff9-bqrw7\" (UID: \"f84f07aa-c943-42d7-b1ca-9c762d62a06d\") " pod="openstack/dnsmasq-dns-5d75cc5ff9-bqrw7"
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.046765 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f84f07aa-c943-42d7-b1ca-9c762d62a06d-dns-svc\") pod \"dnsmasq-dns-5d75cc5ff9-bqrw7\" (UID: \"f84f07aa-c943-42d7-b1ca-9c762d62a06d\") " pod="openstack/dnsmasq-dns-5d75cc5ff9-bqrw7"
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.046863 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f84f07aa-c943-42d7-b1ca-9c762d62a06d-ovsdbserver-nb\") pod \"dnsmasq-dns-5d75cc5ff9-bqrw7\" (UID: \"f84f07aa-c943-42d7-b1ca-9c762d62a06d\") " pod="openstack/dnsmasq-dns-5d75cc5ff9-bqrw7"
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.047738 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f84f07aa-c943-42d7-b1ca-9c762d62a06d-config\") pod \"dnsmasq-dns-5d75cc5ff9-bqrw7\" (UID: \"f84f07aa-c943-42d7-b1ca-9c762d62a06d\") " pod="openstack/dnsmasq-dns-5d75cc5ff9-bqrw7"
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.047781 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f84f07aa-c943-42d7-b1ca-9c762d62a06d-ovsdbserver-nb\") pod \"dnsmasq-dns-5d75cc5ff9-bqrw7\" (UID: \"f84f07aa-c943-42d7-b1ca-9c762d62a06d\") " pod="openstack/dnsmasq-dns-5d75cc5ff9-bqrw7"
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.048418 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f84f07aa-c943-42d7-b1ca-9c762d62a06d-dns-svc\") pod \"dnsmasq-dns-5d75cc5ff9-bqrw7\" (UID: \"f84f07aa-c943-42d7-b1ca-9c762d62a06d\") " pod="openstack/dnsmasq-dns-5d75cc5ff9-bqrw7"
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.074127 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqx6h\" (UniqueName: \"kubernetes.io/projected/f84f07aa-c943-42d7-b1ca-9c762d62a06d-kube-api-access-cqx6h\") pod \"dnsmasq-dns-5d75cc5ff9-bqrw7\" (UID: \"f84f07aa-c943-42d7-b1ca-9c762d62a06d\") " pod="openstack/dnsmasq-dns-5d75cc5ff9-bqrw7"
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.204317 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d75cc5ff9-bqrw7"
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.231476 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d75cc5ff9-bqrw7"]
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.267497 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-587db994f9-tcc86"]
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.268886 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-587db994f9-tcc86"
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.272415 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb"
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.290404 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-587db994f9-tcc86"]
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.351317 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f949af24-9fbd-416d-b200-4c1c2d36275b-ovsdbserver-sb\") pod \"dnsmasq-dns-587db994f9-tcc86\" (UID: \"f949af24-9fbd-416d-b200-4c1c2d36275b\") " pod="openstack/dnsmasq-dns-587db994f9-tcc86"
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.351401 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f949af24-9fbd-416d-b200-4c1c2d36275b-config\") pod \"dnsmasq-dns-587db994f9-tcc86\" (UID: \"f949af24-9fbd-416d-b200-4c1c2d36275b\") " pod="openstack/dnsmasq-dns-587db994f9-tcc86"
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.351426 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f949af24-9fbd-416d-b200-4c1c2d36275b-ovsdbserver-nb\") pod \"dnsmasq-dns-587db994f9-tcc86\" (UID: \"f949af24-9fbd-416d-b200-4c1c2d36275b\") " pod="openstack/dnsmasq-dns-587db994f9-tcc86"
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.351467 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f949af24-9fbd-416d-b200-4c1c2d36275b-dns-svc\") pod \"dnsmasq-dns-587db994f9-tcc86\" (UID: \"f949af24-9fbd-416d-b200-4c1c2d36275b\") " pod="openstack/dnsmasq-dns-587db994f9-tcc86"
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.351482 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6n5tb\" (UniqueName: \"kubernetes.io/projected/f949af24-9fbd-416d-b200-4c1c2d36275b-kube-api-access-6n5tb\") pod \"dnsmasq-dns-587db994f9-tcc86\" (UID: \"f949af24-9fbd-416d-b200-4c1c2d36275b\") " pod="openstack/dnsmasq-dns-587db994f9-tcc86"
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.455713 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f949af24-9fbd-416d-b200-4c1c2d36275b-config\") pod \"dnsmasq-dns-587db994f9-tcc86\" (UID: \"f949af24-9fbd-416d-b200-4c1c2d36275b\") " pod="openstack/dnsmasq-dns-587db994f9-tcc86"
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.456059 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f949af24-9fbd-416d-b200-4c1c2d36275b-ovsdbserver-nb\") pod \"dnsmasq-dns-587db994f9-tcc86\" (UID: \"f949af24-9fbd-416d-b200-4c1c2d36275b\") " pod="openstack/dnsmasq-dns-587db994f9-tcc86"
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.456814 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f949af24-9fbd-416d-b200-4c1c2d36275b-ovsdbserver-nb\") pod \"dnsmasq-dns-587db994f9-tcc86\" (UID: \"f949af24-9fbd-416d-b200-4c1c2d36275b\") " pod="openstack/dnsmasq-dns-587db994f9-tcc86"
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.456839 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f949af24-9fbd-416d-b200-4c1c2d36275b-config\") pod \"dnsmasq-dns-587db994f9-tcc86\" (UID: \"f949af24-9fbd-416d-b200-4c1c2d36275b\") " pod="openstack/dnsmasq-dns-587db994f9-tcc86"
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.456890 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6n5tb\" (UniqueName: \"kubernetes.io/projected/f949af24-9fbd-416d-b200-4c1c2d36275b-kube-api-access-6n5tb\") pod \"dnsmasq-dns-587db994f9-tcc86\" (UID: \"f949af24-9fbd-416d-b200-4c1c2d36275b\") " pod="openstack/dnsmasq-dns-587db994f9-tcc86"
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.456910 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f949af24-9fbd-416d-b200-4c1c2d36275b-dns-svc\") pod \"dnsmasq-dns-587db994f9-tcc86\" (UID: \"f949af24-9fbd-416d-b200-4c1c2d36275b\") " pod="openstack/dnsmasq-dns-587db994f9-tcc86"
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.457214 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f949af24-9fbd-416d-b200-4c1c2d36275b-ovsdbserver-sb\") pod \"dnsmasq-dns-587db994f9-tcc86\" (UID: \"f949af24-9fbd-416d-b200-4c1c2d36275b\") " pod="openstack/dnsmasq-dns-587db994f9-tcc86"
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.457752 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f949af24-9fbd-416d-b200-4c1c2d36275b-dns-svc\") pod \"dnsmasq-dns-587db994f9-tcc86\" (UID: \"f949af24-9fbd-416d-b200-4c1c2d36275b\") " pod="openstack/dnsmasq-dns-587db994f9-tcc86"
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.457912 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f949af24-9fbd-416d-b200-4c1c2d36275b-ovsdbserver-sb\") pod \"dnsmasq-dns-587db994f9-tcc86\" (UID: \"f949af24-9fbd-416d-b200-4c1c2d36275b\") " pod="openstack/dnsmasq-dns-587db994f9-tcc86"
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.479576 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6n5tb\" (UniqueName: \"kubernetes.io/projected/f949af24-9fbd-416d-b200-4c1c2d36275b-kube-api-access-6n5tb\") pod \"dnsmasq-dns-587db994f9-tcc86\" (UID: \"f949af24-9fbd-416d-b200-4c1c2d36275b\") " pod="openstack/dnsmasq-dns-587db994f9-tcc86"
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.663608 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-587db994f9-tcc86"
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.738233 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d75cc5ff9-bqrw7"]
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.844160 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d75cc5ff9-bqrw7" event={"ID":"f84f07aa-c943-42d7-b1ca-9c762d62a06d","Type":"ContainerStarted","Data":"22da5c877c6b6f32f213b3e40e04a31524e034549c8234bd6c4a84a09cc87202"}
Jan 22 07:13:25 crc kubenswrapper[4982]: I0122 07:13:25.896605 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-587db994f9-tcc86"]
Jan 22 07:13:25 crc kubenswrapper[4982]: W0122 07:13:25.908950 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf949af24_9fbd_416d_b200_4c1c2d36275b.slice/crio-17a20b4f130afe1b864503d892e05d66baa4d4b289951786ca75dec4b18dfe9d WatchSource:0}: Error finding container 17a20b4f130afe1b864503d892e05d66baa4d4b289951786ca75dec4b18dfe9d: Status 404 returned error can't find the container with id 17a20b4f130afe1b864503d892e05d66baa4d4b289951786ca75dec4b18dfe9d
Jan 22 07:13:26 crc kubenswrapper[4982]: I0122 07:13:26.852388 4982 generic.go:334] "Generic (PLEG): container finished" podID="f949af24-9fbd-416d-b200-4c1c2d36275b" containerID="81e9a3bc19e9ab8d9140fd8b726f2c9061e768c57a37fae651713405c50659d1" exitCode=0
Jan 22 07:13:26 crc kubenswrapper[4982]: I0122 07:13:26.852447 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-587db994f9-tcc86" event={"ID":"f949af24-9fbd-416d-b200-4c1c2d36275b","Type":"ContainerDied","Data":"81e9a3bc19e9ab8d9140fd8b726f2c9061e768c57a37fae651713405c50659d1"}
Jan 22 07:13:26 crc kubenswrapper[4982]: I0122 07:13:26.852473 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-587db994f9-tcc86" event={"ID":"f949af24-9fbd-416d-b200-4c1c2d36275b","Type":"ContainerStarted","Data":"17a20b4f130afe1b864503d892e05d66baa4d4b289951786ca75dec4b18dfe9d"}
Jan 22 07:13:26 crc kubenswrapper[4982]: I0122 07:13:26.855946 4982 generic.go:334] "Generic (PLEG): container finished" podID="f84f07aa-c943-42d7-b1ca-9c762d62a06d" containerID="bc421850e8045c3e002e6ee08921accf2d787a25e25796c08071bbd75508bdff" exitCode=0
Jan 22 07:13:26 crc kubenswrapper[4982]: I0122 07:13:26.856119 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d75cc5ff9-bqrw7" event={"ID":"f84f07aa-c943-42d7-b1ca-9c762d62a06d","Type":"ContainerDied","Data":"bc421850e8045c3e002e6ee08921accf2d787a25e25796c08071bbd75508bdff"}
Jan 22 07:13:27 crc kubenswrapper[4982]: I0122 07:13:27.159054 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d75cc5ff9-bqrw7"
Jan 22 07:13:27 crc kubenswrapper[4982]: I0122 07:13:27.187575 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f84f07aa-c943-42d7-b1ca-9c762d62a06d-config\") pod \"f84f07aa-c943-42d7-b1ca-9c762d62a06d\" (UID: \"f84f07aa-c943-42d7-b1ca-9c762d62a06d\") "
Jan 22 07:13:27 crc kubenswrapper[4982]: I0122 07:13:27.188417 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f84f07aa-c943-42d7-b1ca-9c762d62a06d-ovsdbserver-nb\") pod \"f84f07aa-c943-42d7-b1ca-9c762d62a06d\" (UID: \"f84f07aa-c943-42d7-b1ca-9c762d62a06d\") "
Jan 22 07:13:27 crc kubenswrapper[4982]: I0122 07:13:27.188499 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cqx6h\" (UniqueName: \"kubernetes.io/projected/f84f07aa-c943-42d7-b1ca-9c762d62a06d-kube-api-access-cqx6h\") pod \"f84f07aa-c943-42d7-b1ca-9c762d62a06d\" (UID: \"f84f07aa-c943-42d7-b1ca-9c762d62a06d\") "
Jan 22 07:13:27 crc kubenswrapper[4982]: I0122 07:13:27.188755 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f84f07aa-c943-42d7-b1ca-9c762d62a06d-dns-svc\") pod \"f84f07aa-c943-42d7-b1ca-9c762d62a06d\" (UID: \"f84f07aa-c943-42d7-b1ca-9c762d62a06d\") "
Jan 22 07:13:27 crc kubenswrapper[4982]: I0122 07:13:27.213197 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f84f07aa-c943-42d7-b1ca-9c762d62a06d-kube-api-access-cqx6h" (OuterVolumeSpecName: "kube-api-access-cqx6h") pod "f84f07aa-c943-42d7-b1ca-9c762d62a06d" (UID: "f84f07aa-c943-42d7-b1ca-9c762d62a06d"). InnerVolumeSpecName "kube-api-access-cqx6h". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 07:13:27 crc kubenswrapper[4982]: I0122 07:13:27.215518 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f84f07aa-c943-42d7-b1ca-9c762d62a06d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f84f07aa-c943-42d7-b1ca-9c762d62a06d" (UID: "f84f07aa-c943-42d7-b1ca-9c762d62a06d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 07:13:27 crc kubenswrapper[4982]: I0122 07:13:27.217578 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f84f07aa-c943-42d7-b1ca-9c762d62a06d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f84f07aa-c943-42d7-b1ca-9c762d62a06d" (UID: "f84f07aa-c943-42d7-b1ca-9c762d62a06d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 07:13:27 crc kubenswrapper[4982]: I0122 07:13:27.233523 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f84f07aa-c943-42d7-b1ca-9c762d62a06d-config" (OuterVolumeSpecName: "config") pod "f84f07aa-c943-42d7-b1ca-9c762d62a06d" (UID: "f84f07aa-c943-42d7-b1ca-9c762d62a06d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 07:13:27 crc kubenswrapper[4982]: I0122 07:13:27.290684 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f84f07aa-c943-42d7-b1ca-9c762d62a06d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Jan 22 07:13:27 crc kubenswrapper[4982]: I0122 07:13:27.290718 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cqx6h\" (UniqueName: \"kubernetes.io/projected/f84f07aa-c943-42d7-b1ca-9c762d62a06d-kube-api-access-cqx6h\") on node \"crc\" DevicePath \"\""
Jan 22 07:13:27 crc kubenswrapper[4982]: I0122 07:13:27.290731 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f84f07aa-c943-42d7-b1ca-9c762d62a06d-dns-svc\") on node \"crc\" DevicePath \"\""
Jan 22 07:13:27 crc kubenswrapper[4982]: I0122 07:13:27.290744 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f84f07aa-c943-42d7-b1ca-9c762d62a06d-config\") on node \"crc\" DevicePath \"\""
Jan 22 07:13:27 crc kubenswrapper[4982]: I0122 07:13:27.865271 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d75cc5ff9-bqrw7" event={"ID":"f84f07aa-c943-42d7-b1ca-9c762d62a06d","Type":"ContainerDied","Data":"22da5c877c6b6f32f213b3e40e04a31524e034549c8234bd6c4a84a09cc87202"}
Jan 22 07:13:27 crc kubenswrapper[4982]: I0122 07:13:27.865650 4982 scope.go:117] "RemoveContainer" containerID="bc421850e8045c3e002e6ee08921accf2d787a25e25796c08071bbd75508bdff"
Jan 22 07:13:27 crc kubenswrapper[4982]: I0122 07:13:27.865314 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d75cc5ff9-bqrw7"
Jan 22 07:13:27 crc kubenswrapper[4982]: I0122 07:13:27.869566 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-587db994f9-tcc86" event={"ID":"f949af24-9fbd-416d-b200-4c1c2d36275b","Type":"ContainerStarted","Data":"98f59cde21b6a1f6e634bb5eca96ceb9403a896658bfb472ae2dfd9b6452e290"}
Jan 22 07:13:27 crc kubenswrapper[4982]: I0122 07:13:27.869732 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-587db994f9-tcc86"
Jan 22 07:13:27 crc kubenswrapper[4982]: I0122 07:13:27.934913 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d75cc5ff9-bqrw7"]
Jan 22 07:13:27 crc kubenswrapper[4982]: I0122 07:13:27.951421 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5d75cc5ff9-bqrw7"]
Jan 22 07:13:27 crc kubenswrapper[4982]: I0122 07:13:27.952574 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-587db994f9-tcc86" podStartSLOduration=2.952565159 podStartE2EDuration="2.952565159s" podCreationTimestamp="2026-01-22 07:13:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:13:27.948541461 +0000 UTC m=+5268.787179474" watchObservedRunningTime="2026-01-22 07:13:27.952565159 +0000 UTC m=+5268.791203162"
Jan 22 07:13:28 crc kubenswrapper[4982]: I0122 07:13:28.573524 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-2"
Jan 22 07:13:28 crc kubenswrapper[4982]: I0122 07:13:28.735309 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0"
Jan 22 07:13:28 crc kubenswrapper[4982]: I0122 07:13:28.790906 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-2"
Jan 22 07:13:29 crc kubenswrapper[4982]: I0122 07:13:29.731568 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f84f07aa-c943-42d7-b1ca-9c762d62a06d" path="/var/lib/kubelet/pods/f84f07aa-c943-42d7-b1ca-9c762d62a06d/volumes"
Jan 22 07:13:31 crc kubenswrapper[4982]: I0122 07:13:31.656242 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-copy-data"]
Jan 22 07:13:31 crc kubenswrapper[4982]: E0122 07:13:31.657447 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f84f07aa-c943-42d7-b1ca-9c762d62a06d" containerName="init"
Jan 22 07:13:31 crc kubenswrapper[4982]: I0122 07:13:31.657534 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f84f07aa-c943-42d7-b1ca-9c762d62a06d" containerName="init"
Jan 22 07:13:31 crc kubenswrapper[4982]: I0122 07:13:31.657912 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f84f07aa-c943-42d7-b1ca-9c762d62a06d" containerName="init"
Jan 22 07:13:31 crc kubenswrapper[4982]: I0122 07:13:31.658958 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data"
Jan 22 07:13:31 crc kubenswrapper[4982]: I0122 07:13:31.665743 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovn-data-cert"
Jan 22 07:13:31 crc kubenswrapper[4982]: I0122 07:13:31.680059 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"]
Jan 22 07:13:31 crc kubenswrapper[4982]: I0122 07:13:31.784885 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/219b433b-fe06-4df4-856a-faf3851da20f-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"219b433b-fe06-4df4-856a-faf3851da20f\") " pod="openstack/ovn-copy-data"
Jan 22 07:13:31 crc kubenswrapper[4982]: I0122 07:13:31.784987 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-86cd8aed-0d6b-40c9-a13a-9fd48ab572aa\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-86cd8aed-0d6b-40c9-a13a-9fd48ab572aa\") pod \"ovn-copy-data\" (UID: \"219b433b-fe06-4df4-856a-faf3851da20f\") " pod="openstack/ovn-copy-data"
Jan 22 07:13:31 crc kubenswrapper[4982]: I0122 07:13:31.785046 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhnk9\" (UniqueName: \"kubernetes.io/projected/219b433b-fe06-4df4-856a-faf3851da20f-kube-api-access-jhnk9\") pod \"ovn-copy-data\" (UID: \"219b433b-fe06-4df4-856a-faf3851da20f\") " pod="openstack/ovn-copy-data"
Jan 22 07:13:31 crc kubenswrapper[4982]: I0122 07:13:31.886732 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/219b433b-fe06-4df4-856a-faf3851da20f-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"219b433b-fe06-4df4-856a-faf3851da20f\") " pod="openstack/ovn-copy-data"
Jan 22 07:13:31 crc kubenswrapper[4982]: I0122 07:13:31.886827 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-86cd8aed-0d6b-40c9-a13a-9fd48ab572aa\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-86cd8aed-0d6b-40c9-a13a-9fd48ab572aa\") pod \"ovn-copy-data\" (UID: \"219b433b-fe06-4df4-856a-faf3851da20f\") " pod="openstack/ovn-copy-data"
Jan 22 07:13:31 crc kubenswrapper[4982]: I0122 07:13:31.886949 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhnk9\" (UniqueName: \"kubernetes.io/projected/219b433b-fe06-4df4-856a-faf3851da20f-kube-api-access-jhnk9\") pod \"ovn-copy-data\" (UID: \"219b433b-fe06-4df4-856a-faf3851da20f\") " pod="openstack/ovn-copy-data"
Jan 22 07:13:31 crc kubenswrapper[4982]: I0122 07:13:31.892068 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Jan 22 07:13:31 crc kubenswrapper[4982]: I0122 07:13:31.892124 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-86cd8aed-0d6b-40c9-a13a-9fd48ab572aa\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-86cd8aed-0d6b-40c9-a13a-9fd48ab572aa\") pod \"ovn-copy-data\" (UID: \"219b433b-fe06-4df4-856a-faf3851da20f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/bc3815f42bf694807805fe0f5f11b9153e97776ee2597fc5ee418491ab95c4b4/globalmount\"" pod="openstack/ovn-copy-data"
Jan 22 07:13:31 crc kubenswrapper[4982]: I0122 07:13:31.894002 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/219b433b-fe06-4df4-856a-faf3851da20f-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"219b433b-fe06-4df4-856a-faf3851da20f\") " pod="openstack/ovn-copy-data"
Jan 22 07:13:31 crc kubenswrapper[4982]: I0122 07:13:31.909185 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhnk9\" (UniqueName: \"kubernetes.io/projected/219b433b-fe06-4df4-856a-faf3851da20f-kube-api-access-jhnk9\") pod \"ovn-copy-data\" (UID: \"219b433b-fe06-4df4-856a-faf3851da20f\") " pod="openstack/ovn-copy-data"
Jan 22 07:13:31 crc kubenswrapper[4982]: I0122 07:13:31.943145 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-86cd8aed-0d6b-40c9-a13a-9fd48ab572aa\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-86cd8aed-0d6b-40c9-a13a-9fd48ab572aa\") pod \"ovn-copy-data\" (UID: \"219b433b-fe06-4df4-856a-faf3851da20f\") " pod="openstack/ovn-copy-data"
Jan 22 07:13:31 crc kubenswrapper[4982]: I0122 07:13:31.987684 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data"
Jan 22 07:13:32 crc kubenswrapper[4982]: I0122 07:13:32.580053 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"]
Jan 22 07:13:32 crc kubenswrapper[4982]: I0122 07:13:32.588034 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 22 07:13:32 crc kubenswrapper[4982]: I0122 07:13:32.918327 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"219b433b-fe06-4df4-856a-faf3851da20f","Type":"ContainerStarted","Data":"6b3a6818707bacf8a8a15add584b0760454a078161cfd006b751e74a4b072766"}
Jan 22 07:13:33 crc kubenswrapper[4982]: I0122 07:13:33.927452 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"219b433b-fe06-4df4-856a-faf3851da20f","Type":"ContainerStarted","Data":"e97151d19c2b5e134b5025fce94b9d93ec80fb60af103066c3d68a2fd3ec8538"}
Jan 22 07:13:33 crc kubenswrapper[4982]: I0122 07:13:33.946717 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-copy-data" podStartSLOduration=3.404340575 podStartE2EDuration="3.946697063s" podCreationTimestamp="2026-01-22 07:13:30 +0000 UTC" firstStartedPulling="2026-01-22 07:13:32.58763601 +0000 UTC m=+5273.426274033" lastFinishedPulling="2026-01-22 07:13:33.129992518 +0000 UTC m=+5273.968630521" observedRunningTime="2026-01-22 07:13:33.941625306 +0000 UTC m=+5274.780263329" watchObservedRunningTime="2026-01-22 07:13:33.946697063 +0000 UTC m=+5274.785335066"
Jan 22 07:13:35 crc kubenswrapper[4982]: I0122 07:13:35.665973 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-587db994f9-tcc86"
Jan 22 07:13:35 crc kubenswrapper[4982]: I0122 07:13:35.745975 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-699964fbc-hkgb4"]
Jan 22 07:13:35 crc kubenswrapper[4982]: I0122 07:13:35.746650 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-699964fbc-hkgb4" podUID="82cab6e1-6ef0-479b-a2f1-dc767439dbc1" containerName="dnsmasq-dns" containerID="cri-o://c11c2fccf10eb86424599c0a3032cf869bc1543b03a8a1f69bdd20b3de6e881b" gracePeriod=10
Jan 22 07:13:35 crc kubenswrapper[4982]: I0122 07:13:35.949766 4982 generic.go:334] "Generic (PLEG): container finished" podID="82cab6e1-6ef0-479b-a2f1-dc767439dbc1" containerID="c11c2fccf10eb86424599c0a3032cf869bc1543b03a8a1f69bdd20b3de6e881b" exitCode=0
Jan 22 07:13:35 crc kubenswrapper[4982]: I0122 07:13:35.949834 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-699964fbc-hkgb4" event={"ID":"82cab6e1-6ef0-479b-a2f1-dc767439dbc1","Type":"ContainerDied","Data":"c11c2fccf10eb86424599c0a3032cf869bc1543b03a8a1f69bdd20b3de6e881b"}
Jan 22 07:13:36 crc kubenswrapper[4982]: I0122 07:13:36.347105 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-699964fbc-hkgb4"
Jan 22 07:13:36 crc kubenswrapper[4982]: I0122 07:13:36.492972 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2mbjm\" (UniqueName: \"kubernetes.io/projected/82cab6e1-6ef0-479b-a2f1-dc767439dbc1-kube-api-access-2mbjm\") pod \"82cab6e1-6ef0-479b-a2f1-dc767439dbc1\" (UID: \"82cab6e1-6ef0-479b-a2f1-dc767439dbc1\") "
Jan 22 07:13:36 crc kubenswrapper[4982]: I0122 07:13:36.493043 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82cab6e1-6ef0-479b-a2f1-dc767439dbc1-config\") pod \"82cab6e1-6ef0-479b-a2f1-dc767439dbc1\" (UID: \"82cab6e1-6ef0-479b-a2f1-dc767439dbc1\") "
Jan 22 07:13:36 crc kubenswrapper[4982]: I0122 07:13:36.493107 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/82cab6e1-6ef0-479b-a2f1-dc767439dbc1-dns-svc\") pod \"82cab6e1-6ef0-479b-a2f1-dc767439dbc1\" (UID: \"82cab6e1-6ef0-479b-a2f1-dc767439dbc1\") "
Jan 22 07:13:36 crc kubenswrapper[4982]: I0122 07:13:36.500690 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82cab6e1-6ef0-479b-a2f1-dc767439dbc1-kube-api-access-2mbjm" (OuterVolumeSpecName: "kube-api-access-2mbjm") pod "82cab6e1-6ef0-479b-a2f1-dc767439dbc1" (UID: "82cab6e1-6ef0-479b-a2f1-dc767439dbc1"). InnerVolumeSpecName "kube-api-access-2mbjm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 07:13:36 crc kubenswrapper[4982]: I0122 07:13:36.533732 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82cab6e1-6ef0-479b-a2f1-dc767439dbc1-config" (OuterVolumeSpecName: "config") pod "82cab6e1-6ef0-479b-a2f1-dc767439dbc1" (UID: "82cab6e1-6ef0-479b-a2f1-dc767439dbc1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 07:13:36 crc kubenswrapper[4982]: I0122 07:13:36.533965 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82cab6e1-6ef0-479b-a2f1-dc767439dbc1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "82cab6e1-6ef0-479b-a2f1-dc767439dbc1" (UID: "82cab6e1-6ef0-479b-a2f1-dc767439dbc1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 07:13:36 crc kubenswrapper[4982]: I0122 07:13:36.596138 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2mbjm\" (UniqueName: \"kubernetes.io/projected/82cab6e1-6ef0-479b-a2f1-dc767439dbc1-kube-api-access-2mbjm\") on node \"crc\" DevicePath \"\""
Jan 22 07:13:36 crc kubenswrapper[4982]: I0122 07:13:36.596182 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82cab6e1-6ef0-479b-a2f1-dc767439dbc1-config\") on node \"crc\" DevicePath \"\""
Jan 22 07:13:36 crc kubenswrapper[4982]: I0122 07:13:36.596195 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/82cab6e1-6ef0-479b-a2f1-dc767439dbc1-dns-svc\") on node \"crc\" DevicePath \"\""
Jan 22 07:13:36 crc kubenswrapper[4982]: I0122 07:13:36.962400 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-699964fbc-hkgb4" event={"ID":"82cab6e1-6ef0-479b-a2f1-dc767439dbc1","Type":"ContainerDied","Data":"1dbbe1f4154aed122560806bbeaa32cea06cf319601cd31dae430d7b7b8e3510"}
Jan 22 07:13:36 crc kubenswrapper[4982]: I0122 07:13:36.962492 4982 scope.go:117] "RemoveContainer" containerID="c11c2fccf10eb86424599c0a3032cf869bc1543b03a8a1f69bdd20b3de6e881b"
Jan 22 07:13:36 crc kubenswrapper[4982]: I0122 07:13:36.962526 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-699964fbc-hkgb4"
Jan 22 07:13:36 crc kubenswrapper[4982]: I0122 07:13:36.986652 4982 scope.go:117] "RemoveContainer" containerID="f3643643bdfe1f401604e36359db714d6627d955edefbeaecc26c8077c27d153"
Jan 22 07:13:37 crc kubenswrapper[4982]: I0122 07:13:37.014369 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-699964fbc-hkgb4"]
Jan 22 07:13:37 crc kubenswrapper[4982]: I0122 07:13:37.021616 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-699964fbc-hkgb4"]
Jan 22 07:13:37 crc kubenswrapper[4982]: I0122 07:13:37.730588 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82cab6e1-6ef0-479b-a2f1-dc767439dbc1" path="/var/lib/kubelet/pods/82cab6e1-6ef0-479b-a2f1-dc767439dbc1/volumes"
Jan 22 07:13:39 crc kubenswrapper[4982]: I0122 07:13:39.485594 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"]
Jan 22 07:13:39 crc kubenswrapper[4982]: E0122 07:13:39.486637 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82cab6e1-6ef0-479b-a2f1-dc767439dbc1" containerName="init"
Jan 22 07:13:39 crc kubenswrapper[4982]: I0122 07:13:39.486700 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="82cab6e1-6ef0-479b-a2f1-dc767439dbc1" containerName="init"
Jan 22 07:13:39 crc kubenswrapper[4982]: E0122 07:13:39.486754 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82cab6e1-6ef0-479b-a2f1-dc767439dbc1" containerName="dnsmasq-dns"
Jan 22 07:13:39 crc kubenswrapper[4982]: I0122 07:13:39.486820 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="82cab6e1-6ef0-479b-a2f1-dc767439dbc1" containerName="dnsmasq-dns"
Jan 22 07:13:39 crc kubenswrapper[4982]: I0122 07:13:39.487047 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="82cab6e1-6ef0-479b-a2f1-dc767439dbc1" containerName="dnsmasq-dns"
Jan 22 07:13:39 crc kubenswrapper[4982]: I0122 07:13:39.487922 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Jan 22 07:13:39 crc kubenswrapper[4982]: I0122 07:13:39.490062 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-7brrn"
Jan 22 07:13:39 crc kubenswrapper[4982]: I0122 07:13:39.490220 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts"
Jan 22 07:13:39 crc kubenswrapper[4982]: I0122 07:13:39.490310 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config"
Jan 22 07:13:39 crc kubenswrapper[4982]: I0122 07:13:39.499766 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Jan 22 07:13:39 crc kubenswrapper[4982]: I0122 07:13:39.547940 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63d7dcb7-9570-45b9-8b62-46af00032f19-config\") pod \"ovn-northd-0\" (UID: \"63d7dcb7-9570-45b9-8b62-46af00032f19\") " pod="openstack/ovn-northd-0"
Jan 22 07:13:39 crc kubenswrapper[4982]: I0122 07:13:39.548026 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/63d7dcb7-9570-45b9-8b62-46af00032f19-scripts\") pod \"ovn-northd-0\" (UID: \"63d7dcb7-9570-45b9-8b62-46af00032f19\") " pod="openstack/ovn-northd-0"
Jan 22 07:13:39 crc kubenswrapper[4982]: I0122 07:13:39.548048 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63d7dcb7-9570-45b9-8b62-46af00032f19-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"63d7dcb7-9570-45b9-8b62-46af00032f19\") " pod="openstack/ovn-northd-0"
Jan 22 07:13:39 crc kubenswrapper[4982]: I0122 07:13:39.548070 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/63d7dcb7-9570-45b9-8b62-46af00032f19-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"63d7dcb7-9570-45b9-8b62-46af00032f19\") " pod="openstack/ovn-northd-0"
Jan 22 07:13:39 crc kubenswrapper[4982]: I0122 07:13:39.548211 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rsqcq\" (UniqueName: \"kubernetes.io/projected/63d7dcb7-9570-45b9-8b62-46af00032f19-kube-api-access-rsqcq\") pod \"ovn-northd-0\" (UID: \"63d7dcb7-9570-45b9-8b62-46af00032f19\") " pod="openstack/ovn-northd-0"
Jan 22 07:13:39 crc kubenswrapper[4982]: I0122 07:13:39.649753 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/63d7dcb7-9570-45b9-8b62-46af00032f19-scripts\") pod \"ovn-northd-0\" (UID: \"63d7dcb7-9570-45b9-8b62-46af00032f19\") " pod="openstack/ovn-northd-0"
Jan 22 07:13:39 crc kubenswrapper[4982]: I0122 07:13:39.649800 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63d7dcb7-9570-45b9-8b62-46af00032f19-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"63d7dcb7-9570-45b9-8b62-46af00032f19\") " pod="openstack/ovn-northd-0"
Jan 22 07:13:39 crc kubenswrapper[4982]: I0122 07:13:39.649822 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/63d7dcb7-9570-45b9-8b62-46af00032f19-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"63d7dcb7-9570-45b9-8b62-46af00032f19\") " pod="openstack/ovn-northd-0"
Jan 22 07:13:39 crc kubenswrapper[4982]: I0122 07:13:39.649897 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rsqcq\" (UniqueName: \"kubernetes.io/projected/63d7dcb7-9570-45b9-8b62-46af00032f19-kube-api-access-rsqcq\") pod \"ovn-northd-0\" (UID: \"63d7dcb7-9570-45b9-8b62-46af00032f19\") " pod="openstack/ovn-northd-0"
Jan 22 07:13:39 crc kubenswrapper[4982]: I0122 07:13:39.649956 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63d7dcb7-9570-45b9-8b62-46af00032f19-config\") pod \"ovn-northd-0\" (UID: \"63d7dcb7-9570-45b9-8b62-46af00032f19\") " pod="openstack/ovn-northd-0"
Jan 22 07:13:39 crc kubenswrapper[4982]: I0122 07:13:39.650511 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/63d7dcb7-9570-45b9-8b62-46af00032f19-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"63d7dcb7-9570-45b9-8b62-46af00032f19\") " pod="openstack/ovn-northd-0"
Jan 22 07:13:39 crc kubenswrapper[4982]: I0122 07:13:39.650831 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/63d7dcb7-9570-45b9-8b62-46af00032f19-scripts\") pod \"ovn-northd-0\" (UID: \"63d7dcb7-9570-45b9-8b62-46af00032f19\") " pod="openstack/ovn-northd-0"
Jan 22 07:13:39 crc kubenswrapper[4982]: I0122 07:13:39.650910 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63d7dcb7-9570-45b9-8b62-46af00032f19-config\") pod \"ovn-northd-0\" (UID: \"63d7dcb7-9570-45b9-8b62-46af00032f19\") " pod="openstack/ovn-northd-0"
Jan 22 07:13:39 crc kubenswrapper[4982]: I0122 07:13:39.656396 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63d7dcb7-9570-45b9-8b62-46af00032f19-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"63d7dcb7-9570-45b9-8b62-46af00032f19\") " pod="openstack/ovn-northd-0"
Jan 22 07:13:39 crc kubenswrapper[4982]: I0122 07:13:39.670107 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rsqcq\" (UniqueName: \"kubernetes.io/projected/63d7dcb7-9570-45b9-8b62-46af00032f19-kube-api-access-rsqcq\") pod \"ovn-northd-0\" (UID: \"63d7dcb7-9570-45b9-8b62-46af00032f19\") " pod="openstack/ovn-northd-0"
Jan 22 07:13:39 crc kubenswrapper[4982]: I0122 07:13:39.819295 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-7brrn"
Jan 22 07:13:39 crc kubenswrapper[4982]: I0122 07:13:39.828232 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Jan 22 07:13:40 crc kubenswrapper[4982]: I0122 07:13:40.280333 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Jan 22 07:13:40 crc kubenswrapper[4982]: W0122 07:13:40.288081 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod63d7dcb7_9570_45b9_8b62_46af00032f19.slice/crio-d8d94f32f432485bdf88be727ad7b3c9172f3fa77c1f553d4e1b861ecab8ccbc WatchSource:0}: Error finding container d8d94f32f432485bdf88be727ad7b3c9172f3fa77c1f553d4e1b861ecab8ccbc: Status 404 returned error can't find the container with id d8d94f32f432485bdf88be727ad7b3c9172f3fa77c1f553d4e1b861ecab8ccbc
Jan 22 07:13:41 crc kubenswrapper[4982]: I0122 07:13:41.010323 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"63d7dcb7-9570-45b9-8b62-46af00032f19","Type":"ContainerStarted","Data":"6a8671dfcb23a1a1243407251410c1502538dc22faf851323eec8f3f5e1afb2a"}
Jan 22 07:13:41 crc kubenswrapper[4982]: I0122 07:13:41.010884 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"63d7dcb7-9570-45b9-8b62-46af00032f19","Type":"ContainerStarted","Data":"54b00ae0448f43941fbd73972fc7b4f5071d89b1840fca4e3f046e260626d902"}
Jan 22 07:13:41 crc kubenswrapper[4982]: I0122 07:13:41.010927 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0"
Jan 22 07:13:41 crc kubenswrapper[4982]: I0122 07:13:41.011185 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"63d7dcb7-9570-45b9-8b62-46af00032f19","Type":"ContainerStarted","Data":"d8d94f32f432485bdf88be727ad7b3c9172f3fa77c1f553d4e1b861ecab8ccbc"}
Jan 22 07:13:41 crc kubenswrapper[4982]: I0122 07:13:41.038652 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.038632679 podStartE2EDuration="2.038632679s" podCreationTimestamp="2026-01-22 07:13:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:13:41.032957086 +0000 UTC m=+5281.871595119" watchObservedRunningTime="2026-01-22 07:13:41.038632679 +0000 UTC m=+5281.877270682"
Jan 22 07:13:44 crc kubenswrapper[4982]: I0122 07:13:44.596736 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-v9kjh"]
Jan 22 07:13:44 crc kubenswrapper[4982]: I0122 07:13:44.598090 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-v9kjh"
Jan 22 07:13:44 crc kubenswrapper[4982]: I0122 07:13:44.613086 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-93eb-account-create-update-zhh7w"]
Jan 22 07:13:44 crc kubenswrapper[4982]: I0122 07:13:44.614202 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-93eb-account-create-update-zhh7w"
Jan 22 07:13:44 crc kubenswrapper[4982]: I0122 07:13:44.616554 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret"
Jan 22 07:13:44 crc kubenswrapper[4982]: I0122 07:13:44.620195 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-v9kjh"]
Jan 22 07:13:44 crc kubenswrapper[4982]: I0122 07:13:44.625557 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/169e4b4a-523d-4bef-b648-a7275adac026-operator-scripts\") pod \"keystone-db-create-v9kjh\" (UID: \"169e4b4a-523d-4bef-b648-a7275adac026\") " pod="openstack/keystone-db-create-v9kjh"
Jan 22 07:13:44 crc kubenswrapper[4982]: I0122 07:13:44.625632 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgqxv\" (UniqueName: \"kubernetes.io/projected/169e4b4a-523d-4bef-b648-a7275adac026-kube-api-access-sgqxv\") pod \"keystone-db-create-v9kjh\" (UID: \"169e4b4a-523d-4bef-b648-a7275adac026\") " pod="openstack/keystone-db-create-v9kjh"
Jan 22 07:13:44 crc kubenswrapper[4982]: I0122 07:13:44.625745 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c46f7e2b-0906-4ac7-8ebd-7f0613495430-operator-scripts\") pod \"keystone-93eb-account-create-update-zhh7w\" (UID: \"c46f7e2b-0906-4ac7-8ebd-7f0613495430\") " pod="openstack/keystone-93eb-account-create-update-zhh7w"
Jan 22 07:13:44 crc kubenswrapper[4982]: I0122 07:13:44.625764 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lp2kd\" (UniqueName: \"kubernetes.io/projected/c46f7e2b-0906-4ac7-8ebd-7f0613495430-kube-api-access-lp2kd\") pod \"keystone-93eb-account-create-update-zhh7w\" (UID: \"c46f7e2b-0906-4ac7-8ebd-7f0613495430\") " pod="openstack/keystone-93eb-account-create-update-zhh7w"
Jan 22 07:13:44 crc kubenswrapper[4982]: I0122 07:13:44.649154 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-93eb-account-create-update-zhh7w"]
Jan 22 07:13:44 crc kubenswrapper[4982]: I0122 07:13:44.727036 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/169e4b4a-523d-4bef-b648-a7275adac026-operator-scripts\") pod \"keystone-db-create-v9kjh\" (UID: \"169e4b4a-523d-4bef-b648-a7275adac026\") " pod="openstack/keystone-db-create-v9kjh"
Jan 22 07:13:44 crc kubenswrapper[4982]: I0122 07:13:44.727118 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgqxv\" (UniqueName: \"kubernetes.io/projected/169e4b4a-523d-4bef-b648-a7275adac026-kube-api-access-sgqxv\") pod \"keystone-db-create-v9kjh\" (UID: \"169e4b4a-523d-4bef-b648-a7275adac026\") " pod="openstack/keystone-db-create-v9kjh"
Jan 22 07:13:44 crc kubenswrapper[4982]: I0122 07:13:44.727167 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c46f7e2b-0906-4ac7-8ebd-7f0613495430-operator-scripts\") pod \"keystone-93eb-account-create-update-zhh7w\" (UID: \"c46f7e2b-0906-4ac7-8ebd-7f0613495430\") " pod="openstack/keystone-93eb-account-create-update-zhh7w"
Jan 22 07:13:44 crc kubenswrapper[4982]: I0122 07:13:44.727185 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lp2kd\" (UniqueName: \"kubernetes.io/projected/c46f7e2b-0906-4ac7-8ebd-7f0613495430-kube-api-access-lp2kd\") pod \"keystone-93eb-account-create-update-zhh7w\" (UID: \"c46f7e2b-0906-4ac7-8ebd-7f0613495430\") " pod="openstack/keystone-93eb-account-create-update-zhh7w"
Jan 22 07:13:44 crc kubenswrapper[4982]: I0122 07:13:44.728338 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/169e4b4a-523d-4bef-b648-a7275adac026-operator-scripts\") pod \"keystone-db-create-v9kjh\" (UID: \"169e4b4a-523d-4bef-b648-a7275adac026\") " pod="openstack/keystone-db-create-v9kjh"
Jan 22 07:13:44 crc kubenswrapper[4982]: I0122 07:13:44.728910 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c46f7e2b-0906-4ac7-8ebd-7f0613495430-operator-scripts\") pod \"keystone-93eb-account-create-update-zhh7w\" (UID: \"c46f7e2b-0906-4ac7-8ebd-7f0613495430\") " pod="openstack/keystone-93eb-account-create-update-zhh7w"
Jan 22 07:13:44 crc kubenswrapper[4982]: I0122 07:13:44.745805 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lp2kd\" (UniqueName: \"kubernetes.io/projected/c46f7e2b-0906-4ac7-8ebd-7f0613495430-kube-api-access-lp2kd\") pod \"keystone-93eb-account-create-update-zhh7w\" (UID: \"c46f7e2b-0906-4ac7-8ebd-7f0613495430\") " pod="openstack/keystone-93eb-account-create-update-zhh7w"
Jan 22 07:13:44 crc kubenswrapper[4982]: I0122 07:13:44.746068 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgqxv\" (UniqueName: \"kubernetes.io/projected/169e4b4a-523d-4bef-b648-a7275adac026-kube-api-access-sgqxv\") pod \"keystone-db-create-v9kjh\" (UID: \"169e4b4a-523d-4bef-b648-a7275adac026\") " pod="openstack/keystone-db-create-v9kjh"
Jan 22 07:13:44 crc kubenswrapper[4982]: I0122 07:13:44.917368 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-v9kjh"
Jan 22 07:13:44 crc kubenswrapper[4982]: I0122 07:13:44.938517 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-93eb-account-create-update-zhh7w"
Jan 22 07:13:45 crc kubenswrapper[4982]: I0122 07:13:45.382958 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-93eb-account-create-update-zhh7w"]
Jan 22 07:13:45 crc kubenswrapper[4982]: I0122 07:13:45.461519 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-v9kjh"]
Jan 22 07:13:45 crc kubenswrapper[4982]: W0122 07:13:45.468100 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod169e4b4a_523d_4bef_b648_a7275adac026.slice/crio-1a3d7a4c57dc57551c1b8085f39147658d390efab985d23c37494c14f34ff976 WatchSource:0}: Error finding container 1a3d7a4c57dc57551c1b8085f39147658d390efab985d23c37494c14f34ff976: Status 404 returned error can't find the container with id 1a3d7a4c57dc57551c1b8085f39147658d390efab985d23c37494c14f34ff976
Jan 22 07:13:46 crc kubenswrapper[4982]: I0122 07:13:46.075018 4982 generic.go:334] "Generic (PLEG): container finished" podID="169e4b4a-523d-4bef-b648-a7275adac026" containerID="0134f1ba61dc82f7c6e6b6fdc6b427144bf07b978d7b1049fd7edf7ae7b627e7" exitCode=0
Jan 22 07:13:46 crc kubenswrapper[4982]: I0122 07:13:46.075449 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-v9kjh" event={"ID":"169e4b4a-523d-4bef-b648-a7275adac026","Type":"ContainerDied","Data":"0134f1ba61dc82f7c6e6b6fdc6b427144bf07b978d7b1049fd7edf7ae7b627e7"}
Jan 22 07:13:46 crc kubenswrapper[4982]: I0122 07:13:46.075493 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-v9kjh" event={"ID":"169e4b4a-523d-4bef-b648-a7275adac026","Type":"ContainerStarted","Data":"1a3d7a4c57dc57551c1b8085f39147658d390efab985d23c37494c14f34ff976"}
Jan 22 07:13:46 crc kubenswrapper[4982]: I0122 07:13:46.079079 4982 generic.go:334] "Generic (PLEG): container finished" podID="c46f7e2b-0906-4ac7-8ebd-7f0613495430" containerID="aa9ec77e471a75f91680f04cf86fd9df10e0a53d15bd1850cc5e513c9d7b51d2" exitCode=0
Jan 22 07:13:46 crc kubenswrapper[4982]: I0122 07:13:46.079124 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-93eb-account-create-update-zhh7w" event={"ID":"c46f7e2b-0906-4ac7-8ebd-7f0613495430","Type":"ContainerDied","Data":"aa9ec77e471a75f91680f04cf86fd9df10e0a53d15bd1850cc5e513c9d7b51d2"}
Jan 22 07:13:46 crc kubenswrapper[4982]: I0122 07:13:46.079149 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-93eb-account-create-update-zhh7w" event={"ID":"c46f7e2b-0906-4ac7-8ebd-7f0613495430","Type":"ContainerStarted","Data":"27d3292a29a83733644e053cee9e726138c613189d0dfa78666356a558dd05cd"}
Jan 22 07:13:47 crc kubenswrapper[4982]: I0122 07:13:47.461557 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-v9kjh"
Jan 22 07:13:47 crc kubenswrapper[4982]: I0122 07:13:47.462716 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-93eb-account-create-update-zhh7w"
Jan 22 07:13:47 crc kubenswrapper[4982]: I0122 07:13:47.576168 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/169e4b4a-523d-4bef-b648-a7275adac026-operator-scripts\") pod \"169e4b4a-523d-4bef-b648-a7275adac026\" (UID: \"169e4b4a-523d-4bef-b648-a7275adac026\") "
Jan 22 07:13:47 crc kubenswrapper[4982]: I0122 07:13:47.576227 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c46f7e2b-0906-4ac7-8ebd-7f0613495430-operator-scripts\") pod \"c46f7e2b-0906-4ac7-8ebd-7f0613495430\" (UID: \"c46f7e2b-0906-4ac7-8ebd-7f0613495430\") "
Jan 22 07:13:47 crc kubenswrapper[4982]: I0122 07:13:47.576338 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sgqxv\" (UniqueName: \"kubernetes.io/projected/169e4b4a-523d-4bef-b648-a7275adac026-kube-api-access-sgqxv\") pod \"169e4b4a-523d-4bef-b648-a7275adac026\" (UID: \"169e4b4a-523d-4bef-b648-a7275adac026\") "
Jan 22 07:13:47 crc kubenswrapper[4982]: I0122 07:13:47.576438 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lp2kd\" (UniqueName: \"kubernetes.io/projected/c46f7e2b-0906-4ac7-8ebd-7f0613495430-kube-api-access-lp2kd\") pod \"c46f7e2b-0906-4ac7-8ebd-7f0613495430\" (UID: \"c46f7e2b-0906-4ac7-8ebd-7f0613495430\") "
Jan 22 07:13:47 crc kubenswrapper[4982]: I0122 07:13:47.576946 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/169e4b4a-523d-4bef-b648-a7275adac026-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "169e4b4a-523d-4bef-b648-a7275adac026" (UID: "169e4b4a-523d-4bef-b648-a7275adac026"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 07:13:47 crc kubenswrapper[4982]: I0122 07:13:47.577026 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c46f7e2b-0906-4ac7-8ebd-7f0613495430-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c46f7e2b-0906-4ac7-8ebd-7f0613495430" (UID: "c46f7e2b-0906-4ac7-8ebd-7f0613495430"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 07:13:47 crc kubenswrapper[4982]: I0122 07:13:47.583147 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/169e4b4a-523d-4bef-b648-a7275adac026-kube-api-access-sgqxv" (OuterVolumeSpecName: "kube-api-access-sgqxv") pod "169e4b4a-523d-4bef-b648-a7275adac026" (UID: "169e4b4a-523d-4bef-b648-a7275adac026"). InnerVolumeSpecName "kube-api-access-sgqxv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 07:13:47 crc kubenswrapper[4982]: I0122 07:13:47.583523 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c46f7e2b-0906-4ac7-8ebd-7f0613495430-kube-api-access-lp2kd" (OuterVolumeSpecName: "kube-api-access-lp2kd") pod "c46f7e2b-0906-4ac7-8ebd-7f0613495430" (UID: "c46f7e2b-0906-4ac7-8ebd-7f0613495430"). InnerVolumeSpecName "kube-api-access-lp2kd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 07:13:47 crc kubenswrapper[4982]: I0122 07:13:47.678188 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sgqxv\" (UniqueName: \"kubernetes.io/projected/169e4b4a-523d-4bef-b648-a7275adac026-kube-api-access-sgqxv\") on node \"crc\" DevicePath \"\""
Jan 22 07:13:47 crc kubenswrapper[4982]: I0122 07:13:47.678226 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lp2kd\" (UniqueName: \"kubernetes.io/projected/c46f7e2b-0906-4ac7-8ebd-7f0613495430-kube-api-access-lp2kd\") on node \"crc\" DevicePath \"\""
Jan 22 07:13:47 crc kubenswrapper[4982]: I0122 07:13:47.678238 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/169e4b4a-523d-4bef-b648-a7275adac026-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 07:13:47 crc kubenswrapper[4982]: I0122 07:13:47.678248 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c46f7e2b-0906-4ac7-8ebd-7f0613495430-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 07:13:48 crc kubenswrapper[4982]: I0122 07:13:48.094120 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-v9kjh"
Jan 22 07:13:48 crc kubenswrapper[4982]: I0122 07:13:48.094117 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-v9kjh" event={"ID":"169e4b4a-523d-4bef-b648-a7275adac026","Type":"ContainerDied","Data":"1a3d7a4c57dc57551c1b8085f39147658d390efab985d23c37494c14f34ff976"}
Jan 22 07:13:48 crc kubenswrapper[4982]: I0122 07:13:48.094546 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1a3d7a4c57dc57551c1b8085f39147658d390efab985d23c37494c14f34ff976"
Jan 22 07:13:48 crc kubenswrapper[4982]: I0122 07:13:48.097163 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-93eb-account-create-update-zhh7w" event={"ID":"c46f7e2b-0906-4ac7-8ebd-7f0613495430","Type":"ContainerDied","Data":"27d3292a29a83733644e053cee9e726138c613189d0dfa78666356a558dd05cd"}
Jan 22 07:13:48 crc kubenswrapper[4982]: I0122 07:13:48.097201 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="27d3292a29a83733644e053cee9e726138c613189d0dfa78666356a558dd05cd"
Jan 22 07:13:48 crc kubenswrapper[4982]: I0122 07:13:48.097255 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-93eb-account-create-update-zhh7w"
Jan 22 07:13:48 crc kubenswrapper[4982]: I0122 07:13:48.974406 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 07:13:48 crc kubenswrapper[4982]: I0122 07:13:48.974467 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 07:13:48 crc kubenswrapper[4982]: I0122 07:13:48.974510 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx"
Jan 22 07:13:48 crc kubenswrapper[4982]: I0122 07:13:48.975148 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"dd1b1f68267d9d0fc8f31fc896f788451cccffbbd47d3265a31a9c82c393becc"} pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 22 07:13:48 crc kubenswrapper[4982]: I0122 07:13:48.975197 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" containerID="cri-o://dd1b1f68267d9d0fc8f31fc896f788451cccffbbd47d3265a31a9c82c393becc" gracePeriod=600
Jan 22 07:13:50 crc kubenswrapper[4982]: I0122 07:13:50.089678 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-cht8h"]
Jan 22 07:13:50 crc kubenswrapper[4982]: E0122 07:13:50.090497 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="169e4b4a-523d-4bef-b648-a7275adac026" containerName="mariadb-database-create"
Jan 22 07:13:50 crc kubenswrapper[4982]: I0122 07:13:50.090522 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="169e4b4a-523d-4bef-b648-a7275adac026" containerName="mariadb-database-create"
Jan 22 07:13:50 crc kubenswrapper[4982]: E0122 07:13:50.090563 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c46f7e2b-0906-4ac7-8ebd-7f0613495430" containerName="mariadb-account-create-update"
Jan 22 07:13:50 crc kubenswrapper[4982]: I0122 07:13:50.090574 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="c46f7e2b-0906-4ac7-8ebd-7f0613495430" containerName="mariadb-account-create-update"
Jan 22 07:13:50 crc kubenswrapper[4982]: I0122 07:13:50.090892 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="c46f7e2b-0906-4ac7-8ebd-7f0613495430" containerName="mariadb-account-create-update"
Jan 22 07:13:50 crc kubenswrapper[4982]: I0122 07:13:50.090924 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="169e4b4a-523d-4bef-b648-a7275adac026" containerName="mariadb-database-create"
Jan 22 07:13:50 crc kubenswrapper[4982]: I0122 07:13:50.091697 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-cht8h"
Jan 22 07:13:50 crc kubenswrapper[4982]: I0122 07:13:50.094103 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Jan 22 07:13:50 crc kubenswrapper[4982]: I0122 07:13:50.094562 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Jan 22 07:13:50 crc kubenswrapper[4982]: I0122 07:13:50.095097 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-pjrrz"
Jan 22 07:13:50 crc kubenswrapper[4982]: I0122 07:13:50.105109 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-cht8h"]
Jan 22 07:13:50 crc kubenswrapper[4982]: I0122 07:13:50.106767 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Jan 22 07:13:50 crc kubenswrapper[4982]: I0122 07:13:50.221913 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa7ce16f-0a3f-4a8b-9684-271e228ad1e3-combined-ca-bundle\") pod \"keystone-db-sync-cht8h\" (UID: \"fa7ce16f-0a3f-4a8b-9684-271e228ad1e3\") " pod="openstack/keystone-db-sync-cht8h"
Jan 22 07:13:50 crc kubenswrapper[4982]: I0122 07:13:50.222282 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7mkv\" (UniqueName: \"kubernetes.io/projected/fa7ce16f-0a3f-4a8b-9684-271e228ad1e3-kube-api-access-h7mkv\") pod \"keystone-db-sync-cht8h\" (UID: \"fa7ce16f-0a3f-4a8b-9684-271e228ad1e3\") " pod="openstack/keystone-db-sync-cht8h"
Jan 22 07:13:50 crc kubenswrapper[4982]: I0122 07:13:50.222620 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa7ce16f-0a3f-4a8b-9684-271e228ad1e3-config-data\") pod \"keystone-db-sync-cht8h\" (UID: \"fa7ce16f-0a3f-4a8b-9684-271e228ad1e3\") " pod="openstack/keystone-db-sync-cht8h"
Jan 22 07:13:50 crc kubenswrapper[4982]: I0122 07:13:50.325109 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7mkv\" (UniqueName: \"kubernetes.io/projected/fa7ce16f-0a3f-4a8b-9684-271e228ad1e3-kube-api-access-h7mkv\") pod \"keystone-db-sync-cht8h\" (UID: \"fa7ce16f-0a3f-4a8b-9684-271e228ad1e3\") " pod="openstack/keystone-db-sync-cht8h"
Jan 22 07:13:50 crc kubenswrapper[4982]: I0122 07:13:50.325259 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa7ce16f-0a3f-4a8b-9684-271e228ad1e3-config-data\") pod \"keystone-db-sync-cht8h\" (UID: \"fa7ce16f-0a3f-4a8b-9684-271e228ad1e3\") " pod="openstack/keystone-db-sync-cht8h"
Jan 22 07:13:50 crc kubenswrapper[4982]: I0122 07:13:50.325360 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa7ce16f-0a3f-4a8b-9684-271e228ad1e3-combined-ca-bundle\") pod \"keystone-db-sync-cht8h\" (UID: \"fa7ce16f-0a3f-4a8b-9684-271e228ad1e3\") " pod="openstack/keystone-db-sync-cht8h"
Jan 22 07:13:50 crc kubenswrapper[4982]: I0122 07:13:50.331947 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa7ce16f-0a3f-4a8b-9684-271e228ad1e3-combined-ca-bundle\") pod \"keystone-db-sync-cht8h\" (UID: \"fa7ce16f-0a3f-4a8b-9684-271e228ad1e3\") " pod="openstack/keystone-db-sync-cht8h"
Jan 22 07:13:50 crc kubenswrapper[4982]: I0122 07:13:50.338709 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa7ce16f-0a3f-4a8b-9684-271e228ad1e3-config-data\") pod \"keystone-db-sync-cht8h\" (UID: \"fa7ce16f-0a3f-4a8b-9684-271e228ad1e3\") " pod="openstack/keystone-db-sync-cht8h"
Jan 22 07:13:50 crc kubenswrapper[4982]: I0122 07:13:50.341443 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7mkv\" (UniqueName: \"kubernetes.io/projected/fa7ce16f-0a3f-4a8b-9684-271e228ad1e3-kube-api-access-h7mkv\") pod \"keystone-db-sync-cht8h\" (UID: \"fa7ce16f-0a3f-4a8b-9684-271e228ad1e3\") " pod="openstack/keystone-db-sync-cht8h"
Jan 22 07:13:50 crc kubenswrapper[4982]: I0122 07:13:50.418488 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-cht8h"
Jan 22 07:13:50 crc kubenswrapper[4982]: I0122 07:13:50.900462 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-cht8h"]
Jan 22 07:13:51 crc kubenswrapper[4982]: I0122 07:13:51.121841 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-cht8h" event={"ID":"fa7ce16f-0a3f-4a8b-9684-271e228ad1e3","Type":"ContainerStarted","Data":"871cd14dafbea7cc968470104b91d24242734928d1842d115650a4edee27fdc1"}
Jan 22 07:13:51 crc kubenswrapper[4982]: I0122 07:13:51.126126 4982 generic.go:334] "Generic (PLEG): container finished" podID="2829369e-72ba-4637-853b-88f5cf242a0e" containerID="dd1b1f68267d9d0fc8f31fc896f788451cccffbbd47d3265a31a9c82c393becc" exitCode=0
Jan 22 07:13:51 crc kubenswrapper[4982]: I0122 07:13:51.126179 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerDied","Data":"dd1b1f68267d9d0fc8f31fc896f788451cccffbbd47d3265a31a9c82c393becc"}
Jan 22 07:13:51 crc kubenswrapper[4982]: I0122 07:13:51.126354 4982 scope.go:117] "RemoveContainer" containerID="ed00d36a55987051b22728819c2b04fda1364b352be83e13ac9968d0431cf25b"
Jan 22 07:13:51 crc kubenswrapper[4982]: E0122 07:13:51.531171 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 07:13:52 crc kubenswrapper[4982]: I0122 07:13:52.137203 4982 scope.go:117] "RemoveContainer" containerID="dd1b1f68267d9d0fc8f31fc896f788451cccffbbd47d3265a31a9c82c393becc"
Jan 22 07:13:52 crc kubenswrapper[4982]: E0122 07:13:52.137585 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 07:13:52 crc kubenswrapper[4982]: I0122 07:13:52.140160 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-cht8h"
event={"ID":"fa7ce16f-0a3f-4a8b-9684-271e228ad1e3","Type":"ContainerStarted","Data":"bd6e4fb9a9bd45d2f71b866e0ce285be9106062b4a5d29e6d4e3df137c42f8a8"} Jan 22 07:13:52 crc kubenswrapper[4982]: I0122 07:13:52.190946 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-cht8h" podStartSLOduration=2.190923172 podStartE2EDuration="2.190923172s" podCreationTimestamp="2026-01-22 07:13:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:13:52.18116502 +0000 UTC m=+5293.019803023" watchObservedRunningTime="2026-01-22 07:13:52.190923172 +0000 UTC m=+5293.029561195" Jan 22 07:13:54 crc kubenswrapper[4982]: I0122 07:13:54.166319 4982 generic.go:334] "Generic (PLEG): container finished" podID="fa7ce16f-0a3f-4a8b-9684-271e228ad1e3" containerID="bd6e4fb9a9bd45d2f71b866e0ce285be9106062b4a5d29e6d4e3df137c42f8a8" exitCode=0 Jan 22 07:13:54 crc kubenswrapper[4982]: I0122 07:13:54.166366 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-cht8h" event={"ID":"fa7ce16f-0a3f-4a8b-9684-271e228ad1e3","Type":"ContainerDied","Data":"bd6e4fb9a9bd45d2f71b866e0ce285be9106062b4a5d29e6d4e3df137c42f8a8"} Jan 22 07:13:54 crc kubenswrapper[4982]: I0122 07:13:54.885084 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Jan 22 07:13:55 crc kubenswrapper[4982]: I0122 07:13:55.510777 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-cht8h" Jan 22 07:13:55 crc kubenswrapper[4982]: I0122 07:13:55.616212 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa7ce16f-0a3f-4a8b-9684-271e228ad1e3-config-data\") pod \"fa7ce16f-0a3f-4a8b-9684-271e228ad1e3\" (UID: \"fa7ce16f-0a3f-4a8b-9684-271e228ad1e3\") " Jan 22 07:13:55 crc kubenswrapper[4982]: I0122 07:13:55.616293 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7mkv\" (UniqueName: \"kubernetes.io/projected/fa7ce16f-0a3f-4a8b-9684-271e228ad1e3-kube-api-access-h7mkv\") pod \"fa7ce16f-0a3f-4a8b-9684-271e228ad1e3\" (UID: \"fa7ce16f-0a3f-4a8b-9684-271e228ad1e3\") " Jan 22 07:13:55 crc kubenswrapper[4982]: I0122 07:13:55.616356 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa7ce16f-0a3f-4a8b-9684-271e228ad1e3-combined-ca-bundle\") pod \"fa7ce16f-0a3f-4a8b-9684-271e228ad1e3\" (UID: \"fa7ce16f-0a3f-4a8b-9684-271e228ad1e3\") " Jan 22 07:13:55 crc kubenswrapper[4982]: I0122 07:13:55.625194 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa7ce16f-0a3f-4a8b-9684-271e228ad1e3-kube-api-access-h7mkv" (OuterVolumeSpecName: "kube-api-access-h7mkv") pod "fa7ce16f-0a3f-4a8b-9684-271e228ad1e3" (UID: "fa7ce16f-0a3f-4a8b-9684-271e228ad1e3"). InnerVolumeSpecName "kube-api-access-h7mkv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:13:55 crc kubenswrapper[4982]: I0122 07:13:55.638086 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa7ce16f-0a3f-4a8b-9684-271e228ad1e3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fa7ce16f-0a3f-4a8b-9684-271e228ad1e3" (UID: "fa7ce16f-0a3f-4a8b-9684-271e228ad1e3"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:13:55 crc kubenswrapper[4982]: I0122 07:13:55.665284 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa7ce16f-0a3f-4a8b-9684-271e228ad1e3-config-data" (OuterVolumeSpecName: "config-data") pod "fa7ce16f-0a3f-4a8b-9684-271e228ad1e3" (UID: "fa7ce16f-0a3f-4a8b-9684-271e228ad1e3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:13:55 crc kubenswrapper[4982]: I0122 07:13:55.718582 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa7ce16f-0a3f-4a8b-9684-271e228ad1e3-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:13:55 crc kubenswrapper[4982]: I0122 07:13:55.718616 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7mkv\" (UniqueName: \"kubernetes.io/projected/fa7ce16f-0a3f-4a8b-9684-271e228ad1e3-kube-api-access-h7mkv\") on node \"crc\" DevicePath \"\"" Jan 22 07:13:55 crc kubenswrapper[4982]: I0122 07:13:55.718627 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa7ce16f-0a3f-4a8b-9684-271e228ad1e3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:13:56 crc kubenswrapper[4982]: I0122 07:13:56.183148 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-cht8h" event={"ID":"fa7ce16f-0a3f-4a8b-9684-271e228ad1e3","Type":"ContainerDied","Data":"871cd14dafbea7cc968470104b91d24242734928d1842d115650a4edee27fdc1"} Jan 22 07:13:56 crc kubenswrapper[4982]: I0122 07:13:56.183217 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="871cd14dafbea7cc968470104b91d24242734928d1842d115650a4edee27fdc1" Jan 22 07:13:56 crc kubenswrapper[4982]: I0122 07:13:56.183246 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-cht8h" Jan 22 07:13:56 crc kubenswrapper[4982]: I0122 07:13:56.856629 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-756f47c947-jbv27"] Jan 22 07:13:56 crc kubenswrapper[4982]: E0122 07:13:56.856930 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa7ce16f-0a3f-4a8b-9684-271e228ad1e3" containerName="keystone-db-sync" Jan 22 07:13:56 crc kubenswrapper[4982]: I0122 07:13:56.856942 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa7ce16f-0a3f-4a8b-9684-271e228ad1e3" containerName="keystone-db-sync" Jan 22 07:13:56 crc kubenswrapper[4982]: I0122 07:13:56.857097 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa7ce16f-0a3f-4a8b-9684-271e228ad1e3" containerName="keystone-db-sync" Jan 22 07:13:56 crc kubenswrapper[4982]: I0122 07:13:56.857859 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-756f47c947-jbv27" Jan 22 07:13:56 crc kubenswrapper[4982]: I0122 07:13:56.880460 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-756f47c947-jbv27"] Jan 22 07:13:56 crc kubenswrapper[4982]: I0122 07:13:56.894315 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-hqrtl"] Jan 22 07:13:56 crc kubenswrapper[4982]: I0122 07:13:56.895328 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-hqrtl" Jan 22 07:13:56 crc kubenswrapper[4982]: I0122 07:13:56.909945 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 22 07:13:56 crc kubenswrapper[4982]: I0122 07:13:56.910082 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 22 07:13:56 crc kubenswrapper[4982]: I0122 07:13:56.910115 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 22 07:13:56 crc kubenswrapper[4982]: I0122 07:13:56.910241 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Jan 22 07:13:56 crc kubenswrapper[4982]: I0122 07:13:56.910369 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-pjrrz" Jan 22 07:13:56 crc kubenswrapper[4982]: I0122 07:13:56.938967 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d96b74b5-6a64-4631-b1b1-c0bc3f173606-dns-svc\") pod \"dnsmasq-dns-756f47c947-jbv27\" (UID: \"d96b74b5-6a64-4631-b1b1-c0bc3f173606\") " pod="openstack/dnsmasq-dns-756f47c947-jbv27" Jan 22 07:13:56 crc kubenswrapper[4982]: I0122 07:13:56.939022 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vsm8s\" (UniqueName: \"kubernetes.io/projected/d96b74b5-6a64-4631-b1b1-c0bc3f173606-kube-api-access-vsm8s\") pod \"dnsmasq-dns-756f47c947-jbv27\" (UID: \"d96b74b5-6a64-4631-b1b1-c0bc3f173606\") " pod="openstack/dnsmasq-dns-756f47c947-jbv27" Jan 22 07:13:56 crc kubenswrapper[4982]: I0122 07:13:56.939050 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d96b74b5-6a64-4631-b1b1-c0bc3f173606-config\") pod \"dnsmasq-dns-756f47c947-jbv27\" (UID: \"d96b74b5-6a64-4631-b1b1-c0bc3f173606\") " pod="openstack/dnsmasq-dns-756f47c947-jbv27" Jan 22 07:13:56 crc kubenswrapper[4982]: I0122 07:13:56.939103 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d96b74b5-6a64-4631-b1b1-c0bc3f173606-ovsdbserver-nb\") pod \"dnsmasq-dns-756f47c947-jbv27\" (UID: \"d96b74b5-6a64-4631-b1b1-c0bc3f173606\") " pod="openstack/dnsmasq-dns-756f47c947-jbv27" Jan 22 07:13:56 crc kubenswrapper[4982]: I0122 07:13:56.939146 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d96b74b5-6a64-4631-b1b1-c0bc3f173606-ovsdbserver-sb\") pod \"dnsmasq-dns-756f47c947-jbv27\" (UID: \"d96b74b5-6a64-4631-b1b1-c0bc3f173606\") " pod="openstack/dnsmasq-dns-756f47c947-jbv27" Jan 22 07:13:56 crc kubenswrapper[4982]: I0122 07:13:56.957676 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-hqrtl"] Jan 22 07:13:57 crc kubenswrapper[4982]: I0122 07:13:57.040280 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-scripts\") pod \"keystone-bootstrap-hqrtl\" (UID: \"8d5ff8d6-0319-486b-9e8e-e12e48e2756b\") " pod="openstack/keystone-bootstrap-hqrtl" Jan 22 07:13:57 crc kubenswrapper[4982]: I0122 07:13:57.040328 4982 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d96b74b5-6a64-4631-b1b1-c0bc3f173606-dns-svc\") pod \"dnsmasq-dns-756f47c947-jbv27\" (UID: \"d96b74b5-6a64-4631-b1b1-c0bc3f173606\") " pod="openstack/dnsmasq-dns-756f47c947-jbv27" Jan 22 07:13:57 crc kubenswrapper[4982]: I0122 07:13:57.040361 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-credential-keys\") pod \"keystone-bootstrap-hqrtl\" (UID: \"8d5ff8d6-0319-486b-9e8e-e12e48e2756b\") " pod="openstack/keystone-bootstrap-hqrtl" Jan 22 07:13:57 crc kubenswrapper[4982]: I0122 07:13:57.040381 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vsm8s\" (UniqueName: \"kubernetes.io/projected/d96b74b5-6a64-4631-b1b1-c0bc3f173606-kube-api-access-vsm8s\") pod \"dnsmasq-dns-756f47c947-jbv27\" (UID: \"d96b74b5-6a64-4631-b1b1-c0bc3f173606\") " pod="openstack/dnsmasq-dns-756f47c947-jbv27" Jan 22 07:13:57 crc kubenswrapper[4982]: I0122 07:13:57.040398 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-fernet-keys\") pod \"keystone-bootstrap-hqrtl\" (UID: \"8d5ff8d6-0319-486b-9e8e-e12e48e2756b\") " pod="openstack/keystone-bootstrap-hqrtl" Jan 22 07:13:57 crc kubenswrapper[4982]: I0122 07:13:57.040422 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d96b74b5-6a64-4631-b1b1-c0bc3f173606-config\") pod \"dnsmasq-dns-756f47c947-jbv27\" (UID: \"d96b74b5-6a64-4631-b1b1-c0bc3f173606\") " pod="openstack/dnsmasq-dns-756f47c947-jbv27" Jan 22 07:13:57 crc kubenswrapper[4982]: I0122 07:13:57.040438 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-combined-ca-bundle\") pod \"keystone-bootstrap-hqrtl\" (UID: \"8d5ff8d6-0319-486b-9e8e-e12e48e2756b\") " pod="openstack/keystone-bootstrap-hqrtl" Jan 22 07:13:57 crc kubenswrapper[4982]: I0122 07:13:57.040469 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-config-data\") pod \"keystone-bootstrap-hqrtl\" (UID: \"8d5ff8d6-0319-486b-9e8e-e12e48e2756b\") " pod="openstack/keystone-bootstrap-hqrtl" Jan 22 07:13:57 crc kubenswrapper[4982]: I0122 07:13:57.040491 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vs5k\" (UniqueName: \"kubernetes.io/projected/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-kube-api-access-9vs5k\") pod \"keystone-bootstrap-hqrtl\" (UID: \"8d5ff8d6-0319-486b-9e8e-e12e48e2756b\") " pod="openstack/keystone-bootstrap-hqrtl" Jan 22 07:13:57 crc kubenswrapper[4982]: I0122 07:13:57.040522 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d96b74b5-6a64-4631-b1b1-c0bc3f173606-ovsdbserver-nb\") pod \"dnsmasq-dns-756f47c947-jbv27\" (UID: \"d96b74b5-6a64-4631-b1b1-c0bc3f173606\") " pod="openstack/dnsmasq-dns-756f47c947-jbv27" Jan 22 07:13:57 crc kubenswrapper[4982]: I0122 07:13:57.040574 4982 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d96b74b5-6a64-4631-b1b1-c0bc3f173606-ovsdbserver-sb\") pod \"dnsmasq-dns-756f47c947-jbv27\" (UID: \"d96b74b5-6a64-4631-b1b1-c0bc3f173606\") " pod="openstack/dnsmasq-dns-756f47c947-jbv27" Jan 22 07:13:57 crc kubenswrapper[4982]: I0122 07:13:57.041423 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d96b74b5-6a64-4631-b1b1-c0bc3f173606-dns-svc\") pod \"dnsmasq-dns-756f47c947-jbv27\" (UID: \"d96b74b5-6a64-4631-b1b1-c0bc3f173606\") " pod="openstack/dnsmasq-dns-756f47c947-jbv27" Jan 22 07:13:57 crc kubenswrapper[4982]: I0122 07:13:57.041451 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d96b74b5-6a64-4631-b1b1-c0bc3f173606-ovsdbserver-sb\") pod \"dnsmasq-dns-756f47c947-jbv27\" (UID: \"d96b74b5-6a64-4631-b1b1-c0bc3f173606\") " pod="openstack/dnsmasq-dns-756f47c947-jbv27" Jan 22 07:13:57 crc kubenswrapper[4982]: I0122 07:13:57.041463 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d96b74b5-6a64-4631-b1b1-c0bc3f173606-config\") pod \"dnsmasq-dns-756f47c947-jbv27\" (UID: \"d96b74b5-6a64-4631-b1b1-c0bc3f173606\") " pod="openstack/dnsmasq-dns-756f47c947-jbv27" Jan 22 07:13:57 crc kubenswrapper[4982]: I0122 07:13:57.042021 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d96b74b5-6a64-4631-b1b1-c0bc3f173606-ovsdbserver-nb\") pod \"dnsmasq-dns-756f47c947-jbv27\" (UID: \"d96b74b5-6a64-4631-b1b1-c0bc3f173606\") " pod="openstack/dnsmasq-dns-756f47c947-jbv27" Jan 22 07:13:57 crc kubenswrapper[4982]: I0122 07:13:57.056801 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vsm8s\" (UniqueName: \"kubernetes.io/projected/d96b74b5-6a64-4631-b1b1-c0bc3f173606-kube-api-access-vsm8s\") pod \"dnsmasq-dns-756f47c947-jbv27\" (UID: \"d96b74b5-6a64-4631-b1b1-c0bc3f173606\") " pod="openstack/dnsmasq-dns-756f47c947-jbv27" Jan 22 07:13:57 crc kubenswrapper[4982]: I0122 07:13:57.142515 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-config-data\") pod \"keystone-bootstrap-hqrtl\" (UID: \"8d5ff8d6-0319-486b-9e8e-e12e48e2756b\") " pod="openstack/keystone-bootstrap-hqrtl" Jan 22 07:13:57 crc kubenswrapper[4982]: I0122 07:13:57.142773 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vs5k\" (UniqueName: \"kubernetes.io/projected/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-kube-api-access-9vs5k\") pod \"keystone-bootstrap-hqrtl\" (UID: \"8d5ff8d6-0319-486b-9e8e-e12e48e2756b\") " pod="openstack/keystone-bootstrap-hqrtl" Jan 22 07:13:57 crc kubenswrapper[4982]: I0122 07:13:57.142921 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-scripts\") pod \"keystone-bootstrap-hqrtl\" (UID: \"8d5ff8d6-0319-486b-9e8e-e12e48e2756b\") " pod="openstack/keystone-bootstrap-hqrtl" Jan 22 07:13:57 crc kubenswrapper[4982]: I0122 07:13:57.142961 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: 
\"kubernetes.io/secret/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-credential-keys\") pod \"keystone-bootstrap-hqrtl\" (UID: \"8d5ff8d6-0319-486b-9e8e-e12e48e2756b\") " pod="openstack/keystone-bootstrap-hqrtl" Jan 22 07:13:57 crc kubenswrapper[4982]: I0122 07:13:57.142986 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-fernet-keys\") pod \"keystone-bootstrap-hqrtl\" (UID: \"8d5ff8d6-0319-486b-9e8e-e12e48e2756b\") " pod="openstack/keystone-bootstrap-hqrtl" Jan 22 07:13:57 crc kubenswrapper[4982]: I0122 07:13:57.143017 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-combined-ca-bundle\") pod \"keystone-bootstrap-hqrtl\" (UID: \"8d5ff8d6-0319-486b-9e8e-e12e48e2756b\") " pod="openstack/keystone-bootstrap-hqrtl" Jan 22 07:13:57 crc kubenswrapper[4982]: I0122 07:13:57.147029 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-combined-ca-bundle\") pod \"keystone-bootstrap-hqrtl\" (UID: \"8d5ff8d6-0319-486b-9e8e-e12e48e2756b\") " pod="openstack/keystone-bootstrap-hqrtl" Jan 22 07:13:57 crc kubenswrapper[4982]: I0122 07:13:57.149548 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-credential-keys\") pod \"keystone-bootstrap-hqrtl\" (UID: \"8d5ff8d6-0319-486b-9e8e-e12e48e2756b\") " pod="openstack/keystone-bootstrap-hqrtl" Jan 22 07:13:57 crc kubenswrapper[4982]: I0122 07:13:57.153957 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-scripts\") pod \"keystone-bootstrap-hqrtl\" (UID: \"8d5ff8d6-0319-486b-9e8e-e12e48e2756b\") " pod="openstack/keystone-bootstrap-hqrtl" Jan 22 07:13:57 crc kubenswrapper[4982]: I0122 07:13:57.154461 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-config-data\") pod \"keystone-bootstrap-hqrtl\" (UID: \"8d5ff8d6-0319-486b-9e8e-e12e48e2756b\") " pod="openstack/keystone-bootstrap-hqrtl" Jan 22 07:13:57 crc kubenswrapper[4982]: I0122 07:13:57.157819 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-fernet-keys\") pod \"keystone-bootstrap-hqrtl\" (UID: \"8d5ff8d6-0319-486b-9e8e-e12e48e2756b\") " pod="openstack/keystone-bootstrap-hqrtl" Jan 22 07:13:57 crc kubenswrapper[4982]: I0122 07:13:57.164005 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vs5k\" (UniqueName: \"kubernetes.io/projected/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-kube-api-access-9vs5k\") pod \"keystone-bootstrap-hqrtl\" (UID: \"8d5ff8d6-0319-486b-9e8e-e12e48e2756b\") " pod="openstack/keystone-bootstrap-hqrtl" Jan 22 07:13:57 crc kubenswrapper[4982]: I0122 07:13:57.172603 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-756f47c947-jbv27" Jan 22 07:13:57 crc kubenswrapper[4982]: I0122 07:13:57.216394 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-hqrtl" Jan 22 07:13:57 crc kubenswrapper[4982]: I0122 07:13:57.711808 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-hqrtl"] Jan 22 07:13:57 crc kubenswrapper[4982]: I0122 07:13:57.738185 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-756f47c947-jbv27"] Jan 22 07:13:58 crc kubenswrapper[4982]: I0122 07:13:58.202458 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-hqrtl" event={"ID":"8d5ff8d6-0319-486b-9e8e-e12e48e2756b","Type":"ContainerStarted","Data":"7e4ba074c1295db7c25f45c60a840ae2253044da50ae6a74db5e51c3bfc0b984"} Jan 22 07:13:58 crc kubenswrapper[4982]: I0122 07:13:58.202799 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-hqrtl" event={"ID":"8d5ff8d6-0319-486b-9e8e-e12e48e2756b","Type":"ContainerStarted","Data":"5a611cba05d176fd058b32d242c6614e01ac232c08b3e748bab75ff85962ff9d"} Jan 22 07:13:58 crc kubenswrapper[4982]: I0122 07:13:58.205488 4982 generic.go:334] "Generic (PLEG): container finished" podID="d96b74b5-6a64-4631-b1b1-c0bc3f173606" containerID="3df9fa92a5f6b608fd7673474a3f3adc9a195ea15bf5c3d69d06532992843bf2" exitCode=0 Jan 22 07:13:58 crc kubenswrapper[4982]: I0122 07:13:58.205524 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-756f47c947-jbv27" event={"ID":"d96b74b5-6a64-4631-b1b1-c0bc3f173606","Type":"ContainerDied","Data":"3df9fa92a5f6b608fd7673474a3f3adc9a195ea15bf5c3d69d06532992843bf2"} Jan 22 07:13:58 crc kubenswrapper[4982]: I0122 07:13:58.205546 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-756f47c947-jbv27" event={"ID":"d96b74b5-6a64-4631-b1b1-c0bc3f173606","Type":"ContainerStarted","Data":"88b3add774a4aa3f20d0ae888ca0c1b509a8299f17370fa665ed73e8e5b7e2a0"} Jan 22 07:13:58 crc kubenswrapper[4982]: I0122 07:13:58.247686 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-hqrtl" podStartSLOduration=2.247659214 podStartE2EDuration="2.247659214s" podCreationTimestamp="2026-01-22 07:13:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:13:58.220525353 +0000 UTC m=+5299.059163356" watchObservedRunningTime="2026-01-22 07:13:58.247659214 +0000 UTC m=+5299.086297217" Jan 22 07:13:59 crc kubenswrapper[4982]: I0122 07:13:59.218819 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-756f47c947-jbv27" event={"ID":"d96b74b5-6a64-4631-b1b1-c0bc3f173606","Type":"ContainerStarted","Data":"0ae8f24d6862b809e54ac6c0411cf7967192f507288f2b580c27258ef40a2ae8"} Jan 22 07:13:59 crc kubenswrapper[4982]: I0122 07:13:59.257033 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-756f47c947-jbv27" podStartSLOduration=3.257015992 podStartE2EDuration="3.257015992s" podCreationTimestamp="2026-01-22 07:13:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:13:59.254225507 +0000 UTC m=+5300.092863520" watchObservedRunningTime="2026-01-22 07:13:59.257015992 +0000 UTC m=+5300.095653995" Jan 22 07:14:00 crc kubenswrapper[4982]: I0122 07:14:00.234363 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-756f47c947-jbv27" Jan 22 07:14:01 crc 
kubenswrapper[4982]: E0122 07:14:01.904706 4982 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8d5ff8d6_0319_486b_9e8e_e12e48e2756b.slice/crio-7e4ba074c1295db7c25f45c60a840ae2253044da50ae6a74db5e51c3bfc0b984.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8d5ff8d6_0319_486b_9e8e_e12e48e2756b.slice/crio-conmon-7e4ba074c1295db7c25f45c60a840ae2253044da50ae6a74db5e51c3bfc0b984.scope\": RecentStats: unable to find data in memory cache]" Jan 22 07:14:02 crc kubenswrapper[4982]: I0122 07:14:02.250387 4982 generic.go:334] "Generic (PLEG): container finished" podID="8d5ff8d6-0319-486b-9e8e-e12e48e2756b" containerID="7e4ba074c1295db7c25f45c60a840ae2253044da50ae6a74db5e51c3bfc0b984" exitCode=0 Jan 22 07:14:02 crc kubenswrapper[4982]: I0122 07:14:02.250429 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-hqrtl" event={"ID":"8d5ff8d6-0319-486b-9e8e-e12e48e2756b","Type":"ContainerDied","Data":"7e4ba074c1295db7c25f45c60a840ae2253044da50ae6a74db5e51c3bfc0b984"} Jan 22 07:14:03 crc kubenswrapper[4982]: I0122 07:14:03.720714 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-hqrtl" Jan 22 07:14:03 crc kubenswrapper[4982]: I0122 07:14:03.836220 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-scripts\") pod \"8d5ff8d6-0319-486b-9e8e-e12e48e2756b\" (UID: \"8d5ff8d6-0319-486b-9e8e-e12e48e2756b\") " Jan 22 07:14:03 crc kubenswrapper[4982]: I0122 07:14:03.836806 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9vs5k\" (UniqueName: \"kubernetes.io/projected/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-kube-api-access-9vs5k\") pod \"8d5ff8d6-0319-486b-9e8e-e12e48e2756b\" (UID: \"8d5ff8d6-0319-486b-9e8e-e12e48e2756b\") " Jan 22 07:14:03 crc kubenswrapper[4982]: I0122 07:14:03.836990 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-config-data\") pod \"8d5ff8d6-0319-486b-9e8e-e12e48e2756b\" (UID: \"8d5ff8d6-0319-486b-9e8e-e12e48e2756b\") " Jan 22 07:14:03 crc kubenswrapper[4982]: I0122 07:14:03.837198 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-credential-keys\") pod \"8d5ff8d6-0319-486b-9e8e-e12e48e2756b\" (UID: \"8d5ff8d6-0319-486b-9e8e-e12e48e2756b\") " Jan 22 07:14:03 crc kubenswrapper[4982]: I0122 07:14:03.837295 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-combined-ca-bundle\") pod \"8d5ff8d6-0319-486b-9e8e-e12e48e2756b\" (UID: \"8d5ff8d6-0319-486b-9e8e-e12e48e2756b\") " Jan 22 07:14:03 crc kubenswrapper[4982]: I0122 07:14:03.837492 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-fernet-keys\") pod \"8d5ff8d6-0319-486b-9e8e-e12e48e2756b\" (UID: \"8d5ff8d6-0319-486b-9e8e-e12e48e2756b\") " Jan 22 07:14:03 crc kubenswrapper[4982]: I0122 
07:14:03.841839 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-scripts" (OuterVolumeSpecName: "scripts") pod "8d5ff8d6-0319-486b-9e8e-e12e48e2756b" (UID: "8d5ff8d6-0319-486b-9e8e-e12e48e2756b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:14:03 crc kubenswrapper[4982]: I0122 07:14:03.841950 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "8d5ff8d6-0319-486b-9e8e-e12e48e2756b" (UID: "8d5ff8d6-0319-486b-9e8e-e12e48e2756b"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:14:03 crc kubenswrapper[4982]: I0122 07:14:03.844017 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "8d5ff8d6-0319-486b-9e8e-e12e48e2756b" (UID: "8d5ff8d6-0319-486b-9e8e-e12e48e2756b"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:14:03 crc kubenswrapper[4982]: I0122 07:14:03.844649 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-kube-api-access-9vs5k" (OuterVolumeSpecName: "kube-api-access-9vs5k") pod "8d5ff8d6-0319-486b-9e8e-e12e48e2756b" (UID: "8d5ff8d6-0319-486b-9e8e-e12e48e2756b"). InnerVolumeSpecName "kube-api-access-9vs5k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:14:03 crc kubenswrapper[4982]: I0122 07:14:03.858325 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-config-data" (OuterVolumeSpecName: "config-data") pod "8d5ff8d6-0319-486b-9e8e-e12e48e2756b" (UID: "8d5ff8d6-0319-486b-9e8e-e12e48e2756b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:14:03 crc kubenswrapper[4982]: I0122 07:14:03.858357 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8d5ff8d6-0319-486b-9e8e-e12e48e2756b" (UID: "8d5ff8d6-0319-486b-9e8e-e12e48e2756b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:14:03 crc kubenswrapper[4982]: I0122 07:14:03.940106 4982 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 22 07:14:03 crc kubenswrapper[4982]: I0122 07:14:03.940310 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:14:03 crc kubenswrapper[4982]: I0122 07:14:03.940320 4982 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 22 07:14:03 crc kubenswrapper[4982]: I0122 07:14:03.940329 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:14:03 crc kubenswrapper[4982]: I0122 07:14:03.940338 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9vs5k\" (UniqueName: \"kubernetes.io/projected/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-kube-api-access-9vs5k\") on node \"crc\" DevicePath \"\"" Jan 22 07:14:03 crc kubenswrapper[4982]: I0122 07:14:03.940346 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d5ff8d6-0319-486b-9e8e-e12e48e2756b-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.271102 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-hqrtl" event={"ID":"8d5ff8d6-0319-486b-9e8e-e12e48e2756b","Type":"ContainerDied","Data":"5a611cba05d176fd058b32d242c6614e01ac232c08b3e748bab75ff85962ff9d"} Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.271158 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5a611cba05d176fd058b32d242c6614e01ac232c08b3e748bab75ff85962ff9d" Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.271206 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-hqrtl" Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.362421 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-hqrtl"] Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.370615 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-hqrtl"] Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.442613 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-pd22k"] Jan 22 07:14:04 crc kubenswrapper[4982]: E0122 07:14:04.442929 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d5ff8d6-0319-486b-9e8e-e12e48e2756b" containerName="keystone-bootstrap" Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.442947 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d5ff8d6-0319-486b-9e8e-e12e48e2756b" containerName="keystone-bootstrap" Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.443093 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d5ff8d6-0319-486b-9e8e-e12e48e2756b" containerName="keystone-bootstrap" Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.443632 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-pd22k" Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.453134 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.460377 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-pd22k"] Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.460899 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.461237 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-pjrrz" Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.461395 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.461562 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.552492 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7875445a-de0e-442e-9e16-1933fda0823e-credential-keys\") pod \"keystone-bootstrap-pd22k\" (UID: \"7875445a-de0e-442e-9e16-1933fda0823e\") " pod="openstack/keystone-bootstrap-pd22k" Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.552547 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szb5x\" (UniqueName: \"kubernetes.io/projected/7875445a-de0e-442e-9e16-1933fda0823e-kube-api-access-szb5x\") pod \"keystone-bootstrap-pd22k\" (UID: \"7875445a-de0e-442e-9e16-1933fda0823e\") " pod="openstack/keystone-bootstrap-pd22k" Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.552584 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7875445a-de0e-442e-9e16-1933fda0823e-scripts\") pod \"keystone-bootstrap-pd22k\" (UID: \"7875445a-de0e-442e-9e16-1933fda0823e\") " pod="openstack/keystone-bootstrap-pd22k" Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 
07:14:04.552610 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7875445a-de0e-442e-9e16-1933fda0823e-combined-ca-bundle\") pod \"keystone-bootstrap-pd22k\" (UID: \"7875445a-de0e-442e-9e16-1933fda0823e\") " pod="openstack/keystone-bootstrap-pd22k" Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.552625 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7875445a-de0e-442e-9e16-1933fda0823e-fernet-keys\") pod \"keystone-bootstrap-pd22k\" (UID: \"7875445a-de0e-442e-9e16-1933fda0823e\") " pod="openstack/keystone-bootstrap-pd22k" Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.552652 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7875445a-de0e-442e-9e16-1933fda0823e-config-data\") pod \"keystone-bootstrap-pd22k\" (UID: \"7875445a-de0e-442e-9e16-1933fda0823e\") " pod="openstack/keystone-bootstrap-pd22k" Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.653959 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7875445a-de0e-442e-9e16-1933fda0823e-credential-keys\") pod \"keystone-bootstrap-pd22k\" (UID: \"7875445a-de0e-442e-9e16-1933fda0823e\") " pod="openstack/keystone-bootstrap-pd22k" Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.654006 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szb5x\" (UniqueName: \"kubernetes.io/projected/7875445a-de0e-442e-9e16-1933fda0823e-kube-api-access-szb5x\") pod \"keystone-bootstrap-pd22k\" (UID: \"7875445a-de0e-442e-9e16-1933fda0823e\") " pod="openstack/keystone-bootstrap-pd22k" Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.654037 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7875445a-de0e-442e-9e16-1933fda0823e-scripts\") pod \"keystone-bootstrap-pd22k\" (UID: \"7875445a-de0e-442e-9e16-1933fda0823e\") " pod="openstack/keystone-bootstrap-pd22k" Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.654062 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7875445a-de0e-442e-9e16-1933fda0823e-combined-ca-bundle\") pod \"keystone-bootstrap-pd22k\" (UID: \"7875445a-de0e-442e-9e16-1933fda0823e\") " pod="openstack/keystone-bootstrap-pd22k" Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.654076 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7875445a-de0e-442e-9e16-1933fda0823e-fernet-keys\") pod \"keystone-bootstrap-pd22k\" (UID: \"7875445a-de0e-442e-9e16-1933fda0823e\") " pod="openstack/keystone-bootstrap-pd22k" Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.654118 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7875445a-de0e-442e-9e16-1933fda0823e-config-data\") pod \"keystone-bootstrap-pd22k\" (UID: \"7875445a-de0e-442e-9e16-1933fda0823e\") " pod="openstack/keystone-bootstrap-pd22k" Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.658361 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7875445a-de0e-442e-9e16-1933fda0823e-fernet-keys\") pod \"keystone-bootstrap-pd22k\" (UID: \"7875445a-de0e-442e-9e16-1933fda0823e\") " pod="openstack/keystone-bootstrap-pd22k" Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.659100 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7875445a-de0e-442e-9e16-1933fda0823e-scripts\") pod \"keystone-bootstrap-pd22k\" (UID: \"7875445a-de0e-442e-9e16-1933fda0823e\") " pod="openstack/keystone-bootstrap-pd22k" Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.659111 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7875445a-de0e-442e-9e16-1933fda0823e-config-data\") pod \"keystone-bootstrap-pd22k\" (UID: \"7875445a-de0e-442e-9e16-1933fda0823e\") " pod="openstack/keystone-bootstrap-pd22k" Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.659318 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7875445a-de0e-442e-9e16-1933fda0823e-credential-keys\") pod \"keystone-bootstrap-pd22k\" (UID: \"7875445a-de0e-442e-9e16-1933fda0823e\") " pod="openstack/keystone-bootstrap-pd22k" Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.669257 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7875445a-de0e-442e-9e16-1933fda0823e-combined-ca-bundle\") pod \"keystone-bootstrap-pd22k\" (UID: \"7875445a-de0e-442e-9e16-1933fda0823e\") " pod="openstack/keystone-bootstrap-pd22k" Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.674062 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szb5x\" (UniqueName: \"kubernetes.io/projected/7875445a-de0e-442e-9e16-1933fda0823e-kube-api-access-szb5x\") pod \"keystone-bootstrap-pd22k\" (UID: \"7875445a-de0e-442e-9e16-1933fda0823e\") " pod="openstack/keystone-bootstrap-pd22k" Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.719444 4982 scope.go:117] "RemoveContainer" containerID="dd1b1f68267d9d0fc8f31fc896f788451cccffbbd47d3265a31a9c82c393becc" Jan 22 07:14:04 crc kubenswrapper[4982]: E0122 07:14:04.719802 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:14:04 crc kubenswrapper[4982]: I0122 07:14:04.841623 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-pd22k" Jan 22 07:14:05 crc kubenswrapper[4982]: I0122 07:14:05.337951 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-pd22k"] Jan 22 07:14:05 crc kubenswrapper[4982]: I0122 07:14:05.735134 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d5ff8d6-0319-486b-9e8e-e12e48e2756b" path="/var/lib/kubelet/pods/8d5ff8d6-0319-486b-9e8e-e12e48e2756b/volumes" Jan 22 07:14:06 crc kubenswrapper[4982]: I0122 07:14:06.290771 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-pd22k" event={"ID":"7875445a-de0e-442e-9e16-1933fda0823e","Type":"ContainerStarted","Data":"f5cd44ff5f870adef672c322952c2739c2eab112842978eb06f6db5aa5b2fe21"} Jan 22 07:14:06 crc kubenswrapper[4982]: I0122 07:14:06.290817 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-pd22k" event={"ID":"7875445a-de0e-442e-9e16-1933fda0823e","Type":"ContainerStarted","Data":"b5f3ab18924eb95d0e994839ccffd0473ba0af6400b641bd3754d33ba2c2973b"} Jan 22 07:14:06 crc kubenswrapper[4982]: I0122 07:14:06.328350 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-pd22k" podStartSLOduration=2.328329287 podStartE2EDuration="2.328329287s" podCreationTimestamp="2026-01-22 07:14:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:14:06.321626427 +0000 UTC m=+5307.160264420" watchObservedRunningTime="2026-01-22 07:14:06.328329287 +0000 UTC m=+5307.166967290" Jan 22 07:14:07 crc kubenswrapper[4982]: I0122 07:14:07.174778 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-756f47c947-jbv27" Jan 22 07:14:07 crc kubenswrapper[4982]: I0122 07:14:07.241764 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-587db994f9-tcc86"] Jan 22 07:14:07 crc kubenswrapper[4982]: I0122 07:14:07.242069 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-587db994f9-tcc86" podUID="f949af24-9fbd-416d-b200-4c1c2d36275b" containerName="dnsmasq-dns" containerID="cri-o://98f59cde21b6a1f6e634bb5eca96ceb9403a896658bfb472ae2dfd9b6452e290" gracePeriod=10 Jan 22 07:14:07 crc kubenswrapper[4982]: I0122 07:14:07.813508 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-587db994f9-tcc86" Jan 22 07:14:07 crc kubenswrapper[4982]: I0122 07:14:07.915273 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f949af24-9fbd-416d-b200-4c1c2d36275b-ovsdbserver-sb\") pod \"f949af24-9fbd-416d-b200-4c1c2d36275b\" (UID: \"f949af24-9fbd-416d-b200-4c1c2d36275b\") " Jan 22 07:14:07 crc kubenswrapper[4982]: I0122 07:14:07.915341 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f949af24-9fbd-416d-b200-4c1c2d36275b-dns-svc\") pod \"f949af24-9fbd-416d-b200-4c1c2d36275b\" (UID: \"f949af24-9fbd-416d-b200-4c1c2d36275b\") " Jan 22 07:14:07 crc kubenswrapper[4982]: I0122 07:14:07.915483 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6n5tb\" (UniqueName: \"kubernetes.io/projected/f949af24-9fbd-416d-b200-4c1c2d36275b-kube-api-access-6n5tb\") pod \"f949af24-9fbd-416d-b200-4c1c2d36275b\" (UID: \"f949af24-9fbd-416d-b200-4c1c2d36275b\") " Jan 22 07:14:07 crc kubenswrapper[4982]: I0122 07:14:07.915505 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f949af24-9fbd-416d-b200-4c1c2d36275b-ovsdbserver-nb\") pod \"f949af24-9fbd-416d-b200-4c1c2d36275b\" (UID: \"f949af24-9fbd-416d-b200-4c1c2d36275b\") " Jan 22 07:14:07 crc kubenswrapper[4982]: I0122 07:14:07.915555 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f949af24-9fbd-416d-b200-4c1c2d36275b-config\") pod \"f949af24-9fbd-416d-b200-4c1c2d36275b\" (UID: \"f949af24-9fbd-416d-b200-4c1c2d36275b\") " Jan 22 07:14:07 crc kubenswrapper[4982]: I0122 07:14:07.920943 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f949af24-9fbd-416d-b200-4c1c2d36275b-kube-api-access-6n5tb" (OuterVolumeSpecName: "kube-api-access-6n5tb") pod "f949af24-9fbd-416d-b200-4c1c2d36275b" (UID: "f949af24-9fbd-416d-b200-4c1c2d36275b"). InnerVolumeSpecName "kube-api-access-6n5tb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:14:07 crc kubenswrapper[4982]: I0122 07:14:07.953707 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f949af24-9fbd-416d-b200-4c1c2d36275b-config" (OuterVolumeSpecName: "config") pod "f949af24-9fbd-416d-b200-4c1c2d36275b" (UID: "f949af24-9fbd-416d-b200-4c1c2d36275b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:14:07 crc kubenswrapper[4982]: I0122 07:14:07.963644 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f949af24-9fbd-416d-b200-4c1c2d36275b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f949af24-9fbd-416d-b200-4c1c2d36275b" (UID: "f949af24-9fbd-416d-b200-4c1c2d36275b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:14:07 crc kubenswrapper[4982]: I0122 07:14:07.963756 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f949af24-9fbd-416d-b200-4c1c2d36275b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f949af24-9fbd-416d-b200-4c1c2d36275b" (UID: "f949af24-9fbd-416d-b200-4c1c2d36275b"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:14:07 crc kubenswrapper[4982]: I0122 07:14:07.966593 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f949af24-9fbd-416d-b200-4c1c2d36275b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f949af24-9fbd-416d-b200-4c1c2d36275b" (UID: "f949af24-9fbd-416d-b200-4c1c2d36275b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:14:08 crc kubenswrapper[4982]: I0122 07:14:08.017509 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6n5tb\" (UniqueName: \"kubernetes.io/projected/f949af24-9fbd-416d-b200-4c1c2d36275b-kube-api-access-6n5tb\") on node \"crc\" DevicePath \"\"" Jan 22 07:14:08 crc kubenswrapper[4982]: I0122 07:14:08.017543 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f949af24-9fbd-416d-b200-4c1c2d36275b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 07:14:08 crc kubenswrapper[4982]: I0122 07:14:08.017554 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f949af24-9fbd-416d-b200-4c1c2d36275b-config\") on node \"crc\" DevicePath \"\"" Jan 22 07:14:08 crc kubenswrapper[4982]: I0122 07:14:08.017563 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f949af24-9fbd-416d-b200-4c1c2d36275b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 07:14:08 crc kubenswrapper[4982]: I0122 07:14:08.017572 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f949af24-9fbd-416d-b200-4c1c2d36275b-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 07:14:08 crc kubenswrapper[4982]: I0122 07:14:08.327928 4982 generic.go:334] "Generic (PLEG): container finished" podID="f949af24-9fbd-416d-b200-4c1c2d36275b" containerID="98f59cde21b6a1f6e634bb5eca96ceb9403a896658bfb472ae2dfd9b6452e290" exitCode=0 Jan 22 07:14:08 crc kubenswrapper[4982]: I0122 07:14:08.327978 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-587db994f9-tcc86" event={"ID":"f949af24-9fbd-416d-b200-4c1c2d36275b","Type":"ContainerDied","Data":"98f59cde21b6a1f6e634bb5eca96ceb9403a896658bfb472ae2dfd9b6452e290"} Jan 22 07:14:08 crc kubenswrapper[4982]: I0122 07:14:08.328017 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-587db994f9-tcc86" event={"ID":"f949af24-9fbd-416d-b200-4c1c2d36275b","Type":"ContainerDied","Data":"17a20b4f130afe1b864503d892e05d66baa4d4b289951786ca75dec4b18dfe9d"} Jan 22 07:14:08 crc kubenswrapper[4982]: I0122 07:14:08.328039 4982 scope.go:117] "RemoveContainer" containerID="98f59cde21b6a1f6e634bb5eca96ceb9403a896658bfb472ae2dfd9b6452e290" Jan 22 07:14:08 crc kubenswrapper[4982]: I0122 07:14:08.328045 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-587db994f9-tcc86" Jan 22 07:14:08 crc kubenswrapper[4982]: I0122 07:14:08.352553 4982 scope.go:117] "RemoveContainer" containerID="81e9a3bc19e9ab8d9140fd8b726f2c9061e768c57a37fae651713405c50659d1" Jan 22 07:14:08 crc kubenswrapper[4982]: I0122 07:14:08.417899 4982 scope.go:117] "RemoveContainer" containerID="98f59cde21b6a1f6e634bb5eca96ceb9403a896658bfb472ae2dfd9b6452e290" Jan 22 07:14:08 crc kubenswrapper[4982]: E0122 07:14:08.430571 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"98f59cde21b6a1f6e634bb5eca96ceb9403a896658bfb472ae2dfd9b6452e290\": container with ID starting with 98f59cde21b6a1f6e634bb5eca96ceb9403a896658bfb472ae2dfd9b6452e290 not found: ID does not exist" containerID="98f59cde21b6a1f6e634bb5eca96ceb9403a896658bfb472ae2dfd9b6452e290" Jan 22 07:14:08 crc kubenswrapper[4982]: I0122 07:14:08.430636 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98f59cde21b6a1f6e634bb5eca96ceb9403a896658bfb472ae2dfd9b6452e290"} err="failed to get container status \"98f59cde21b6a1f6e634bb5eca96ceb9403a896658bfb472ae2dfd9b6452e290\": rpc error: code = NotFound desc = could not find container \"98f59cde21b6a1f6e634bb5eca96ceb9403a896658bfb472ae2dfd9b6452e290\": container with ID starting with 98f59cde21b6a1f6e634bb5eca96ceb9403a896658bfb472ae2dfd9b6452e290 not found: ID does not exist" Jan 22 07:14:08 crc kubenswrapper[4982]: I0122 07:14:08.430672 4982 scope.go:117] "RemoveContainer" containerID="81e9a3bc19e9ab8d9140fd8b726f2c9061e768c57a37fae651713405c50659d1" Jan 22 07:14:08 crc kubenswrapper[4982]: E0122 07:14:08.431357 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81e9a3bc19e9ab8d9140fd8b726f2c9061e768c57a37fae651713405c50659d1\": container with ID starting with 81e9a3bc19e9ab8d9140fd8b726f2c9061e768c57a37fae651713405c50659d1 not found: ID does not exist" containerID="81e9a3bc19e9ab8d9140fd8b726f2c9061e768c57a37fae651713405c50659d1" Jan 22 07:14:08 crc kubenswrapper[4982]: I0122 07:14:08.431402 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81e9a3bc19e9ab8d9140fd8b726f2c9061e768c57a37fae651713405c50659d1"} err="failed to get container status \"81e9a3bc19e9ab8d9140fd8b726f2c9061e768c57a37fae651713405c50659d1\": rpc error: code = NotFound desc = could not find container \"81e9a3bc19e9ab8d9140fd8b726f2c9061e768c57a37fae651713405c50659d1\": container with ID starting with 81e9a3bc19e9ab8d9140fd8b726f2c9061e768c57a37fae651713405c50659d1 not found: ID does not exist" Jan 22 07:14:08 crc kubenswrapper[4982]: I0122 07:14:08.433275 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-587db994f9-tcc86"] Jan 22 07:14:08 crc kubenswrapper[4982]: I0122 07:14:08.439241 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-587db994f9-tcc86"] Jan 22 07:14:09 crc kubenswrapper[4982]: I0122 07:14:09.733434 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f949af24-9fbd-416d-b200-4c1c2d36275b" path="/var/lib/kubelet/pods/f949af24-9fbd-416d-b200-4c1c2d36275b/volumes" Jan 22 07:14:10 crc kubenswrapper[4982]: I0122 07:14:10.347286 4982 generic.go:334] "Generic (PLEG): container finished" podID="7875445a-de0e-442e-9e16-1933fda0823e" containerID="f5cd44ff5f870adef672c322952c2739c2eab112842978eb06f6db5aa5b2fe21" 
exitCode=0 Jan 22 07:14:10 crc kubenswrapper[4982]: I0122 07:14:10.347376 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-pd22k" event={"ID":"7875445a-de0e-442e-9e16-1933fda0823e","Type":"ContainerDied","Data":"f5cd44ff5f870adef672c322952c2739c2eab112842978eb06f6db5aa5b2fe21"} Jan 22 07:14:11 crc kubenswrapper[4982]: I0122 07:14:11.659372 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-pd22k" Jan 22 07:14:11 crc kubenswrapper[4982]: I0122 07:14:11.775570 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7875445a-de0e-442e-9e16-1933fda0823e-credential-keys\") pod \"7875445a-de0e-442e-9e16-1933fda0823e\" (UID: \"7875445a-de0e-442e-9e16-1933fda0823e\") " Jan 22 07:14:11 crc kubenswrapper[4982]: I0122 07:14:11.775604 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7875445a-de0e-442e-9e16-1933fda0823e-scripts\") pod \"7875445a-de0e-442e-9e16-1933fda0823e\" (UID: \"7875445a-de0e-442e-9e16-1933fda0823e\") " Jan 22 07:14:11 crc kubenswrapper[4982]: I0122 07:14:11.775629 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7875445a-de0e-442e-9e16-1933fda0823e-combined-ca-bundle\") pod \"7875445a-de0e-442e-9e16-1933fda0823e\" (UID: \"7875445a-de0e-442e-9e16-1933fda0823e\") " Jan 22 07:14:11 crc kubenswrapper[4982]: I0122 07:14:11.775757 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-szb5x\" (UniqueName: \"kubernetes.io/projected/7875445a-de0e-442e-9e16-1933fda0823e-kube-api-access-szb5x\") pod \"7875445a-de0e-442e-9e16-1933fda0823e\" (UID: \"7875445a-de0e-442e-9e16-1933fda0823e\") " Jan 22 07:14:11 crc kubenswrapper[4982]: I0122 07:14:11.775790 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7875445a-de0e-442e-9e16-1933fda0823e-fernet-keys\") pod \"7875445a-de0e-442e-9e16-1933fda0823e\" (UID: \"7875445a-de0e-442e-9e16-1933fda0823e\") " Jan 22 07:14:11 crc kubenswrapper[4982]: I0122 07:14:11.775844 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7875445a-de0e-442e-9e16-1933fda0823e-config-data\") pod \"7875445a-de0e-442e-9e16-1933fda0823e\" (UID: \"7875445a-de0e-442e-9e16-1933fda0823e\") " Jan 22 07:14:11 crc kubenswrapper[4982]: I0122 07:14:11.780945 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7875445a-de0e-442e-9e16-1933fda0823e-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "7875445a-de0e-442e-9e16-1933fda0823e" (UID: "7875445a-de0e-442e-9e16-1933fda0823e"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:14:11 crc kubenswrapper[4982]: I0122 07:14:11.780967 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7875445a-de0e-442e-9e16-1933fda0823e-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "7875445a-de0e-442e-9e16-1933fda0823e" (UID: "7875445a-de0e-442e-9e16-1933fda0823e"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:14:11 crc kubenswrapper[4982]: I0122 07:14:11.781087 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7875445a-de0e-442e-9e16-1933fda0823e-kube-api-access-szb5x" (OuterVolumeSpecName: "kube-api-access-szb5x") pod "7875445a-de0e-442e-9e16-1933fda0823e" (UID: "7875445a-de0e-442e-9e16-1933fda0823e"). InnerVolumeSpecName "kube-api-access-szb5x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:14:11 crc kubenswrapper[4982]: I0122 07:14:11.781302 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7875445a-de0e-442e-9e16-1933fda0823e-scripts" (OuterVolumeSpecName: "scripts") pod "7875445a-de0e-442e-9e16-1933fda0823e" (UID: "7875445a-de0e-442e-9e16-1933fda0823e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:14:11 crc kubenswrapper[4982]: I0122 07:14:11.797111 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7875445a-de0e-442e-9e16-1933fda0823e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7875445a-de0e-442e-9e16-1933fda0823e" (UID: "7875445a-de0e-442e-9e16-1933fda0823e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:14:11 crc kubenswrapper[4982]: I0122 07:14:11.799094 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7875445a-de0e-442e-9e16-1933fda0823e-config-data" (OuterVolumeSpecName: "config-data") pod "7875445a-de0e-442e-9e16-1933fda0823e" (UID: "7875445a-de0e-442e-9e16-1933fda0823e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:14:11 crc kubenswrapper[4982]: I0122 07:14:11.878605 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-szb5x\" (UniqueName: \"kubernetes.io/projected/7875445a-de0e-442e-9e16-1933fda0823e-kube-api-access-szb5x\") on node \"crc\" DevicePath \"\"" Jan 22 07:14:11 crc kubenswrapper[4982]: I0122 07:14:11.878638 4982 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7875445a-de0e-442e-9e16-1933fda0823e-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 22 07:14:11 crc kubenswrapper[4982]: I0122 07:14:11.878651 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7875445a-de0e-442e-9e16-1933fda0823e-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:14:11 crc kubenswrapper[4982]: I0122 07:14:11.878662 4982 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7875445a-de0e-442e-9e16-1933fda0823e-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 22 07:14:11 crc kubenswrapper[4982]: I0122 07:14:11.878670 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7875445a-de0e-442e-9e16-1933fda0823e-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:14:11 crc kubenswrapper[4982]: I0122 07:14:11.878681 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7875445a-de0e-442e-9e16-1933fda0823e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.363577 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/keystone-bootstrap-pd22k" event={"ID":"7875445a-de0e-442e-9e16-1933fda0823e","Type":"ContainerDied","Data":"b5f3ab18924eb95d0e994839ccffd0473ba0af6400b641bd3754d33ba2c2973b"} Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.363870 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b5f3ab18924eb95d0e994839ccffd0473ba0af6400b641bd3754d33ba2c2973b" Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.363629 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-pd22k" Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.465650 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-757b4696-655wn"] Jan 22 07:14:12 crc kubenswrapper[4982]: E0122 07:14:12.466010 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f949af24-9fbd-416d-b200-4c1c2d36275b" containerName="dnsmasq-dns" Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.466022 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f949af24-9fbd-416d-b200-4c1c2d36275b" containerName="dnsmasq-dns" Jan 22 07:14:12 crc kubenswrapper[4982]: E0122 07:14:12.466036 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f949af24-9fbd-416d-b200-4c1c2d36275b" containerName="init" Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.466042 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f949af24-9fbd-416d-b200-4c1c2d36275b" containerName="init" Jan 22 07:14:12 crc kubenswrapper[4982]: E0122 07:14:12.466055 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7875445a-de0e-442e-9e16-1933fda0823e" containerName="keystone-bootstrap" Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.466063 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="7875445a-de0e-442e-9e16-1933fda0823e" containerName="keystone-bootstrap" Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.466240 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="7875445a-de0e-442e-9e16-1933fda0823e" containerName="keystone-bootstrap" Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.466268 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f949af24-9fbd-416d-b200-4c1c2d36275b" containerName="dnsmasq-dns" Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.466762 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-757b4696-655wn" Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.468835 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.470597 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.471302 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.481242 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-pjrrz" Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.482225 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-757b4696-655wn"] Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.590261 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/47389627-50a0-42af-b9df-efef6b63429f-credential-keys\") pod \"keystone-757b4696-655wn\" (UID: \"47389627-50a0-42af-b9df-efef6b63429f\") " pod="openstack/keystone-757b4696-655wn" Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.590319 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/47389627-50a0-42af-b9df-efef6b63429f-fernet-keys\") pod \"keystone-757b4696-655wn\" (UID: \"47389627-50a0-42af-b9df-efef6b63429f\") " pod="openstack/keystone-757b4696-655wn" Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.590336 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47389627-50a0-42af-b9df-efef6b63429f-config-data\") pod \"keystone-757b4696-655wn\" (UID: \"47389627-50a0-42af-b9df-efef6b63429f\") " pod="openstack/keystone-757b4696-655wn" Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.590371 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47389627-50a0-42af-b9df-efef6b63429f-scripts\") pod \"keystone-757b4696-655wn\" (UID: \"47389627-50a0-42af-b9df-efef6b63429f\") " pod="openstack/keystone-757b4696-655wn" Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.590388 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47389627-50a0-42af-b9df-efef6b63429f-combined-ca-bundle\") pod \"keystone-757b4696-655wn\" (UID: \"47389627-50a0-42af-b9df-efef6b63429f\") " pod="openstack/keystone-757b4696-655wn" Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.590427 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lg2v7\" (UniqueName: \"kubernetes.io/projected/47389627-50a0-42af-b9df-efef6b63429f-kube-api-access-lg2v7\") pod \"keystone-757b4696-655wn\" (UID: \"47389627-50a0-42af-b9df-efef6b63429f\") " pod="openstack/keystone-757b4696-655wn" Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.691679 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lg2v7\" (UniqueName: \"kubernetes.io/projected/47389627-50a0-42af-b9df-efef6b63429f-kube-api-access-lg2v7\") pod \"keystone-757b4696-655wn\" 
(UID: \"47389627-50a0-42af-b9df-efef6b63429f\") " pod="openstack/keystone-757b4696-655wn" Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.691802 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/47389627-50a0-42af-b9df-efef6b63429f-credential-keys\") pod \"keystone-757b4696-655wn\" (UID: \"47389627-50a0-42af-b9df-efef6b63429f\") " pod="openstack/keystone-757b4696-655wn" Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.691842 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/47389627-50a0-42af-b9df-efef6b63429f-fernet-keys\") pod \"keystone-757b4696-655wn\" (UID: \"47389627-50a0-42af-b9df-efef6b63429f\") " pod="openstack/keystone-757b4696-655wn" Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.691871 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47389627-50a0-42af-b9df-efef6b63429f-config-data\") pod \"keystone-757b4696-655wn\" (UID: \"47389627-50a0-42af-b9df-efef6b63429f\") " pod="openstack/keystone-757b4696-655wn" Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.692648 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47389627-50a0-42af-b9df-efef6b63429f-scripts\") pod \"keystone-757b4696-655wn\" (UID: \"47389627-50a0-42af-b9df-efef6b63429f\") " pod="openstack/keystone-757b4696-655wn" Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.692679 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47389627-50a0-42af-b9df-efef6b63429f-combined-ca-bundle\") pod \"keystone-757b4696-655wn\" (UID: \"47389627-50a0-42af-b9df-efef6b63429f\") " pod="openstack/keystone-757b4696-655wn" Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.696474 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47389627-50a0-42af-b9df-efef6b63429f-config-data\") pod \"keystone-757b4696-655wn\" (UID: \"47389627-50a0-42af-b9df-efef6b63429f\") " pod="openstack/keystone-757b4696-655wn" Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.697491 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47389627-50a0-42af-b9df-efef6b63429f-combined-ca-bundle\") pod \"keystone-757b4696-655wn\" (UID: \"47389627-50a0-42af-b9df-efef6b63429f\") " pod="openstack/keystone-757b4696-655wn" Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.701319 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/47389627-50a0-42af-b9df-efef6b63429f-credential-keys\") pod \"keystone-757b4696-655wn\" (UID: \"47389627-50a0-42af-b9df-efef6b63429f\") " pod="openstack/keystone-757b4696-655wn" Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.715542 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lg2v7\" (UniqueName: \"kubernetes.io/projected/47389627-50a0-42af-b9df-efef6b63429f-kube-api-access-lg2v7\") pod \"keystone-757b4696-655wn\" (UID: \"47389627-50a0-42af-b9df-efef6b63429f\") " pod="openstack/keystone-757b4696-655wn" Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.715621 4982 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47389627-50a0-42af-b9df-efef6b63429f-scripts\") pod \"keystone-757b4696-655wn\" (UID: \"47389627-50a0-42af-b9df-efef6b63429f\") " pod="openstack/keystone-757b4696-655wn" Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.720474 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/47389627-50a0-42af-b9df-efef6b63429f-fernet-keys\") pod \"keystone-757b4696-655wn\" (UID: \"47389627-50a0-42af-b9df-efef6b63429f\") " pod="openstack/keystone-757b4696-655wn" Jan 22 07:14:12 crc kubenswrapper[4982]: I0122 07:14:12.785221 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-757b4696-655wn" Jan 22 07:14:13 crc kubenswrapper[4982]: I0122 07:14:13.204189 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-757b4696-655wn"] Jan 22 07:14:13 crc kubenswrapper[4982]: W0122 07:14:13.213934 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod47389627_50a0_42af_b9df_efef6b63429f.slice/crio-b807f9e8345920bff13280fea63294d5f9b4d9f954f82fd58d2d26617fc7d690 WatchSource:0}: Error finding container b807f9e8345920bff13280fea63294d5f9b4d9f954f82fd58d2d26617fc7d690: Status 404 returned error can't find the container with id b807f9e8345920bff13280fea63294d5f9b4d9f954f82fd58d2d26617fc7d690 Jan 22 07:14:13 crc kubenswrapper[4982]: I0122 07:14:13.374328 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-757b4696-655wn" event={"ID":"47389627-50a0-42af-b9df-efef6b63429f","Type":"ContainerStarted","Data":"b807f9e8345920bff13280fea63294d5f9b4d9f954f82fd58d2d26617fc7d690"} Jan 22 07:14:14 crc kubenswrapper[4982]: I0122 07:14:14.386598 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-757b4696-655wn" event={"ID":"47389627-50a0-42af-b9df-efef6b63429f","Type":"ContainerStarted","Data":"e6d2ff0e97d04ed8e8b5cd7a585309b84eb80df54e94d0410b070eee9bef95e7"} Jan 22 07:14:14 crc kubenswrapper[4982]: I0122 07:14:14.386794 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-757b4696-655wn" Jan 22 07:14:14 crc kubenswrapper[4982]: I0122 07:14:14.420372 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-757b4696-655wn" podStartSLOduration=2.420353852 podStartE2EDuration="2.420353852s" podCreationTimestamp="2026-01-22 07:14:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:14:14.415540633 +0000 UTC m=+5315.254178646" watchObservedRunningTime="2026-01-22 07:14:14.420353852 +0000 UTC m=+5315.258991855" Jan 22 07:14:19 crc kubenswrapper[4982]: I0122 07:14:19.725410 4982 scope.go:117] "RemoveContainer" containerID="dd1b1f68267d9d0fc8f31fc896f788451cccffbbd47d3265a31a9c82c393becc" Jan 22 07:14:19 crc kubenswrapper[4982]: E0122 07:14:19.726331 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:14:34 crc 
kubenswrapper[4982]: I0122 07:14:34.719967 4982 scope.go:117] "RemoveContainer" containerID="dd1b1f68267d9d0fc8f31fc896f788451cccffbbd47d3265a31a9c82c393becc" Jan 22 07:14:34 crc kubenswrapper[4982]: E0122 07:14:34.721034 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:14:44 crc kubenswrapper[4982]: I0122 07:14:44.257333 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-757b4696-655wn" Jan 22 07:14:44 crc kubenswrapper[4982]: I0122 07:14:44.595242 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Jan 22 07:14:44 crc kubenswrapper[4982]: I0122 07:14:44.596462 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 22 07:14:44 crc kubenswrapper[4982]: I0122 07:14:44.598432 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Jan 22 07:14:44 crc kubenswrapper[4982]: I0122 07:14:44.598529 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-wg7m8" Jan 22 07:14:44 crc kubenswrapper[4982]: I0122 07:14:44.598779 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Jan 22 07:14:44 crc kubenswrapper[4982]: I0122 07:14:44.604289 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jan 22 07:14:44 crc kubenswrapper[4982]: I0122 07:14:44.661517 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1fde4825-4373-436f-9da0-2ee79b723de4-openstack-config-secret\") pod \"openstackclient\" (UID: \"1fde4825-4373-436f-9da0-2ee79b723de4\") " pod="openstack/openstackclient" Jan 22 07:14:44 crc kubenswrapper[4982]: I0122 07:14:44.661687 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1fde4825-4373-436f-9da0-2ee79b723de4-openstack-config\") pod \"openstackclient\" (UID: \"1fde4825-4373-436f-9da0-2ee79b723de4\") " pod="openstack/openstackclient" Jan 22 07:14:44 crc kubenswrapper[4982]: I0122 07:14:44.661770 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59vrx\" (UniqueName: \"kubernetes.io/projected/1fde4825-4373-436f-9da0-2ee79b723de4-kube-api-access-59vrx\") pod \"openstackclient\" (UID: \"1fde4825-4373-436f-9da0-2ee79b723de4\") " pod="openstack/openstackclient" Jan 22 07:14:44 crc kubenswrapper[4982]: I0122 07:14:44.763554 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59vrx\" (UniqueName: \"kubernetes.io/projected/1fde4825-4373-436f-9da0-2ee79b723de4-kube-api-access-59vrx\") pod \"openstackclient\" (UID: \"1fde4825-4373-436f-9da0-2ee79b723de4\") " pod="openstack/openstackclient" Jan 22 07:14:44 crc kubenswrapper[4982]: I0122 07:14:44.763686 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" 
(UniqueName: \"kubernetes.io/secret/1fde4825-4373-436f-9da0-2ee79b723de4-openstack-config-secret\") pod \"openstackclient\" (UID: \"1fde4825-4373-436f-9da0-2ee79b723de4\") " pod="openstack/openstackclient" Jan 22 07:14:44 crc kubenswrapper[4982]: I0122 07:14:44.763806 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1fde4825-4373-436f-9da0-2ee79b723de4-openstack-config\") pod \"openstackclient\" (UID: \"1fde4825-4373-436f-9da0-2ee79b723de4\") " pod="openstack/openstackclient" Jan 22 07:14:44 crc kubenswrapper[4982]: I0122 07:14:44.765277 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1fde4825-4373-436f-9da0-2ee79b723de4-openstack-config\") pod \"openstackclient\" (UID: \"1fde4825-4373-436f-9da0-2ee79b723de4\") " pod="openstack/openstackclient" Jan 22 07:14:44 crc kubenswrapper[4982]: I0122 07:14:44.769557 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1fde4825-4373-436f-9da0-2ee79b723de4-openstack-config-secret\") pod \"openstackclient\" (UID: \"1fde4825-4373-436f-9da0-2ee79b723de4\") " pod="openstack/openstackclient" Jan 22 07:14:44 crc kubenswrapper[4982]: I0122 07:14:44.782583 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59vrx\" (UniqueName: \"kubernetes.io/projected/1fde4825-4373-436f-9da0-2ee79b723de4-kube-api-access-59vrx\") pod \"openstackclient\" (UID: \"1fde4825-4373-436f-9da0-2ee79b723de4\") " pod="openstack/openstackclient" Jan 22 07:14:44 crc kubenswrapper[4982]: I0122 07:14:44.952915 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Jan 22 07:14:45 crc kubenswrapper[4982]: I0122 07:14:45.392560 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jan 22 07:14:45 crc kubenswrapper[4982]: I0122 07:14:45.643525 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"1fde4825-4373-436f-9da0-2ee79b723de4","Type":"ContainerStarted","Data":"3eac9a621b37b3966932af9782a9ff83fd641d4e9152e0069c2dba840103bf74"} Jan 22 07:14:46 crc kubenswrapper[4982]: I0122 07:14:46.652211 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"1fde4825-4373-436f-9da0-2ee79b723de4","Type":"ContainerStarted","Data":"cb2dabe31fcd2f0b70d5ece9914cf1373cb4111b59727680e79cb61fed97ba21"} Jan 22 07:14:46 crc kubenswrapper[4982]: I0122 07:14:46.671942 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.671923967 podStartE2EDuration="2.671923967s" podCreationTimestamp="2026-01-22 07:14:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:14:46.664761814 +0000 UTC m=+5347.503399827" watchObservedRunningTime="2026-01-22 07:14:46.671923967 +0000 UTC m=+5347.510561970" Jan 22 07:14:47 crc kubenswrapper[4982]: I0122 07:14:47.719915 4982 scope.go:117] "RemoveContainer" containerID="dd1b1f68267d9d0fc8f31fc896f788451cccffbbd47d3265a31a9c82c393becc" Jan 22 07:14:47 crc kubenswrapper[4982]: E0122 07:14:47.720545 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:15:00 crc kubenswrapper[4982]: I0122 07:15:00.158983 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484435-wtgn8"] Jan 22 07:15:00 crc kubenswrapper[4982]: I0122 07:15:00.161021 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484435-wtgn8" Jan 22 07:15:00 crc kubenswrapper[4982]: I0122 07:15:00.174270 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 22 07:15:00 crc kubenswrapper[4982]: I0122 07:15:00.175161 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 22 07:15:00 crc kubenswrapper[4982]: I0122 07:15:00.193099 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484435-wtgn8"] Jan 22 07:15:00 crc kubenswrapper[4982]: I0122 07:15:00.230239 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4bf985b4-36db-45ac-9483-052d61149d7f-config-volume\") pod \"collect-profiles-29484435-wtgn8\" (UID: \"4bf985b4-36db-45ac-9483-052d61149d7f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484435-wtgn8" Jan 22 07:15:00 crc kubenswrapper[4982]: I0122 07:15:00.230636 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4bf985b4-36db-45ac-9483-052d61149d7f-secret-volume\") pod \"collect-profiles-29484435-wtgn8\" (UID: \"4bf985b4-36db-45ac-9483-052d61149d7f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484435-wtgn8" Jan 22 07:15:00 crc kubenswrapper[4982]: I0122 07:15:00.230840 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ww86t\" (UniqueName: \"kubernetes.io/projected/4bf985b4-36db-45ac-9483-052d61149d7f-kube-api-access-ww86t\") pod \"collect-profiles-29484435-wtgn8\" (UID: \"4bf985b4-36db-45ac-9483-052d61149d7f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484435-wtgn8" Jan 22 07:15:00 crc kubenswrapper[4982]: I0122 07:15:00.332790 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4bf985b4-36db-45ac-9483-052d61149d7f-secret-volume\") pod \"collect-profiles-29484435-wtgn8\" (UID: \"4bf985b4-36db-45ac-9483-052d61149d7f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484435-wtgn8" Jan 22 07:15:00 crc kubenswrapper[4982]: I0122 07:15:00.334900 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ww86t\" (UniqueName: \"kubernetes.io/projected/4bf985b4-36db-45ac-9483-052d61149d7f-kube-api-access-ww86t\") pod \"collect-profiles-29484435-wtgn8\" (UID: \"4bf985b4-36db-45ac-9483-052d61149d7f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484435-wtgn8" Jan 22 07:15:00 crc kubenswrapper[4982]: I0122 07:15:00.335387 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4bf985b4-36db-45ac-9483-052d61149d7f-config-volume\") pod \"collect-profiles-29484435-wtgn8\" (UID: \"4bf985b4-36db-45ac-9483-052d61149d7f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484435-wtgn8" Jan 22 07:15:00 crc kubenswrapper[4982]: I0122 07:15:00.337293 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4bf985b4-36db-45ac-9483-052d61149d7f-config-volume\") pod 
\"collect-profiles-29484435-wtgn8\" (UID: \"4bf985b4-36db-45ac-9483-052d61149d7f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484435-wtgn8" Jan 22 07:15:00 crc kubenswrapper[4982]: I0122 07:15:00.350136 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4bf985b4-36db-45ac-9483-052d61149d7f-secret-volume\") pod \"collect-profiles-29484435-wtgn8\" (UID: \"4bf985b4-36db-45ac-9483-052d61149d7f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484435-wtgn8" Jan 22 07:15:00 crc kubenswrapper[4982]: I0122 07:15:00.359347 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ww86t\" (UniqueName: \"kubernetes.io/projected/4bf985b4-36db-45ac-9483-052d61149d7f-kube-api-access-ww86t\") pod \"collect-profiles-29484435-wtgn8\" (UID: \"4bf985b4-36db-45ac-9483-052d61149d7f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484435-wtgn8" Jan 22 07:15:00 crc kubenswrapper[4982]: I0122 07:15:00.498816 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484435-wtgn8" Jan 22 07:15:00 crc kubenswrapper[4982]: I0122 07:15:00.974520 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484435-wtgn8"] Jan 22 07:15:01 crc kubenswrapper[4982]: I0122 07:15:01.721619 4982 scope.go:117] "RemoveContainer" containerID="dd1b1f68267d9d0fc8f31fc896f788451cccffbbd47d3265a31a9c82c393becc" Jan 22 07:15:01 crc kubenswrapper[4982]: E0122 07:15:01.723096 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:15:01 crc kubenswrapper[4982]: I0122 07:15:01.781276 4982 generic.go:334] "Generic (PLEG): container finished" podID="4bf985b4-36db-45ac-9483-052d61149d7f" containerID="0507c399863ce1a2250627b10f426ab3b64919c3a79c12c0181e9b0efdf1a149" exitCode=0 Jan 22 07:15:01 crc kubenswrapper[4982]: I0122 07:15:01.781345 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484435-wtgn8" event={"ID":"4bf985b4-36db-45ac-9483-052d61149d7f","Type":"ContainerDied","Data":"0507c399863ce1a2250627b10f426ab3b64919c3a79c12c0181e9b0efdf1a149"} Jan 22 07:15:01 crc kubenswrapper[4982]: I0122 07:15:01.781437 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484435-wtgn8" event={"ID":"4bf985b4-36db-45ac-9483-052d61149d7f","Type":"ContainerStarted","Data":"0f0729aeeae6968dc8c2415252a3e1aba25cda3c356dfd10d3a243cb14cd35a2"} Jan 22 07:15:03 crc kubenswrapper[4982]: I0122 07:15:03.080705 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484435-wtgn8" Jan 22 07:15:03 crc kubenswrapper[4982]: I0122 07:15:03.229609 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4bf985b4-36db-45ac-9483-052d61149d7f-secret-volume\") pod \"4bf985b4-36db-45ac-9483-052d61149d7f\" (UID: \"4bf985b4-36db-45ac-9483-052d61149d7f\") " Jan 22 07:15:03 crc kubenswrapper[4982]: I0122 07:15:03.229706 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ww86t\" (UniqueName: \"kubernetes.io/projected/4bf985b4-36db-45ac-9483-052d61149d7f-kube-api-access-ww86t\") pod \"4bf985b4-36db-45ac-9483-052d61149d7f\" (UID: \"4bf985b4-36db-45ac-9483-052d61149d7f\") " Jan 22 07:15:03 crc kubenswrapper[4982]: I0122 07:15:03.229784 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4bf985b4-36db-45ac-9483-052d61149d7f-config-volume\") pod \"4bf985b4-36db-45ac-9483-052d61149d7f\" (UID: \"4bf985b4-36db-45ac-9483-052d61149d7f\") " Jan 22 07:15:03 crc kubenswrapper[4982]: I0122 07:15:03.230683 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bf985b4-36db-45ac-9483-052d61149d7f-config-volume" (OuterVolumeSpecName: "config-volume") pod "4bf985b4-36db-45ac-9483-052d61149d7f" (UID: "4bf985b4-36db-45ac-9483-052d61149d7f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:15:03 crc kubenswrapper[4982]: I0122 07:15:03.235794 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bf985b4-36db-45ac-9483-052d61149d7f-kube-api-access-ww86t" (OuterVolumeSpecName: "kube-api-access-ww86t") pod "4bf985b4-36db-45ac-9483-052d61149d7f" (UID: "4bf985b4-36db-45ac-9483-052d61149d7f"). InnerVolumeSpecName "kube-api-access-ww86t". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:15:03 crc kubenswrapper[4982]: I0122 07:15:03.236019 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4bf985b4-36db-45ac-9483-052d61149d7f-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4bf985b4-36db-45ac-9483-052d61149d7f" (UID: "4bf985b4-36db-45ac-9483-052d61149d7f"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:15:03 crc kubenswrapper[4982]: I0122 07:15:03.332133 4982 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4bf985b4-36db-45ac-9483-052d61149d7f-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 22 07:15:03 crc kubenswrapper[4982]: I0122 07:15:03.332170 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ww86t\" (UniqueName: \"kubernetes.io/projected/4bf985b4-36db-45ac-9483-052d61149d7f-kube-api-access-ww86t\") on node \"crc\" DevicePath \"\"" Jan 22 07:15:03 crc kubenswrapper[4982]: I0122 07:15:03.332180 4982 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4bf985b4-36db-45ac-9483-052d61149d7f-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 07:15:03 crc kubenswrapper[4982]: I0122 07:15:03.798374 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484435-wtgn8" event={"ID":"4bf985b4-36db-45ac-9483-052d61149d7f","Type":"ContainerDied","Data":"0f0729aeeae6968dc8c2415252a3e1aba25cda3c356dfd10d3a243cb14cd35a2"} Jan 22 07:15:03 crc kubenswrapper[4982]: I0122 07:15:03.798734 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484435-wtgn8" Jan 22 07:15:03 crc kubenswrapper[4982]: I0122 07:15:03.799984 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0f0729aeeae6968dc8c2415252a3e1aba25cda3c356dfd10d3a243cb14cd35a2" Jan 22 07:15:04 crc kubenswrapper[4982]: I0122 07:15:04.157732 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484390-svtdh"] Jan 22 07:15:04 crc kubenswrapper[4982]: I0122 07:15:04.166358 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484390-svtdh"] Jan 22 07:15:05 crc kubenswrapper[4982]: I0122 07:15:05.729020 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d534a1e1-d6f5-4ceb-9129-51b4761a3745" path="/var/lib/kubelet/pods/d534a1e1-d6f5-4ceb-9129-51b4761a3745/volumes" Jan 22 07:15:16 crc kubenswrapper[4982]: I0122 07:15:16.719648 4982 scope.go:117] "RemoveContainer" containerID="dd1b1f68267d9d0fc8f31fc896f788451cccffbbd47d3265a31a9c82c393becc" Jan 22 07:15:16 crc kubenswrapper[4982]: E0122 07:15:16.720998 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:15:20 crc kubenswrapper[4982]: I0122 07:15:20.941958 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fvdk2"] Jan 22 07:15:20 crc kubenswrapper[4982]: E0122 07:15:20.943658 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bf985b4-36db-45ac-9483-052d61149d7f" containerName="collect-profiles" Jan 22 07:15:20 crc kubenswrapper[4982]: I0122 07:15:20.943744 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bf985b4-36db-45ac-9483-052d61149d7f" containerName="collect-profiles" Jan 22 07:15:20 crc 
kubenswrapper[4982]: I0122 07:15:20.943966 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="4bf985b4-36db-45ac-9483-052d61149d7f" containerName="collect-profiles" Jan 22 07:15:20 crc kubenswrapper[4982]: I0122 07:15:20.945998 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fvdk2" Jan 22 07:15:20 crc kubenswrapper[4982]: I0122 07:15:20.962905 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fvdk2"] Jan 22 07:15:21 crc kubenswrapper[4982]: I0122 07:15:21.121315 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7c8f367-8cef-467f-806a-ed8a5b068526-catalog-content\") pod \"community-operators-fvdk2\" (UID: \"e7c8f367-8cef-467f-806a-ed8a5b068526\") " pod="openshift-marketplace/community-operators-fvdk2" Jan 22 07:15:21 crc kubenswrapper[4982]: I0122 07:15:21.121403 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7q5x\" (UniqueName: \"kubernetes.io/projected/e7c8f367-8cef-467f-806a-ed8a5b068526-kube-api-access-s7q5x\") pod \"community-operators-fvdk2\" (UID: \"e7c8f367-8cef-467f-806a-ed8a5b068526\") " pod="openshift-marketplace/community-operators-fvdk2" Jan 22 07:15:21 crc kubenswrapper[4982]: I0122 07:15:21.121446 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7c8f367-8cef-467f-806a-ed8a5b068526-utilities\") pod \"community-operators-fvdk2\" (UID: \"e7c8f367-8cef-467f-806a-ed8a5b068526\") " pod="openshift-marketplace/community-operators-fvdk2" Jan 22 07:15:21 crc kubenswrapper[4982]: I0122 07:15:21.223380 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7c8f367-8cef-467f-806a-ed8a5b068526-catalog-content\") pod \"community-operators-fvdk2\" (UID: \"e7c8f367-8cef-467f-806a-ed8a5b068526\") " pod="openshift-marketplace/community-operators-fvdk2" Jan 22 07:15:21 crc kubenswrapper[4982]: I0122 07:15:21.223504 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7q5x\" (UniqueName: \"kubernetes.io/projected/e7c8f367-8cef-467f-806a-ed8a5b068526-kube-api-access-s7q5x\") pod \"community-operators-fvdk2\" (UID: \"e7c8f367-8cef-467f-806a-ed8a5b068526\") " pod="openshift-marketplace/community-operators-fvdk2" Jan 22 07:15:21 crc kubenswrapper[4982]: I0122 07:15:21.223564 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7c8f367-8cef-467f-806a-ed8a5b068526-utilities\") pod \"community-operators-fvdk2\" (UID: \"e7c8f367-8cef-467f-806a-ed8a5b068526\") " pod="openshift-marketplace/community-operators-fvdk2" Jan 22 07:15:21 crc kubenswrapper[4982]: I0122 07:15:21.224157 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7c8f367-8cef-467f-806a-ed8a5b068526-utilities\") pod \"community-operators-fvdk2\" (UID: \"e7c8f367-8cef-467f-806a-ed8a5b068526\") " pod="openshift-marketplace/community-operators-fvdk2" Jan 22 07:15:21 crc kubenswrapper[4982]: I0122 07:15:21.224733 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/e7c8f367-8cef-467f-806a-ed8a5b068526-catalog-content\") pod \"community-operators-fvdk2\" (UID: \"e7c8f367-8cef-467f-806a-ed8a5b068526\") " pod="openshift-marketplace/community-operators-fvdk2" Jan 22 07:15:21 crc kubenswrapper[4982]: I0122 07:15:21.247302 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7q5x\" (UniqueName: \"kubernetes.io/projected/e7c8f367-8cef-467f-806a-ed8a5b068526-kube-api-access-s7q5x\") pod \"community-operators-fvdk2\" (UID: \"e7c8f367-8cef-467f-806a-ed8a5b068526\") " pod="openshift-marketplace/community-operators-fvdk2" Jan 22 07:15:21 crc kubenswrapper[4982]: I0122 07:15:21.272151 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fvdk2" Jan 22 07:15:21 crc kubenswrapper[4982]: I0122 07:15:21.793018 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fvdk2"] Jan 22 07:15:21 crc kubenswrapper[4982]: W0122 07:15:21.799505 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode7c8f367_8cef_467f_806a_ed8a5b068526.slice/crio-3e9dd63f0b83d11bb0f97b5b70628686d21e16ca801b832c31ae66d5bb495dd5 WatchSource:0}: Error finding container 3e9dd63f0b83d11bb0f97b5b70628686d21e16ca801b832c31ae66d5bb495dd5: Status 404 returned error can't find the container with id 3e9dd63f0b83d11bb0f97b5b70628686d21e16ca801b832c31ae66d5bb495dd5 Jan 22 07:15:22 crc kubenswrapper[4982]: I0122 07:15:22.006490 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fvdk2" event={"ID":"e7c8f367-8cef-467f-806a-ed8a5b068526","Type":"ContainerStarted","Data":"3e9dd63f0b83d11bb0f97b5b70628686d21e16ca801b832c31ae66d5bb495dd5"} Jan 22 07:15:23 crc kubenswrapper[4982]: I0122 07:15:23.015436 4982 generic.go:334] "Generic (PLEG): container finished" podID="e7c8f367-8cef-467f-806a-ed8a5b068526" containerID="082d1aea76394880db29856f77803f414711f228957a7f8523162678fa586e4d" exitCode=0 Jan 22 07:15:23 crc kubenswrapper[4982]: I0122 07:15:23.015494 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fvdk2" event={"ID":"e7c8f367-8cef-467f-806a-ed8a5b068526","Type":"ContainerDied","Data":"082d1aea76394880db29856f77803f414711f228957a7f8523162678fa586e4d"} Jan 22 07:15:25 crc kubenswrapper[4982]: I0122 07:15:25.033068 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fvdk2" event={"ID":"e7c8f367-8cef-467f-806a-ed8a5b068526","Type":"ContainerStarted","Data":"4d9be6699de7c2b9f7861fb9640f4a6bba0b1791f7406b94249925fc58235deb"} Jan 22 07:15:26 crc kubenswrapper[4982]: I0122 07:15:26.041872 4982 generic.go:334] "Generic (PLEG): container finished" podID="e7c8f367-8cef-467f-806a-ed8a5b068526" containerID="4d9be6699de7c2b9f7861fb9640f4a6bba0b1791f7406b94249925fc58235deb" exitCode=0 Jan 22 07:15:26 crc kubenswrapper[4982]: I0122 07:15:26.041914 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fvdk2" event={"ID":"e7c8f367-8cef-467f-806a-ed8a5b068526","Type":"ContainerDied","Data":"4d9be6699de7c2b9f7861fb9640f4a6bba0b1791f7406b94249925fc58235deb"} Jan 22 07:15:29 crc kubenswrapper[4982]: I0122 07:15:29.726634 4982 scope.go:117] "RemoveContainer" containerID="dd1b1f68267d9d0fc8f31fc896f788451cccffbbd47d3265a31a9c82c393becc" Jan 22 07:15:29 crc kubenswrapper[4982]: E0122 
07:15:29.727428 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:15:30 crc kubenswrapper[4982]: I0122 07:15:30.094945 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fvdk2" event={"ID":"e7c8f367-8cef-467f-806a-ed8a5b068526","Type":"ContainerStarted","Data":"40c82335ea8916fd6f51a528bc19f023d4da2607525f2b9357c7ae633274b8b9"} Jan 22 07:15:30 crc kubenswrapper[4982]: I0122 07:15:30.135241 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fvdk2" podStartSLOduration=3.93914542 podStartE2EDuration="10.135210784s" podCreationTimestamp="2026-01-22 07:15:20 +0000 UTC" firstStartedPulling="2026-01-22 07:15:23.017483669 +0000 UTC m=+5383.856121692" lastFinishedPulling="2026-01-22 07:15:29.213549013 +0000 UTC m=+5390.052187056" observedRunningTime="2026-01-22 07:15:30.130934439 +0000 UTC m=+5390.969572442" watchObservedRunningTime="2026-01-22 07:15:30.135210784 +0000 UTC m=+5390.973848807" Jan 22 07:15:31 crc kubenswrapper[4982]: I0122 07:15:31.273318 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fvdk2" Jan 22 07:15:31 crc kubenswrapper[4982]: I0122 07:15:31.273815 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fvdk2" Jan 22 07:15:32 crc kubenswrapper[4982]: I0122 07:15:32.326386 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-fvdk2" podUID="e7c8f367-8cef-467f-806a-ed8a5b068526" containerName="registry-server" probeResult="failure" output=< Jan 22 07:15:32 crc kubenswrapper[4982]: timeout: failed to connect service ":50051" within 1s Jan 22 07:15:32 crc kubenswrapper[4982]: > Jan 22 07:15:41 crc kubenswrapper[4982]: I0122 07:15:41.326876 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fvdk2" Jan 22 07:15:41 crc kubenswrapper[4982]: I0122 07:15:41.381285 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fvdk2" Jan 22 07:15:41 crc kubenswrapper[4982]: I0122 07:15:41.578118 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fvdk2"] Jan 22 07:15:42 crc kubenswrapper[4982]: I0122 07:15:42.055913 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-dkh6j"] Jan 22 07:15:42 crc kubenswrapper[4982]: I0122 07:15:42.066251 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-dkh6j"] Jan 22 07:15:42 crc kubenswrapper[4982]: I0122 07:15:42.719262 4982 scope.go:117] "RemoveContainer" containerID="dd1b1f68267d9d0fc8f31fc896f788451cccffbbd47d3265a31a9c82c393becc" Jan 22 07:15:42 crc kubenswrapper[4982]: E0122 07:15:42.719580 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:15:43 crc kubenswrapper[4982]: I0122 07:15:43.196239 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-fvdk2" podUID="e7c8f367-8cef-467f-806a-ed8a5b068526" containerName="registry-server" containerID="cri-o://40c82335ea8916fd6f51a528bc19f023d4da2607525f2b9357c7ae633274b8b9" gracePeriod=2 Jan 22 07:15:43 crc kubenswrapper[4982]: I0122 07:15:43.734020 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44f14004-87c3-4799-bd61-c491d7e177f8" path="/var/lib/kubelet/pods/44f14004-87c3-4799-bd61-c491d7e177f8/volumes" Jan 22 07:15:44 crc kubenswrapper[4982]: I0122 07:15:44.155652 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fvdk2" Jan 22 07:15:44 crc kubenswrapper[4982]: I0122 07:15:44.218180 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7c8f367-8cef-467f-806a-ed8a5b068526-utilities\") pod \"e7c8f367-8cef-467f-806a-ed8a5b068526\" (UID: \"e7c8f367-8cef-467f-806a-ed8a5b068526\") " Jan 22 07:15:44 crc kubenswrapper[4982]: I0122 07:15:44.218508 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7c8f367-8cef-467f-806a-ed8a5b068526-catalog-content\") pod \"e7c8f367-8cef-467f-806a-ed8a5b068526\" (UID: \"e7c8f367-8cef-467f-806a-ed8a5b068526\") " Jan 22 07:15:44 crc kubenswrapper[4982]: I0122 07:15:44.218586 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s7q5x\" (UniqueName: \"kubernetes.io/projected/e7c8f367-8cef-467f-806a-ed8a5b068526-kube-api-access-s7q5x\") pod \"e7c8f367-8cef-467f-806a-ed8a5b068526\" (UID: \"e7c8f367-8cef-467f-806a-ed8a5b068526\") " Jan 22 07:15:44 crc kubenswrapper[4982]: I0122 07:15:44.220144 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7c8f367-8cef-467f-806a-ed8a5b068526-utilities" (OuterVolumeSpecName: "utilities") pod "e7c8f367-8cef-467f-806a-ed8a5b068526" (UID: "e7c8f367-8cef-467f-806a-ed8a5b068526"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:15:44 crc kubenswrapper[4982]: I0122 07:15:44.220622 4982 generic.go:334] "Generic (PLEG): container finished" podID="e7c8f367-8cef-467f-806a-ed8a5b068526" containerID="40c82335ea8916fd6f51a528bc19f023d4da2607525f2b9357c7ae633274b8b9" exitCode=0 Jan 22 07:15:44 crc kubenswrapper[4982]: I0122 07:15:44.220698 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fvdk2" event={"ID":"e7c8f367-8cef-467f-806a-ed8a5b068526","Type":"ContainerDied","Data":"40c82335ea8916fd6f51a528bc19f023d4da2607525f2b9357c7ae633274b8b9"} Jan 22 07:15:44 crc kubenswrapper[4982]: I0122 07:15:44.220745 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fvdk2" event={"ID":"e7c8f367-8cef-467f-806a-ed8a5b068526","Type":"ContainerDied","Data":"3e9dd63f0b83d11bb0f97b5b70628686d21e16ca801b832c31ae66d5bb495dd5"} Jan 22 07:15:44 crc kubenswrapper[4982]: I0122 07:15:44.220769 4982 scope.go:117] "RemoveContainer" containerID="40c82335ea8916fd6f51a528bc19f023d4da2607525f2b9357c7ae633274b8b9" Jan 22 07:15:44 crc kubenswrapper[4982]: I0122 07:15:44.221061 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fvdk2" Jan 22 07:15:44 crc kubenswrapper[4982]: I0122 07:15:44.232250 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7c8f367-8cef-467f-806a-ed8a5b068526-kube-api-access-s7q5x" (OuterVolumeSpecName: "kube-api-access-s7q5x") pod "e7c8f367-8cef-467f-806a-ed8a5b068526" (UID: "e7c8f367-8cef-467f-806a-ed8a5b068526"). InnerVolumeSpecName "kube-api-access-s7q5x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:15:44 crc kubenswrapper[4982]: I0122 07:15:44.290317 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7c8f367-8cef-467f-806a-ed8a5b068526-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e7c8f367-8cef-467f-806a-ed8a5b068526" (UID: "e7c8f367-8cef-467f-806a-ed8a5b068526"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:15:44 crc kubenswrapper[4982]: I0122 07:15:44.295749 4982 scope.go:117] "RemoveContainer" containerID="4d9be6699de7c2b9f7861fb9640f4a6bba0b1791f7406b94249925fc58235deb" Jan 22 07:15:44 crc kubenswrapper[4982]: I0122 07:15:44.316676 4982 scope.go:117] "RemoveContainer" containerID="082d1aea76394880db29856f77803f414711f228957a7f8523162678fa586e4d" Jan 22 07:15:44 crc kubenswrapper[4982]: I0122 07:15:44.320415 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7c8f367-8cef-467f-806a-ed8a5b068526-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 07:15:44 crc kubenswrapper[4982]: I0122 07:15:44.320440 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7c8f367-8cef-467f-806a-ed8a5b068526-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 07:15:44 crc kubenswrapper[4982]: I0122 07:15:44.320450 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s7q5x\" (UniqueName: \"kubernetes.io/projected/e7c8f367-8cef-467f-806a-ed8a5b068526-kube-api-access-s7q5x\") on node \"crc\" DevicePath \"\"" Jan 22 07:15:44 crc kubenswrapper[4982]: I0122 07:15:44.349380 4982 scope.go:117] "RemoveContainer" containerID="40c82335ea8916fd6f51a528bc19f023d4da2607525f2b9357c7ae633274b8b9" Jan 22 07:15:44 crc kubenswrapper[4982]: E0122 07:15:44.349808 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"40c82335ea8916fd6f51a528bc19f023d4da2607525f2b9357c7ae633274b8b9\": container with ID starting with 40c82335ea8916fd6f51a528bc19f023d4da2607525f2b9357c7ae633274b8b9 not found: ID does not exist" containerID="40c82335ea8916fd6f51a528bc19f023d4da2607525f2b9357c7ae633274b8b9" Jan 22 07:15:44 crc kubenswrapper[4982]: I0122 07:15:44.349958 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40c82335ea8916fd6f51a528bc19f023d4da2607525f2b9357c7ae633274b8b9"} err="failed to get container status \"40c82335ea8916fd6f51a528bc19f023d4da2607525f2b9357c7ae633274b8b9\": rpc error: code = NotFound desc = could not find container \"40c82335ea8916fd6f51a528bc19f023d4da2607525f2b9357c7ae633274b8b9\": container with ID starting with 40c82335ea8916fd6f51a528bc19f023d4da2607525f2b9357c7ae633274b8b9 not found: ID does not exist" Jan 22 07:15:44 crc kubenswrapper[4982]: I0122 07:15:44.350054 4982 scope.go:117] "RemoveContainer" containerID="4d9be6699de7c2b9f7861fb9640f4a6bba0b1791f7406b94249925fc58235deb" Jan 22 07:15:44 crc kubenswrapper[4982]: E0122 07:15:44.350592 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d9be6699de7c2b9f7861fb9640f4a6bba0b1791f7406b94249925fc58235deb\": container with ID starting with 4d9be6699de7c2b9f7861fb9640f4a6bba0b1791f7406b94249925fc58235deb not found: ID does not exist" containerID="4d9be6699de7c2b9f7861fb9640f4a6bba0b1791f7406b94249925fc58235deb" Jan 22 07:15:44 crc kubenswrapper[4982]: I0122 07:15:44.350626 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d9be6699de7c2b9f7861fb9640f4a6bba0b1791f7406b94249925fc58235deb"} err="failed to get container status \"4d9be6699de7c2b9f7861fb9640f4a6bba0b1791f7406b94249925fc58235deb\": rpc error: code = NotFound desc = could not find container 
\"4d9be6699de7c2b9f7861fb9640f4a6bba0b1791f7406b94249925fc58235deb\": container with ID starting with 4d9be6699de7c2b9f7861fb9640f4a6bba0b1791f7406b94249925fc58235deb not found: ID does not exist" Jan 22 07:15:44 crc kubenswrapper[4982]: I0122 07:15:44.350652 4982 scope.go:117] "RemoveContainer" containerID="082d1aea76394880db29856f77803f414711f228957a7f8523162678fa586e4d" Jan 22 07:15:44 crc kubenswrapper[4982]: E0122 07:15:44.351247 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"082d1aea76394880db29856f77803f414711f228957a7f8523162678fa586e4d\": container with ID starting with 082d1aea76394880db29856f77803f414711f228957a7f8523162678fa586e4d not found: ID does not exist" containerID="082d1aea76394880db29856f77803f414711f228957a7f8523162678fa586e4d" Jan 22 07:15:44 crc kubenswrapper[4982]: I0122 07:15:44.351266 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"082d1aea76394880db29856f77803f414711f228957a7f8523162678fa586e4d"} err="failed to get container status \"082d1aea76394880db29856f77803f414711f228957a7f8523162678fa586e4d\": rpc error: code = NotFound desc = could not find container \"082d1aea76394880db29856f77803f414711f228957a7f8523162678fa586e4d\": container with ID starting with 082d1aea76394880db29856f77803f414711f228957a7f8523162678fa586e4d not found: ID does not exist" Jan 22 07:15:44 crc kubenswrapper[4982]: I0122 07:15:44.573634 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fvdk2"] Jan 22 07:15:44 crc kubenswrapper[4982]: I0122 07:15:44.584812 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-fvdk2"] Jan 22 07:15:45 crc kubenswrapper[4982]: I0122 07:15:45.728741 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7c8f367-8cef-467f-806a-ed8a5b068526" path="/var/lib/kubelet/pods/e7c8f367-8cef-467f-806a-ed8a5b068526/volumes" Jan 22 07:15:55 crc kubenswrapper[4982]: I0122 07:15:55.718836 4982 scope.go:117] "RemoveContainer" containerID="dd1b1f68267d9d0fc8f31fc896f788451cccffbbd47d3265a31a9c82c393becc" Jan 22 07:15:55 crc kubenswrapper[4982]: E0122 07:15:55.719645 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:16:02 crc kubenswrapper[4982]: I0122 07:16:02.981977 4982 scope.go:117] "RemoveContainer" containerID="2ecc1a5801f67760fed62cc01f11d6a0511eb3ed7d34a1793695fb97242d2ab7" Jan 22 07:16:03 crc kubenswrapper[4982]: I0122 07:16:03.005175 4982 scope.go:117] "RemoveContainer" containerID="b82e6c43895cebfb4bb14e31e44685579305a7be4e90848cdf322215a5d1f29b" Jan 22 07:16:08 crc kubenswrapper[4982]: I0122 07:16:08.719667 4982 scope.go:117] "RemoveContainer" containerID="dd1b1f68267d9d0fc8f31fc896f788451cccffbbd47d3265a31a9c82c393becc" Jan 22 07:16:08 crc kubenswrapper[4982]: E0122 07:16:08.720497 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:16:10 crc kubenswrapper[4982]: I0122 07:16:10.171689 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-qnsmv"] Jan 22 07:16:10 crc kubenswrapper[4982]: E0122 07:16:10.172406 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7c8f367-8cef-467f-806a-ed8a5b068526" containerName="extract-content" Jan 22 07:16:10 crc kubenswrapper[4982]: I0122 07:16:10.172427 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7c8f367-8cef-467f-806a-ed8a5b068526" containerName="extract-content" Jan 22 07:16:10 crc kubenswrapper[4982]: E0122 07:16:10.172448 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7c8f367-8cef-467f-806a-ed8a5b068526" containerName="registry-server" Jan 22 07:16:10 crc kubenswrapper[4982]: I0122 07:16:10.172455 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7c8f367-8cef-467f-806a-ed8a5b068526" containerName="registry-server" Jan 22 07:16:10 crc kubenswrapper[4982]: E0122 07:16:10.172468 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7c8f367-8cef-467f-806a-ed8a5b068526" containerName="extract-utilities" Jan 22 07:16:10 crc kubenswrapper[4982]: I0122 07:16:10.172477 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7c8f367-8cef-467f-806a-ed8a5b068526" containerName="extract-utilities" Jan 22 07:16:10 crc kubenswrapper[4982]: I0122 07:16:10.172729 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7c8f367-8cef-467f-806a-ed8a5b068526" containerName="registry-server" Jan 22 07:16:10 crc kubenswrapper[4982]: I0122 07:16:10.173337 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-qnsmv" Jan 22 07:16:10 crc kubenswrapper[4982]: I0122 07:16:10.180571 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-qnsmv"] Jan 22 07:16:10 crc kubenswrapper[4982]: I0122 07:16:10.272613 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-1e21-account-create-update-kbzw6"] Jan 22 07:16:10 crc kubenswrapper[4982]: I0122 07:16:10.273637 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-1e21-account-create-update-kbzw6" Jan 22 07:16:10 crc kubenswrapper[4982]: I0122 07:16:10.275726 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Jan 22 07:16:10 crc kubenswrapper[4982]: I0122 07:16:10.284989 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-1e21-account-create-update-kbzw6"] Jan 22 07:16:10 crc kubenswrapper[4982]: I0122 07:16:10.314183 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5jhd\" (UniqueName: \"kubernetes.io/projected/d5943822-9460-4636-881e-1a37d605b4c6-kube-api-access-z5jhd\") pod \"barbican-db-create-qnsmv\" (UID: \"d5943822-9460-4636-881e-1a37d605b4c6\") " pod="openstack/barbican-db-create-qnsmv" Jan 22 07:16:10 crc kubenswrapper[4982]: I0122 07:16:10.315187 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d5943822-9460-4636-881e-1a37d605b4c6-operator-scripts\") pod \"barbican-db-create-qnsmv\" (UID: \"d5943822-9460-4636-881e-1a37d605b4c6\") " pod="openstack/barbican-db-create-qnsmv" Jan 22 07:16:10 crc kubenswrapper[4982]: I0122 07:16:10.417357 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z5jhd\" (UniqueName: \"kubernetes.io/projected/d5943822-9460-4636-881e-1a37d605b4c6-kube-api-access-z5jhd\") pod \"barbican-db-create-qnsmv\" (UID: \"d5943822-9460-4636-881e-1a37d605b4c6\") " pod="openstack/barbican-db-create-qnsmv" Jan 22 07:16:10 crc kubenswrapper[4982]: I0122 07:16:10.417406 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d5943822-9460-4636-881e-1a37d605b4c6-operator-scripts\") pod \"barbican-db-create-qnsmv\" (UID: \"d5943822-9460-4636-881e-1a37d605b4c6\") " pod="openstack/barbican-db-create-qnsmv" Jan 22 07:16:10 crc kubenswrapper[4982]: I0122 07:16:10.417465 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d28c8926-6e80-4666-8051-6decfafa89f2-operator-scripts\") pod \"barbican-1e21-account-create-update-kbzw6\" (UID: \"d28c8926-6e80-4666-8051-6decfafa89f2\") " pod="openstack/barbican-1e21-account-create-update-kbzw6" Jan 22 07:16:10 crc kubenswrapper[4982]: I0122 07:16:10.417513 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7sp9l\" (UniqueName: \"kubernetes.io/projected/d28c8926-6e80-4666-8051-6decfafa89f2-kube-api-access-7sp9l\") pod \"barbican-1e21-account-create-update-kbzw6\" (UID: \"d28c8926-6e80-4666-8051-6decfafa89f2\") " pod="openstack/barbican-1e21-account-create-update-kbzw6" Jan 22 07:16:10 crc kubenswrapper[4982]: I0122 07:16:10.418318 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d5943822-9460-4636-881e-1a37d605b4c6-operator-scripts\") pod \"barbican-db-create-qnsmv\" (UID: \"d5943822-9460-4636-881e-1a37d605b4c6\") " pod="openstack/barbican-db-create-qnsmv" Jan 22 07:16:10 crc kubenswrapper[4982]: I0122 07:16:10.438872 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z5jhd\" (UniqueName: \"kubernetes.io/projected/d5943822-9460-4636-881e-1a37d605b4c6-kube-api-access-z5jhd\") pod 
\"barbican-db-create-qnsmv\" (UID: \"d5943822-9460-4636-881e-1a37d605b4c6\") " pod="openstack/barbican-db-create-qnsmv" Jan 22 07:16:10 crc kubenswrapper[4982]: I0122 07:16:10.491614 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-qnsmv" Jan 22 07:16:10 crc kubenswrapper[4982]: I0122 07:16:10.519619 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d28c8926-6e80-4666-8051-6decfafa89f2-operator-scripts\") pod \"barbican-1e21-account-create-update-kbzw6\" (UID: \"d28c8926-6e80-4666-8051-6decfafa89f2\") " pod="openstack/barbican-1e21-account-create-update-kbzw6" Jan 22 07:16:10 crc kubenswrapper[4982]: I0122 07:16:10.519704 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7sp9l\" (UniqueName: \"kubernetes.io/projected/d28c8926-6e80-4666-8051-6decfafa89f2-kube-api-access-7sp9l\") pod \"barbican-1e21-account-create-update-kbzw6\" (UID: \"d28c8926-6e80-4666-8051-6decfafa89f2\") " pod="openstack/barbican-1e21-account-create-update-kbzw6" Jan 22 07:16:10 crc kubenswrapper[4982]: I0122 07:16:10.520548 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d28c8926-6e80-4666-8051-6decfafa89f2-operator-scripts\") pod \"barbican-1e21-account-create-update-kbzw6\" (UID: \"d28c8926-6e80-4666-8051-6decfafa89f2\") " pod="openstack/barbican-1e21-account-create-update-kbzw6" Jan 22 07:16:10 crc kubenswrapper[4982]: I0122 07:16:10.538796 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7sp9l\" (UniqueName: \"kubernetes.io/projected/d28c8926-6e80-4666-8051-6decfafa89f2-kube-api-access-7sp9l\") pod \"barbican-1e21-account-create-update-kbzw6\" (UID: \"d28c8926-6e80-4666-8051-6decfafa89f2\") " pod="openstack/barbican-1e21-account-create-update-kbzw6" Jan 22 07:16:10 crc kubenswrapper[4982]: I0122 07:16:10.589598 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-1e21-account-create-update-kbzw6" Jan 22 07:16:10 crc kubenswrapper[4982]: I0122 07:16:10.879163 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-1e21-account-create-update-kbzw6"] Jan 22 07:16:10 crc kubenswrapper[4982]: I0122 07:16:10.936327 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-qnsmv"] Jan 22 07:16:10 crc kubenswrapper[4982]: W0122 07:16:10.940231 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd5943822_9460_4636_881e_1a37d605b4c6.slice/crio-06d4b180c017720b36bbf92482d9a9aede19a92f82f6a2520fa5471603c6c2c9 WatchSource:0}: Error finding container 06d4b180c017720b36bbf92482d9a9aede19a92f82f6a2520fa5471603c6c2c9: Status 404 returned error can't find the container with id 06d4b180c017720b36bbf92482d9a9aede19a92f82f6a2520fa5471603c6c2c9 Jan 22 07:16:11 crc kubenswrapper[4982]: I0122 07:16:11.485925 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-qnsmv" event={"ID":"d5943822-9460-4636-881e-1a37d605b4c6","Type":"ContainerStarted","Data":"06d4b180c017720b36bbf92482d9a9aede19a92f82f6a2520fa5471603c6c2c9"} Jan 22 07:16:11 crc kubenswrapper[4982]: I0122 07:16:11.487270 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-1e21-account-create-update-kbzw6" event={"ID":"d28c8926-6e80-4666-8051-6decfafa89f2","Type":"ContainerStarted","Data":"ee79dd892430dcb068c931abe429bce2582123e1f71a496c4093e0d6c82e567b"} Jan 22 07:16:12 crc kubenswrapper[4982]: I0122 07:16:12.503495 4982 generic.go:334] "Generic (PLEG): container finished" podID="d5943822-9460-4636-881e-1a37d605b4c6" containerID="cf075430cd109d549993ac7c9f8dab26b6eddc22f78e512d93c12e0470c0a501" exitCode=0 Jan 22 07:16:12 crc kubenswrapper[4982]: I0122 07:16:12.503686 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-qnsmv" event={"ID":"d5943822-9460-4636-881e-1a37d605b4c6","Type":"ContainerDied","Data":"cf075430cd109d549993ac7c9f8dab26b6eddc22f78e512d93c12e0470c0a501"} Jan 22 07:16:12 crc kubenswrapper[4982]: I0122 07:16:12.507895 4982 generic.go:334] "Generic (PLEG): container finished" podID="d28c8926-6e80-4666-8051-6decfafa89f2" containerID="8c0c5c8f5d0268219022e3a0d3a8996b4f72ba2d3516a2f360ce1e767696eebe" exitCode=0 Jan 22 07:16:12 crc kubenswrapper[4982]: I0122 07:16:12.508044 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-1e21-account-create-update-kbzw6" event={"ID":"d28c8926-6e80-4666-8051-6decfafa89f2","Type":"ContainerDied","Data":"8c0c5c8f5d0268219022e3a0d3a8996b4f72ba2d3516a2f360ce1e767696eebe"} Jan 22 07:16:13 crc kubenswrapper[4982]: I0122 07:16:13.904879 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-qnsmv" Jan 22 07:16:13 crc kubenswrapper[4982]: I0122 07:16:13.910866 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-1e21-account-create-update-kbzw6" Jan 22 07:16:13 crc kubenswrapper[4982]: I0122 07:16:13.992771 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z5jhd\" (UniqueName: \"kubernetes.io/projected/d5943822-9460-4636-881e-1a37d605b4c6-kube-api-access-z5jhd\") pod \"d5943822-9460-4636-881e-1a37d605b4c6\" (UID: \"d5943822-9460-4636-881e-1a37d605b4c6\") " Jan 22 07:16:13 crc kubenswrapper[4982]: I0122 07:16:13.992986 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d5943822-9460-4636-881e-1a37d605b4c6-operator-scripts\") pod \"d5943822-9460-4636-881e-1a37d605b4c6\" (UID: \"d5943822-9460-4636-881e-1a37d605b4c6\") " Jan 22 07:16:13 crc kubenswrapper[4982]: I0122 07:16:13.993016 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7sp9l\" (UniqueName: \"kubernetes.io/projected/d28c8926-6e80-4666-8051-6decfafa89f2-kube-api-access-7sp9l\") pod \"d28c8926-6e80-4666-8051-6decfafa89f2\" (UID: \"d28c8926-6e80-4666-8051-6decfafa89f2\") " Jan 22 07:16:13 crc kubenswrapper[4982]: I0122 07:16:13.993108 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d28c8926-6e80-4666-8051-6decfafa89f2-operator-scripts\") pod \"d28c8926-6e80-4666-8051-6decfafa89f2\" (UID: \"d28c8926-6e80-4666-8051-6decfafa89f2\") " Jan 22 07:16:13 crc kubenswrapper[4982]: I0122 07:16:13.994096 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d28c8926-6e80-4666-8051-6decfafa89f2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d28c8926-6e80-4666-8051-6decfafa89f2" (UID: "d28c8926-6e80-4666-8051-6decfafa89f2"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:16:13 crc kubenswrapper[4982]: I0122 07:16:13.994347 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5943822-9460-4636-881e-1a37d605b4c6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d5943822-9460-4636-881e-1a37d605b4c6" (UID: "d5943822-9460-4636-881e-1a37d605b4c6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:16:13 crc kubenswrapper[4982]: I0122 07:16:13.998599 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5943822-9460-4636-881e-1a37d605b4c6-kube-api-access-z5jhd" (OuterVolumeSpecName: "kube-api-access-z5jhd") pod "d5943822-9460-4636-881e-1a37d605b4c6" (UID: "d5943822-9460-4636-881e-1a37d605b4c6"). InnerVolumeSpecName "kube-api-access-z5jhd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:16:13 crc kubenswrapper[4982]: I0122 07:16:13.998933 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d28c8926-6e80-4666-8051-6decfafa89f2-kube-api-access-7sp9l" (OuterVolumeSpecName: "kube-api-access-7sp9l") pod "d28c8926-6e80-4666-8051-6decfafa89f2" (UID: "d28c8926-6e80-4666-8051-6decfafa89f2"). InnerVolumeSpecName "kube-api-access-7sp9l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:16:14 crc kubenswrapper[4982]: I0122 07:16:14.096721 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z5jhd\" (UniqueName: \"kubernetes.io/projected/d5943822-9460-4636-881e-1a37d605b4c6-kube-api-access-z5jhd\") on node \"crc\" DevicePath \"\"" Jan 22 07:16:14 crc kubenswrapper[4982]: I0122 07:16:14.096776 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d5943822-9460-4636-881e-1a37d605b4c6-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:16:14 crc kubenswrapper[4982]: I0122 07:16:14.096789 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7sp9l\" (UniqueName: \"kubernetes.io/projected/d28c8926-6e80-4666-8051-6decfafa89f2-kube-api-access-7sp9l\") on node \"crc\" DevicePath \"\"" Jan 22 07:16:14 crc kubenswrapper[4982]: I0122 07:16:14.096801 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d28c8926-6e80-4666-8051-6decfafa89f2-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:16:14 crc kubenswrapper[4982]: I0122 07:16:14.524193 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-qnsmv" Jan 22 07:16:14 crc kubenswrapper[4982]: I0122 07:16:14.524190 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-qnsmv" event={"ID":"d5943822-9460-4636-881e-1a37d605b4c6","Type":"ContainerDied","Data":"06d4b180c017720b36bbf92482d9a9aede19a92f82f6a2520fa5471603c6c2c9"} Jan 22 07:16:14 crc kubenswrapper[4982]: I0122 07:16:14.524703 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="06d4b180c017720b36bbf92482d9a9aede19a92f82f6a2520fa5471603c6c2c9" Jan 22 07:16:14 crc kubenswrapper[4982]: I0122 07:16:14.526679 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-1e21-account-create-update-kbzw6" event={"ID":"d28c8926-6e80-4666-8051-6decfafa89f2","Type":"ContainerDied","Data":"ee79dd892430dcb068c931abe429bce2582123e1f71a496c4093e0d6c82e567b"} Jan 22 07:16:14 crc kubenswrapper[4982]: I0122 07:16:14.526730 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ee79dd892430dcb068c931abe429bce2582123e1f71a496c4093e0d6c82e567b" Jan 22 07:16:14 crc kubenswrapper[4982]: I0122 07:16:14.526751 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-1e21-account-create-update-kbzw6" Jan 22 07:16:14 crc kubenswrapper[4982]: E0122 07:16:14.666348 4982 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd5943822_9460_4636_881e_1a37d605b4c6.slice\": RecentStats: unable to find data in memory cache]" Jan 22 07:16:15 crc kubenswrapper[4982]: I0122 07:16:15.636322 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-jqfl4"] Jan 22 07:16:15 crc kubenswrapper[4982]: E0122 07:16:15.637030 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d28c8926-6e80-4666-8051-6decfafa89f2" containerName="mariadb-account-create-update" Jan 22 07:16:15 crc kubenswrapper[4982]: I0122 07:16:15.637046 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d28c8926-6e80-4666-8051-6decfafa89f2" containerName="mariadb-account-create-update" Jan 22 07:16:15 crc kubenswrapper[4982]: E0122 07:16:15.637073 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5943822-9460-4636-881e-1a37d605b4c6" containerName="mariadb-database-create" Jan 22 07:16:15 crc kubenswrapper[4982]: I0122 07:16:15.637082 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5943822-9460-4636-881e-1a37d605b4c6" containerName="mariadb-database-create" Jan 22 07:16:15 crc kubenswrapper[4982]: I0122 07:16:15.637293 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d28c8926-6e80-4666-8051-6decfafa89f2" containerName="mariadb-account-create-update" Jan 22 07:16:15 crc kubenswrapper[4982]: I0122 07:16:15.637348 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5943822-9460-4636-881e-1a37d605b4c6" containerName="mariadb-database-create" Jan 22 07:16:15 crc kubenswrapper[4982]: I0122 07:16:15.638015 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-jqfl4" Jan 22 07:16:15 crc kubenswrapper[4982]: I0122 07:16:15.640615 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Jan 22 07:16:15 crc kubenswrapper[4982]: I0122 07:16:15.640819 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-6dtsd" Jan 22 07:16:15 crc kubenswrapper[4982]: I0122 07:16:15.654575 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-jqfl4"] Jan 22 07:16:15 crc kubenswrapper[4982]: I0122 07:16:15.735417 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/cc572594-caf5-409f-a25f-31d354137ba1-db-sync-config-data\") pod \"barbican-db-sync-jqfl4\" (UID: \"cc572594-caf5-409f-a25f-31d354137ba1\") " pod="openstack/barbican-db-sync-jqfl4" Jan 22 07:16:15 crc kubenswrapper[4982]: I0122 07:16:15.735476 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lz7qg\" (UniqueName: \"kubernetes.io/projected/cc572594-caf5-409f-a25f-31d354137ba1-kube-api-access-lz7qg\") pod \"barbican-db-sync-jqfl4\" (UID: \"cc572594-caf5-409f-a25f-31d354137ba1\") " pod="openstack/barbican-db-sync-jqfl4" Jan 22 07:16:15 crc kubenswrapper[4982]: I0122 07:16:15.735539 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc572594-caf5-409f-a25f-31d354137ba1-combined-ca-bundle\") pod \"barbican-db-sync-jqfl4\" (UID: \"cc572594-caf5-409f-a25f-31d354137ba1\") " pod="openstack/barbican-db-sync-jqfl4" Jan 22 07:16:15 crc kubenswrapper[4982]: I0122 07:16:15.836574 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc572594-caf5-409f-a25f-31d354137ba1-combined-ca-bundle\") pod \"barbican-db-sync-jqfl4\" (UID: \"cc572594-caf5-409f-a25f-31d354137ba1\") " pod="openstack/barbican-db-sync-jqfl4" Jan 22 07:16:15 crc kubenswrapper[4982]: I0122 07:16:15.836646 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/cc572594-caf5-409f-a25f-31d354137ba1-db-sync-config-data\") pod \"barbican-db-sync-jqfl4\" (UID: \"cc572594-caf5-409f-a25f-31d354137ba1\") " pod="openstack/barbican-db-sync-jqfl4" Jan 22 07:16:15 crc kubenswrapper[4982]: I0122 07:16:15.836697 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lz7qg\" (UniqueName: \"kubernetes.io/projected/cc572594-caf5-409f-a25f-31d354137ba1-kube-api-access-lz7qg\") pod \"barbican-db-sync-jqfl4\" (UID: \"cc572594-caf5-409f-a25f-31d354137ba1\") " pod="openstack/barbican-db-sync-jqfl4" Jan 22 07:16:15 crc kubenswrapper[4982]: I0122 07:16:15.841577 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/cc572594-caf5-409f-a25f-31d354137ba1-db-sync-config-data\") pod \"barbican-db-sync-jqfl4\" (UID: \"cc572594-caf5-409f-a25f-31d354137ba1\") " pod="openstack/barbican-db-sync-jqfl4" Jan 22 07:16:15 crc kubenswrapper[4982]: I0122 07:16:15.841808 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/cc572594-caf5-409f-a25f-31d354137ba1-combined-ca-bundle\") pod \"barbican-db-sync-jqfl4\" (UID: \"cc572594-caf5-409f-a25f-31d354137ba1\") " pod="openstack/barbican-db-sync-jqfl4" Jan 22 07:16:15 crc kubenswrapper[4982]: I0122 07:16:15.871285 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lz7qg\" (UniqueName: \"kubernetes.io/projected/cc572594-caf5-409f-a25f-31d354137ba1-kube-api-access-lz7qg\") pod \"barbican-db-sync-jqfl4\" (UID: \"cc572594-caf5-409f-a25f-31d354137ba1\") " pod="openstack/barbican-db-sync-jqfl4" Jan 22 07:16:15 crc kubenswrapper[4982]: I0122 07:16:15.959249 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-jqfl4" Jan 22 07:16:16 crc kubenswrapper[4982]: I0122 07:16:16.457653 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-jqfl4"] Jan 22 07:16:16 crc kubenswrapper[4982]: I0122 07:16:16.541518 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jqfl4" event={"ID":"cc572594-caf5-409f-a25f-31d354137ba1","Type":"ContainerStarted","Data":"2274ed3a05a4b4a504d3e2482aba244ed9094c529b324f73d550a8665d902459"} Jan 22 07:16:17 crc kubenswrapper[4982]: I0122 07:16:17.560042 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jqfl4" event={"ID":"cc572594-caf5-409f-a25f-31d354137ba1","Type":"ContainerStarted","Data":"e1ad03a89efc3ad91fd023e9f8816b319215301524b8089f74efe076dc0ad631"} Jan 22 07:16:17 crc kubenswrapper[4982]: I0122 07:16:17.578395 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-jqfl4" podStartSLOduration=2.578376705 podStartE2EDuration="2.578376705s" podCreationTimestamp="2026-01-22 07:16:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:16:17.57595621 +0000 UTC m=+5438.414594223" watchObservedRunningTime="2026-01-22 07:16:17.578376705 +0000 UTC m=+5438.417014708" Jan 22 07:16:18 crc kubenswrapper[4982]: I0122 07:16:18.590393 4982 generic.go:334] "Generic (PLEG): container finished" podID="cc572594-caf5-409f-a25f-31d354137ba1" containerID="e1ad03a89efc3ad91fd023e9f8816b319215301524b8089f74efe076dc0ad631" exitCode=0 Jan 22 07:16:18 crc kubenswrapper[4982]: I0122 07:16:18.590732 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jqfl4" event={"ID":"cc572594-caf5-409f-a25f-31d354137ba1","Type":"ContainerDied","Data":"e1ad03a89efc3ad91fd023e9f8816b319215301524b8089f74efe076dc0ad631"} Jan 22 07:16:19 crc kubenswrapper[4982]: I0122 07:16:19.728189 4982 scope.go:117] "RemoveContainer" containerID="dd1b1f68267d9d0fc8f31fc896f788451cccffbbd47d3265a31a9c82c393becc" Jan 22 07:16:19 crc kubenswrapper[4982]: E0122 07:16:19.728707 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:16:19 crc kubenswrapper[4982]: I0122 07:16:19.988766 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-jqfl4" Jan 22 07:16:20 crc kubenswrapper[4982]: I0122 07:16:20.004845 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/cc572594-caf5-409f-a25f-31d354137ba1-db-sync-config-data\") pod \"cc572594-caf5-409f-a25f-31d354137ba1\" (UID: \"cc572594-caf5-409f-a25f-31d354137ba1\") " Jan 22 07:16:20 crc kubenswrapper[4982]: I0122 07:16:20.004907 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc572594-caf5-409f-a25f-31d354137ba1-combined-ca-bundle\") pod \"cc572594-caf5-409f-a25f-31d354137ba1\" (UID: \"cc572594-caf5-409f-a25f-31d354137ba1\") " Jan 22 07:16:20 crc kubenswrapper[4982]: I0122 07:16:20.010083 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc572594-caf5-409f-a25f-31d354137ba1-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "cc572594-caf5-409f-a25f-31d354137ba1" (UID: "cc572594-caf5-409f-a25f-31d354137ba1"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:16:20 crc kubenswrapper[4982]: I0122 07:16:20.033754 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc572594-caf5-409f-a25f-31d354137ba1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cc572594-caf5-409f-a25f-31d354137ba1" (UID: "cc572594-caf5-409f-a25f-31d354137ba1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:16:20 crc kubenswrapper[4982]: I0122 07:16:20.105733 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz7qg\" (UniqueName: \"kubernetes.io/projected/cc572594-caf5-409f-a25f-31d354137ba1-kube-api-access-lz7qg\") pod \"cc572594-caf5-409f-a25f-31d354137ba1\" (UID: \"cc572594-caf5-409f-a25f-31d354137ba1\") " Jan 22 07:16:20 crc kubenswrapper[4982]: I0122 07:16:20.106272 4982 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/cc572594-caf5-409f-a25f-31d354137ba1-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:16:20 crc kubenswrapper[4982]: I0122 07:16:20.106300 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc572594-caf5-409f-a25f-31d354137ba1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:16:20 crc kubenswrapper[4982]: I0122 07:16:20.109400 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc572594-caf5-409f-a25f-31d354137ba1-kube-api-access-lz7qg" (OuterVolumeSpecName: "kube-api-access-lz7qg") pod "cc572594-caf5-409f-a25f-31d354137ba1" (UID: "cc572594-caf5-409f-a25f-31d354137ba1"). InnerVolumeSpecName "kube-api-access-lz7qg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:16:20 crc kubenswrapper[4982]: I0122 07:16:20.207634 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz7qg\" (UniqueName: \"kubernetes.io/projected/cc572594-caf5-409f-a25f-31d354137ba1-kube-api-access-lz7qg\") on node \"crc\" DevicePath \"\"" Jan 22 07:16:20 crc kubenswrapper[4982]: I0122 07:16:20.614092 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jqfl4" event={"ID":"cc572594-caf5-409f-a25f-31d354137ba1","Type":"ContainerDied","Data":"2274ed3a05a4b4a504d3e2482aba244ed9094c529b324f73d550a8665d902459"} Jan 22 07:16:20 crc kubenswrapper[4982]: I0122 07:16:20.614171 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2274ed3a05a4b4a504d3e2482aba244ed9094c529b324f73d550a8665d902459" Jan 22 07:16:20 crc kubenswrapper[4982]: I0122 07:16:20.614798 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-jqfl4" Jan 22 07:16:20 crc kubenswrapper[4982]: I0122 07:16:20.817201 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-6d5cd9b87c-dt272"] Jan 22 07:16:20 crc kubenswrapper[4982]: E0122 07:16:20.817533 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc572594-caf5-409f-a25f-31d354137ba1" containerName="barbican-db-sync" Jan 22 07:16:20 crc kubenswrapper[4982]: I0122 07:16:20.817546 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc572594-caf5-409f-a25f-31d354137ba1" containerName="barbican-db-sync" Jan 22 07:16:20 crc kubenswrapper[4982]: I0122 07:16:20.817713 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc572594-caf5-409f-a25f-31d354137ba1" containerName="barbican-db-sync" Jan 22 07:16:20 crc kubenswrapper[4982]: I0122 07:16:20.818599 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6d5cd9b87c-dt272" Jan 22 07:16:20 crc kubenswrapper[4982]: I0122 07:16:20.823193 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-6dtsd" Jan 22 07:16:20 crc kubenswrapper[4982]: I0122 07:16:20.823467 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Jan 22 07:16:20 crc kubenswrapper[4982]: I0122 07:16:20.823780 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Jan 22 07:16:20 crc kubenswrapper[4982]: I0122 07:16:20.830706 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-775476cb98-9sq5r"] Jan 22 07:16:20 crc kubenswrapper[4982]: I0122 07:16:20.835703 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-775476cb98-9sq5r" Jan 22 07:16:20 crc kubenswrapper[4982]: I0122 07:16:20.838260 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Jan 22 07:16:20 crc kubenswrapper[4982]: I0122 07:16:20.859324 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6d5cd9b87c-dt272"] Jan 22 07:16:20 crc kubenswrapper[4982]: I0122 07:16:20.876248 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-775476cb98-9sq5r"] Jan 22 07:16:20 crc kubenswrapper[4982]: I0122 07:16:20.920058 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ceadad66-b472-4bf5-ba62-2d91e7eb8b9f-config-data-custom\") pod \"barbican-worker-6d5cd9b87c-dt272\" (UID: \"ceadad66-b472-4bf5-ba62-2d91e7eb8b9f\") " pod="openstack/barbican-worker-6d5cd9b87c-dt272" Jan 22 07:16:20 crc kubenswrapper[4982]: I0122 07:16:20.920110 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ceadad66-b472-4bf5-ba62-2d91e7eb8b9f-config-data\") pod \"barbican-worker-6d5cd9b87c-dt272\" (UID: \"ceadad66-b472-4bf5-ba62-2d91e7eb8b9f\") " pod="openstack/barbican-worker-6d5cd9b87c-dt272" Jan 22 07:16:20 crc kubenswrapper[4982]: I0122 07:16:20.920220 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ceadad66-b472-4bf5-ba62-2d91e7eb8b9f-combined-ca-bundle\") pod \"barbican-worker-6d5cd9b87c-dt272\" (UID: \"ceadad66-b472-4bf5-ba62-2d91e7eb8b9f\") " pod="openstack/barbican-worker-6d5cd9b87c-dt272" Jan 22 07:16:20 crc kubenswrapper[4982]: I0122 07:16:20.920265 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ceadad66-b472-4bf5-ba62-2d91e7eb8b9f-logs\") pod \"barbican-worker-6d5cd9b87c-dt272\" (UID: \"ceadad66-b472-4bf5-ba62-2d91e7eb8b9f\") " pod="openstack/barbican-worker-6d5cd9b87c-dt272" Jan 22 07:16:20 crc kubenswrapper[4982]: I0122 07:16:20.920281 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lhvc\" (UniqueName: \"kubernetes.io/projected/ceadad66-b472-4bf5-ba62-2d91e7eb8b9f-kube-api-access-7lhvc\") pod \"barbican-worker-6d5cd9b87c-dt272\" (UID: \"ceadad66-b472-4bf5-ba62-2d91e7eb8b9f\") " pod="openstack/barbican-worker-6d5cd9b87c-dt272" Jan 22 07:16:20 crc kubenswrapper[4982]: I0122 07:16:20.952618 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-566bf687f9-kdl4x"] Jan 22 07:16:20 crc kubenswrapper[4982]: I0122 07:16:20.954529 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-566bf687f9-kdl4x" Jan 22 07:16:20 crc kubenswrapper[4982]: I0122 07:16:20.969568 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-566bf687f9-kdl4x"] Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.022073 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8741cfe8-0ee7-41ac-9740-b58be69eb376-config-data-custom\") pod \"barbican-keystone-listener-775476cb98-9sq5r\" (UID: \"8741cfe8-0ee7-41ac-9740-b58be69eb376\") " pod="openstack/barbican-keystone-listener-775476cb98-9sq5r" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.022458 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ceadad66-b472-4bf5-ba62-2d91e7eb8b9f-config-data\") pod \"barbican-worker-6d5cd9b87c-dt272\" (UID: \"ceadad66-b472-4bf5-ba62-2d91e7eb8b9f\") " pod="openstack/barbican-worker-6d5cd9b87c-dt272" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.022480 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t666t\" (UniqueName: \"kubernetes.io/projected/8741cfe8-0ee7-41ac-9740-b58be69eb376-kube-api-access-t666t\") pod \"barbican-keystone-listener-775476cb98-9sq5r\" (UID: \"8741cfe8-0ee7-41ac-9740-b58be69eb376\") " pod="openstack/barbican-keystone-listener-775476cb98-9sq5r" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.022508 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8741cfe8-0ee7-41ac-9740-b58be69eb376-combined-ca-bundle\") pod \"barbican-keystone-listener-775476cb98-9sq5r\" (UID: \"8741cfe8-0ee7-41ac-9740-b58be69eb376\") " pod="openstack/barbican-keystone-listener-775476cb98-9sq5r" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.022577 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ceadad66-b472-4bf5-ba62-2d91e7eb8b9f-combined-ca-bundle\") pod \"barbican-worker-6d5cd9b87c-dt272\" (UID: \"ceadad66-b472-4bf5-ba62-2d91e7eb8b9f\") " pod="openstack/barbican-worker-6d5cd9b87c-dt272" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.022610 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8741cfe8-0ee7-41ac-9740-b58be69eb376-logs\") pod \"barbican-keystone-listener-775476cb98-9sq5r\" (UID: \"8741cfe8-0ee7-41ac-9740-b58be69eb376\") " pod="openstack/barbican-keystone-listener-775476cb98-9sq5r" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.022632 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ceadad66-b472-4bf5-ba62-2d91e7eb8b9f-logs\") pod \"barbican-worker-6d5cd9b87c-dt272\" (UID: \"ceadad66-b472-4bf5-ba62-2d91e7eb8b9f\") " pod="openstack/barbican-worker-6d5cd9b87c-dt272" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.022648 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8741cfe8-0ee7-41ac-9740-b58be69eb376-config-data\") pod \"barbican-keystone-listener-775476cb98-9sq5r\" (UID: \"8741cfe8-0ee7-41ac-9740-b58be69eb376\") " 
pod="openstack/barbican-keystone-listener-775476cb98-9sq5r" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.022668 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lhvc\" (UniqueName: \"kubernetes.io/projected/ceadad66-b472-4bf5-ba62-2d91e7eb8b9f-kube-api-access-7lhvc\") pod \"barbican-worker-6d5cd9b87c-dt272\" (UID: \"ceadad66-b472-4bf5-ba62-2d91e7eb8b9f\") " pod="openstack/barbican-worker-6d5cd9b87c-dt272" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.022710 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ceadad66-b472-4bf5-ba62-2d91e7eb8b9f-config-data-custom\") pod \"barbican-worker-6d5cd9b87c-dt272\" (UID: \"ceadad66-b472-4bf5-ba62-2d91e7eb8b9f\") " pod="openstack/barbican-worker-6d5cd9b87c-dt272" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.026396 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ceadad66-b472-4bf5-ba62-2d91e7eb8b9f-logs\") pod \"barbican-worker-6d5cd9b87c-dt272\" (UID: \"ceadad66-b472-4bf5-ba62-2d91e7eb8b9f\") " pod="openstack/barbican-worker-6d5cd9b87c-dt272" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.030822 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ceadad66-b472-4bf5-ba62-2d91e7eb8b9f-config-data-custom\") pod \"barbican-worker-6d5cd9b87c-dt272\" (UID: \"ceadad66-b472-4bf5-ba62-2d91e7eb8b9f\") " pod="openstack/barbican-worker-6d5cd9b87c-dt272" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.037521 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ceadad66-b472-4bf5-ba62-2d91e7eb8b9f-config-data\") pod \"barbican-worker-6d5cd9b87c-dt272\" (UID: \"ceadad66-b472-4bf5-ba62-2d91e7eb8b9f\") " pod="openstack/barbican-worker-6d5cd9b87c-dt272" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.037710 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ceadad66-b472-4bf5-ba62-2d91e7eb8b9f-combined-ca-bundle\") pod \"barbican-worker-6d5cd9b87c-dt272\" (UID: \"ceadad66-b472-4bf5-ba62-2d91e7eb8b9f\") " pod="openstack/barbican-worker-6d5cd9b87c-dt272" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.041314 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-856577cd68-rkqcj"] Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.047659 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-856577cd68-rkqcj" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.049382 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lhvc\" (UniqueName: \"kubernetes.io/projected/ceadad66-b472-4bf5-ba62-2d91e7eb8b9f-kube-api-access-7lhvc\") pod \"barbican-worker-6d5cd9b87c-dt272\" (UID: \"ceadad66-b472-4bf5-ba62-2d91e7eb8b9f\") " pod="openstack/barbican-worker-6d5cd9b87c-dt272" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.052307 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.071680 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-856577cd68-rkqcj"] Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.124505 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/51525290-f872-41f4-98b9-544b54873ed6-ovsdbserver-sb\") pod \"dnsmasq-dns-566bf687f9-kdl4x\" (UID: \"51525290-f872-41f4-98b9-544b54873ed6\") " pod="openstack/dnsmasq-dns-566bf687f9-kdl4x" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.124565 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8741cfe8-0ee7-41ac-9740-b58be69eb376-logs\") pod \"barbican-keystone-listener-775476cb98-9sq5r\" (UID: \"8741cfe8-0ee7-41ac-9740-b58be69eb376\") " pod="openstack/barbican-keystone-listener-775476cb98-9sq5r" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.124688 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8741cfe8-0ee7-41ac-9740-b58be69eb376-config-data\") pod \"barbican-keystone-listener-775476cb98-9sq5r\" (UID: \"8741cfe8-0ee7-41ac-9740-b58be69eb376\") " pod="openstack/barbican-keystone-listener-775476cb98-9sq5r" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.124829 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/51525290-f872-41f4-98b9-544b54873ed6-ovsdbserver-nb\") pod \"dnsmasq-dns-566bf687f9-kdl4x\" (UID: \"51525290-f872-41f4-98b9-544b54873ed6\") " pod="openstack/dnsmasq-dns-566bf687f9-kdl4x" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.124914 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51525290-f872-41f4-98b9-544b54873ed6-config\") pod \"dnsmasq-dns-566bf687f9-kdl4x\" (UID: \"51525290-f872-41f4-98b9-544b54873ed6\") " pod="openstack/dnsmasq-dns-566bf687f9-kdl4x" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.124964 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8741cfe8-0ee7-41ac-9740-b58be69eb376-config-data-custom\") pod \"barbican-keystone-listener-775476cb98-9sq5r\" (UID: \"8741cfe8-0ee7-41ac-9740-b58be69eb376\") " pod="openstack/barbican-keystone-listener-775476cb98-9sq5r" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.124994 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8741cfe8-0ee7-41ac-9740-b58be69eb376-logs\") pod \"barbican-keystone-listener-775476cb98-9sq5r\" (UID: 
\"8741cfe8-0ee7-41ac-9740-b58be69eb376\") " pod="openstack/barbican-keystone-listener-775476cb98-9sq5r" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.129746 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8741cfe8-0ee7-41ac-9740-b58be69eb376-config-data\") pod \"barbican-keystone-listener-775476cb98-9sq5r\" (UID: \"8741cfe8-0ee7-41ac-9740-b58be69eb376\") " pod="openstack/barbican-keystone-listener-775476cb98-9sq5r" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.124999 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t666t\" (UniqueName: \"kubernetes.io/projected/8741cfe8-0ee7-41ac-9740-b58be69eb376-kube-api-access-t666t\") pod \"barbican-keystone-listener-775476cb98-9sq5r\" (UID: \"8741cfe8-0ee7-41ac-9740-b58be69eb376\") " pod="openstack/barbican-keystone-listener-775476cb98-9sq5r" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.130315 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8741cfe8-0ee7-41ac-9740-b58be69eb376-combined-ca-bundle\") pod \"barbican-keystone-listener-775476cb98-9sq5r\" (UID: \"8741cfe8-0ee7-41ac-9740-b58be69eb376\") " pod="openstack/barbican-keystone-listener-775476cb98-9sq5r" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.130373 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/51525290-f872-41f4-98b9-544b54873ed6-dns-svc\") pod \"dnsmasq-dns-566bf687f9-kdl4x\" (UID: \"51525290-f872-41f4-98b9-544b54873ed6\") " pod="openstack/dnsmasq-dns-566bf687f9-kdl4x" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.130591 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xr5rb\" (UniqueName: \"kubernetes.io/projected/51525290-f872-41f4-98b9-544b54873ed6-kube-api-access-xr5rb\") pod \"dnsmasq-dns-566bf687f9-kdl4x\" (UID: \"51525290-f872-41f4-98b9-544b54873ed6\") " pod="openstack/dnsmasq-dns-566bf687f9-kdl4x" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.134900 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8741cfe8-0ee7-41ac-9740-b58be69eb376-config-data-custom\") pod \"barbican-keystone-listener-775476cb98-9sq5r\" (UID: \"8741cfe8-0ee7-41ac-9740-b58be69eb376\") " pod="openstack/barbican-keystone-listener-775476cb98-9sq5r" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.136528 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8741cfe8-0ee7-41ac-9740-b58be69eb376-combined-ca-bundle\") pod \"barbican-keystone-listener-775476cb98-9sq5r\" (UID: \"8741cfe8-0ee7-41ac-9740-b58be69eb376\") " pod="openstack/barbican-keystone-listener-775476cb98-9sq5r" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.144145 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t666t\" (UniqueName: \"kubernetes.io/projected/8741cfe8-0ee7-41ac-9740-b58be69eb376-kube-api-access-t666t\") pod \"barbican-keystone-listener-775476cb98-9sq5r\" (UID: \"8741cfe8-0ee7-41ac-9740-b58be69eb376\") " pod="openstack/barbican-keystone-listener-775476cb98-9sq5r" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.154802 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-6d5cd9b87c-dt272" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.169740 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-775476cb98-9sq5r" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.231624 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1be85ddb-0864-4dc7-bd72-8f3cc1443bd5-config-data-custom\") pod \"barbican-api-856577cd68-rkqcj\" (UID: \"1be85ddb-0864-4dc7-bd72-8f3cc1443bd5\") " pod="openstack/barbican-api-856577cd68-rkqcj" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.231675 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/51525290-f872-41f4-98b9-544b54873ed6-ovsdbserver-nb\") pod \"dnsmasq-dns-566bf687f9-kdl4x\" (UID: \"51525290-f872-41f4-98b9-544b54873ed6\") " pod="openstack/dnsmasq-dns-566bf687f9-kdl4x" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.231711 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pn796\" (UniqueName: \"kubernetes.io/projected/1be85ddb-0864-4dc7-bd72-8f3cc1443bd5-kube-api-access-pn796\") pod \"barbican-api-856577cd68-rkqcj\" (UID: \"1be85ddb-0864-4dc7-bd72-8f3cc1443bd5\") " pod="openstack/barbican-api-856577cd68-rkqcj" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.231730 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51525290-f872-41f4-98b9-544b54873ed6-config\") pod \"dnsmasq-dns-566bf687f9-kdl4x\" (UID: \"51525290-f872-41f4-98b9-544b54873ed6\") " pod="openstack/dnsmasq-dns-566bf687f9-kdl4x" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.231749 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1be85ddb-0864-4dc7-bd72-8f3cc1443bd5-logs\") pod \"barbican-api-856577cd68-rkqcj\" (UID: \"1be85ddb-0864-4dc7-bd72-8f3cc1443bd5\") " pod="openstack/barbican-api-856577cd68-rkqcj" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.231790 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/51525290-f872-41f4-98b9-544b54873ed6-dns-svc\") pod \"dnsmasq-dns-566bf687f9-kdl4x\" (UID: \"51525290-f872-41f4-98b9-544b54873ed6\") " pod="openstack/dnsmasq-dns-566bf687f9-kdl4x" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.231810 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1be85ddb-0864-4dc7-bd72-8f3cc1443bd5-combined-ca-bundle\") pod \"barbican-api-856577cd68-rkqcj\" (UID: \"1be85ddb-0864-4dc7-bd72-8f3cc1443bd5\") " pod="openstack/barbican-api-856577cd68-rkqcj" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.231886 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xr5rb\" (UniqueName: \"kubernetes.io/projected/51525290-f872-41f4-98b9-544b54873ed6-kube-api-access-xr5rb\") pod \"dnsmasq-dns-566bf687f9-kdl4x\" (UID: \"51525290-f872-41f4-98b9-544b54873ed6\") " pod="openstack/dnsmasq-dns-566bf687f9-kdl4x" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.231908 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1be85ddb-0864-4dc7-bd72-8f3cc1443bd5-config-data\") pod \"barbican-api-856577cd68-rkqcj\" (UID: \"1be85ddb-0864-4dc7-bd72-8f3cc1443bd5\") " pod="openstack/barbican-api-856577cd68-rkqcj" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.231925 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/51525290-f872-41f4-98b9-544b54873ed6-ovsdbserver-sb\") pod \"dnsmasq-dns-566bf687f9-kdl4x\" (UID: \"51525290-f872-41f4-98b9-544b54873ed6\") " pod="openstack/dnsmasq-dns-566bf687f9-kdl4x" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.232747 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/51525290-f872-41f4-98b9-544b54873ed6-ovsdbserver-sb\") pod \"dnsmasq-dns-566bf687f9-kdl4x\" (UID: \"51525290-f872-41f4-98b9-544b54873ed6\") " pod="openstack/dnsmasq-dns-566bf687f9-kdl4x" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.234089 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/51525290-f872-41f4-98b9-544b54873ed6-dns-svc\") pod \"dnsmasq-dns-566bf687f9-kdl4x\" (UID: \"51525290-f872-41f4-98b9-544b54873ed6\") " pod="openstack/dnsmasq-dns-566bf687f9-kdl4x" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.234565 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51525290-f872-41f4-98b9-544b54873ed6-config\") pod \"dnsmasq-dns-566bf687f9-kdl4x\" (UID: \"51525290-f872-41f4-98b9-544b54873ed6\") " pod="openstack/dnsmasq-dns-566bf687f9-kdl4x" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.234680 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/51525290-f872-41f4-98b9-544b54873ed6-ovsdbserver-nb\") pod \"dnsmasq-dns-566bf687f9-kdl4x\" (UID: \"51525290-f872-41f4-98b9-544b54873ed6\") " pod="openstack/dnsmasq-dns-566bf687f9-kdl4x" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.261777 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xr5rb\" (UniqueName: \"kubernetes.io/projected/51525290-f872-41f4-98b9-544b54873ed6-kube-api-access-xr5rb\") pod \"dnsmasq-dns-566bf687f9-kdl4x\" (UID: \"51525290-f872-41f4-98b9-544b54873ed6\") " pod="openstack/dnsmasq-dns-566bf687f9-kdl4x" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.280293 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-566bf687f9-kdl4x" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.333789 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1be85ddb-0864-4dc7-bd72-8f3cc1443bd5-config-data\") pod \"barbican-api-856577cd68-rkqcj\" (UID: \"1be85ddb-0864-4dc7-bd72-8f3cc1443bd5\") " pod="openstack/barbican-api-856577cd68-rkqcj" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.333961 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1be85ddb-0864-4dc7-bd72-8f3cc1443bd5-config-data-custom\") pod \"barbican-api-856577cd68-rkqcj\" (UID: \"1be85ddb-0864-4dc7-bd72-8f3cc1443bd5\") " pod="openstack/barbican-api-856577cd68-rkqcj" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.334013 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pn796\" (UniqueName: \"kubernetes.io/projected/1be85ddb-0864-4dc7-bd72-8f3cc1443bd5-kube-api-access-pn796\") pod \"barbican-api-856577cd68-rkqcj\" (UID: \"1be85ddb-0864-4dc7-bd72-8f3cc1443bd5\") " pod="openstack/barbican-api-856577cd68-rkqcj" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.334040 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1be85ddb-0864-4dc7-bd72-8f3cc1443bd5-logs\") pod \"barbican-api-856577cd68-rkqcj\" (UID: \"1be85ddb-0864-4dc7-bd72-8f3cc1443bd5\") " pod="openstack/barbican-api-856577cd68-rkqcj" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.334102 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1be85ddb-0864-4dc7-bd72-8f3cc1443bd5-combined-ca-bundle\") pod \"barbican-api-856577cd68-rkqcj\" (UID: \"1be85ddb-0864-4dc7-bd72-8f3cc1443bd5\") " pod="openstack/barbican-api-856577cd68-rkqcj" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.335134 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1be85ddb-0864-4dc7-bd72-8f3cc1443bd5-logs\") pod \"barbican-api-856577cd68-rkqcj\" (UID: \"1be85ddb-0864-4dc7-bd72-8f3cc1443bd5\") " pod="openstack/barbican-api-856577cd68-rkqcj" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.340373 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1be85ddb-0864-4dc7-bd72-8f3cc1443bd5-combined-ca-bundle\") pod \"barbican-api-856577cd68-rkqcj\" (UID: \"1be85ddb-0864-4dc7-bd72-8f3cc1443bd5\") " pod="openstack/barbican-api-856577cd68-rkqcj" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.340421 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1be85ddb-0864-4dc7-bd72-8f3cc1443bd5-config-data-custom\") pod \"barbican-api-856577cd68-rkqcj\" (UID: \"1be85ddb-0864-4dc7-bd72-8f3cc1443bd5\") " pod="openstack/barbican-api-856577cd68-rkqcj" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.346939 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1be85ddb-0864-4dc7-bd72-8f3cc1443bd5-config-data\") pod \"barbican-api-856577cd68-rkqcj\" (UID: \"1be85ddb-0864-4dc7-bd72-8f3cc1443bd5\") " pod="openstack/barbican-api-856577cd68-rkqcj" Jan 22 07:16:21 crc 
kubenswrapper[4982]: I0122 07:16:21.351812 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pn796\" (UniqueName: \"kubernetes.io/projected/1be85ddb-0864-4dc7-bd72-8f3cc1443bd5-kube-api-access-pn796\") pod \"barbican-api-856577cd68-rkqcj\" (UID: \"1be85ddb-0864-4dc7-bd72-8f3cc1443bd5\") " pod="openstack/barbican-api-856577cd68-rkqcj" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.411771 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-856577cd68-rkqcj" Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.705164 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6d5cd9b87c-dt272"] Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.857399 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-775476cb98-9sq5r"] Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.935845 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-566bf687f9-kdl4x"] Jan 22 07:16:21 crc kubenswrapper[4982]: W0122 07:16:21.942034 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1be85ddb_0864_4dc7_bd72_8f3cc1443bd5.slice/crio-a3b21e4642087ccdaf784f508e813b1e189e2251ad91463cbedf7c9491ba1099 WatchSource:0}: Error finding container a3b21e4642087ccdaf784f508e813b1e189e2251ad91463cbedf7c9491ba1099: Status 404 returned error can't find the container with id a3b21e4642087ccdaf784f508e813b1e189e2251ad91463cbedf7c9491ba1099 Jan 22 07:16:21 crc kubenswrapper[4982]: I0122 07:16:21.942405 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-856577cd68-rkqcj"] Jan 22 07:16:21 crc kubenswrapper[4982]: W0122 07:16:21.946563 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod51525290_f872_41f4_98b9_544b54873ed6.slice/crio-5f9b2126d11b43aade73fdd41a88d1560de8119400693b6a49407b6c7a66e866 WatchSource:0}: Error finding container 5f9b2126d11b43aade73fdd41a88d1560de8119400693b6a49407b6c7a66e866: Status 404 returned error can't find the container with id 5f9b2126d11b43aade73fdd41a88d1560de8119400693b6a49407b6c7a66e866 Jan 22 07:16:22 crc kubenswrapper[4982]: I0122 07:16:22.632682 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-856577cd68-rkqcj" event={"ID":"1be85ddb-0864-4dc7-bd72-8f3cc1443bd5","Type":"ContainerStarted","Data":"c3c6a64a34fc5efea5489462d675e0e6e2de865e03d4c9099139ac5fe7448a97"} Jan 22 07:16:22 crc kubenswrapper[4982]: I0122 07:16:22.633343 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-856577cd68-rkqcj" Jan 22 07:16:22 crc kubenswrapper[4982]: I0122 07:16:22.633363 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-856577cd68-rkqcj" event={"ID":"1be85ddb-0864-4dc7-bd72-8f3cc1443bd5","Type":"ContainerStarted","Data":"e6d05a36cc10d7a124c6d8f56f9064d0a29c0bf58febca57c98f6a538c48c88f"} Jan 22 07:16:22 crc kubenswrapper[4982]: I0122 07:16:22.633376 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-856577cd68-rkqcj" event={"ID":"1be85ddb-0864-4dc7-bd72-8f3cc1443bd5","Type":"ContainerStarted","Data":"a3b21e4642087ccdaf784f508e813b1e189e2251ad91463cbedf7c9491ba1099"} Jan 22 07:16:22 crc kubenswrapper[4982]: I0122 07:16:22.634659 4982 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/barbican-worker-6d5cd9b87c-dt272" event={"ID":"ceadad66-b472-4bf5-ba62-2d91e7eb8b9f","Type":"ContainerStarted","Data":"36ce7a56853a1702783c29c81aab1636d0c316e39494fdc58e921faf38751857"} Jan 22 07:16:22 crc kubenswrapper[4982]: I0122 07:16:22.634801 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6d5cd9b87c-dt272" event={"ID":"ceadad66-b472-4bf5-ba62-2d91e7eb8b9f","Type":"ContainerStarted","Data":"be751075cd0ea2c8124693d095b76efa102aed5e8ecc94443c4ebdd49e3ef5a5"} Jan 22 07:16:22 crc kubenswrapper[4982]: I0122 07:16:22.634910 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6d5cd9b87c-dt272" event={"ID":"ceadad66-b472-4bf5-ba62-2d91e7eb8b9f","Type":"ContainerStarted","Data":"f10ba59f77e8ac85e5b44ddf760d360034bf04f356bddd6dd76c1939bc75769f"} Jan 22 07:16:22 crc kubenswrapper[4982]: I0122 07:16:22.641680 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-775476cb98-9sq5r" event={"ID":"8741cfe8-0ee7-41ac-9740-b58be69eb376","Type":"ContainerStarted","Data":"59c4fd236b69f94f85435416fcc82847117082d383668a6c8cdf9e888d5c6bb8"} Jan 22 07:16:22 crc kubenswrapper[4982]: I0122 07:16:22.641743 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-775476cb98-9sq5r" event={"ID":"8741cfe8-0ee7-41ac-9740-b58be69eb376","Type":"ContainerStarted","Data":"9147050f583b4418188ef626e2c380d642e322c55fa01736e4288f39d1c77931"} Jan 22 07:16:22 crc kubenswrapper[4982]: I0122 07:16:22.641757 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-775476cb98-9sq5r" event={"ID":"8741cfe8-0ee7-41ac-9740-b58be69eb376","Type":"ContainerStarted","Data":"57847afccf150d4729f8912521669a175fd1dd7ddc80e5eb8874b401fbe66abf"} Jan 22 07:16:22 crc kubenswrapper[4982]: I0122 07:16:22.644362 4982 generic.go:334] "Generic (PLEG): container finished" podID="51525290-f872-41f4-98b9-544b54873ed6" containerID="fe4a157c3029e1a89eabd0280096004b265a3e76aa1dc145209ac488cd88bfea" exitCode=0 Jan 22 07:16:22 crc kubenswrapper[4982]: I0122 07:16:22.644416 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-566bf687f9-kdl4x" event={"ID":"51525290-f872-41f4-98b9-544b54873ed6","Type":"ContainerDied","Data":"fe4a157c3029e1a89eabd0280096004b265a3e76aa1dc145209ac488cd88bfea"} Jan 22 07:16:22 crc kubenswrapper[4982]: I0122 07:16:22.644448 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-566bf687f9-kdl4x" event={"ID":"51525290-f872-41f4-98b9-544b54873ed6","Type":"ContainerStarted","Data":"5f9b2126d11b43aade73fdd41a88d1560de8119400693b6a49407b6c7a66e866"} Jan 22 07:16:22 crc kubenswrapper[4982]: I0122 07:16:22.664085 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-856577cd68-rkqcj" podStartSLOduration=1.6640557569999999 podStartE2EDuration="1.664055757s" podCreationTimestamp="2026-01-22 07:16:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:16:22.661991121 +0000 UTC m=+5443.500629164" watchObservedRunningTime="2026-01-22 07:16:22.664055757 +0000 UTC m=+5443.502693750" Jan 22 07:16:22 crc kubenswrapper[4982]: I0122 07:16:22.680247 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-6d5cd9b87c-dt272" podStartSLOduration=2.680221543 podStartE2EDuration="2.680221543s" 
podCreationTimestamp="2026-01-22 07:16:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:16:22.678481806 +0000 UTC m=+5443.517119829" watchObservedRunningTime="2026-01-22 07:16:22.680221543 +0000 UTC m=+5443.518859556" Jan 22 07:16:22 crc kubenswrapper[4982]: I0122 07:16:22.710034 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-775476cb98-9sq5r" podStartSLOduration=2.710008367 podStartE2EDuration="2.710008367s" podCreationTimestamp="2026-01-22 07:16:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:16:22.702890814 +0000 UTC m=+5443.541528817" watchObservedRunningTime="2026-01-22 07:16:22.710008367 +0000 UTC m=+5443.548646370" Jan 22 07:16:23 crc kubenswrapper[4982]: I0122 07:16:23.654264 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-566bf687f9-kdl4x" event={"ID":"51525290-f872-41f4-98b9-544b54873ed6","Type":"ContainerStarted","Data":"1d726e5c87d406debddacf37fd4783e07da4bb01befd2f943d8e9aad18b7eb11"} Jan 22 07:16:23 crc kubenswrapper[4982]: I0122 07:16:23.655179 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-856577cd68-rkqcj" Jan 22 07:16:23 crc kubenswrapper[4982]: I0122 07:16:23.675573 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-566bf687f9-kdl4x" podStartSLOduration=3.675549861 podStartE2EDuration="3.675549861s" podCreationTimestamp="2026-01-22 07:16:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:16:23.673004252 +0000 UTC m=+5444.511642255" watchObservedRunningTime="2026-01-22 07:16:23.675549861 +0000 UTC m=+5444.514187854" Jan 22 07:16:24 crc kubenswrapper[4982]: I0122 07:16:24.662019 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-566bf687f9-kdl4x" Jan 22 07:16:31 crc kubenswrapper[4982]: I0122 07:16:31.282022 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-566bf687f9-kdl4x" Jan 22 07:16:31 crc kubenswrapper[4982]: I0122 07:16:31.340599 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-756f47c947-jbv27"] Jan 22 07:16:31 crc kubenswrapper[4982]: I0122 07:16:31.340895 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-756f47c947-jbv27" podUID="d96b74b5-6a64-4631-b1b1-c0bc3f173606" containerName="dnsmasq-dns" containerID="cri-o://0ae8f24d6862b809e54ac6c0411cf7967192f507288f2b580c27258ef40a2ae8" gracePeriod=10 Jan 22 07:16:31 crc kubenswrapper[4982]: I0122 07:16:31.729003 4982 generic.go:334] "Generic (PLEG): container finished" podID="d96b74b5-6a64-4631-b1b1-c0bc3f173606" containerID="0ae8f24d6862b809e54ac6c0411cf7967192f507288f2b580c27258ef40a2ae8" exitCode=0 Jan 22 07:16:31 crc kubenswrapper[4982]: I0122 07:16:31.735306 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-756f47c947-jbv27" event={"ID":"d96b74b5-6a64-4631-b1b1-c0bc3f173606","Type":"ContainerDied","Data":"0ae8f24d6862b809e54ac6c0411cf7967192f507288f2b580c27258ef40a2ae8"} Jan 22 07:16:31 crc kubenswrapper[4982]: I0122 07:16:31.914741 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-756f47c947-jbv27" Jan 22 07:16:32 crc kubenswrapper[4982]: I0122 07:16:32.055478 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d96b74b5-6a64-4631-b1b1-c0bc3f173606-config\") pod \"d96b74b5-6a64-4631-b1b1-c0bc3f173606\" (UID: \"d96b74b5-6a64-4631-b1b1-c0bc3f173606\") " Jan 22 07:16:32 crc kubenswrapper[4982]: I0122 07:16:32.055566 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d96b74b5-6a64-4631-b1b1-c0bc3f173606-dns-svc\") pod \"d96b74b5-6a64-4631-b1b1-c0bc3f173606\" (UID: \"d96b74b5-6a64-4631-b1b1-c0bc3f173606\") " Jan 22 07:16:32 crc kubenswrapper[4982]: I0122 07:16:32.055624 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d96b74b5-6a64-4631-b1b1-c0bc3f173606-ovsdbserver-nb\") pod \"d96b74b5-6a64-4631-b1b1-c0bc3f173606\" (UID: \"d96b74b5-6a64-4631-b1b1-c0bc3f173606\") " Jan 22 07:16:32 crc kubenswrapper[4982]: I0122 07:16:32.055661 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vsm8s\" (UniqueName: \"kubernetes.io/projected/d96b74b5-6a64-4631-b1b1-c0bc3f173606-kube-api-access-vsm8s\") pod \"d96b74b5-6a64-4631-b1b1-c0bc3f173606\" (UID: \"d96b74b5-6a64-4631-b1b1-c0bc3f173606\") " Jan 22 07:16:32 crc kubenswrapper[4982]: I0122 07:16:32.055748 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d96b74b5-6a64-4631-b1b1-c0bc3f173606-ovsdbserver-sb\") pod \"d96b74b5-6a64-4631-b1b1-c0bc3f173606\" (UID: \"d96b74b5-6a64-4631-b1b1-c0bc3f173606\") " Jan 22 07:16:32 crc kubenswrapper[4982]: I0122 07:16:32.079192 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d96b74b5-6a64-4631-b1b1-c0bc3f173606-kube-api-access-vsm8s" (OuterVolumeSpecName: "kube-api-access-vsm8s") pod "d96b74b5-6a64-4631-b1b1-c0bc3f173606" (UID: "d96b74b5-6a64-4631-b1b1-c0bc3f173606"). InnerVolumeSpecName "kube-api-access-vsm8s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:16:32 crc kubenswrapper[4982]: I0122 07:16:32.111040 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d96b74b5-6a64-4631-b1b1-c0bc3f173606-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d96b74b5-6a64-4631-b1b1-c0bc3f173606" (UID: "d96b74b5-6a64-4631-b1b1-c0bc3f173606"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:16:32 crc kubenswrapper[4982]: I0122 07:16:32.115194 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d96b74b5-6a64-4631-b1b1-c0bc3f173606-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d96b74b5-6a64-4631-b1b1-c0bc3f173606" (UID: "d96b74b5-6a64-4631-b1b1-c0bc3f173606"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:16:32 crc kubenswrapper[4982]: I0122 07:16:32.117623 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d96b74b5-6a64-4631-b1b1-c0bc3f173606-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d96b74b5-6a64-4631-b1b1-c0bc3f173606" (UID: "d96b74b5-6a64-4631-b1b1-c0bc3f173606"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:16:32 crc kubenswrapper[4982]: I0122 07:16:32.131977 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d96b74b5-6a64-4631-b1b1-c0bc3f173606-config" (OuterVolumeSpecName: "config") pod "d96b74b5-6a64-4631-b1b1-c0bc3f173606" (UID: "d96b74b5-6a64-4631-b1b1-c0bc3f173606"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:16:32 crc kubenswrapper[4982]: I0122 07:16:32.158806 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d96b74b5-6a64-4631-b1b1-c0bc3f173606-config\") on node \"crc\" DevicePath \"\"" Jan 22 07:16:32 crc kubenswrapper[4982]: I0122 07:16:32.158839 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d96b74b5-6a64-4631-b1b1-c0bc3f173606-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 07:16:32 crc kubenswrapper[4982]: I0122 07:16:32.158873 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d96b74b5-6a64-4631-b1b1-c0bc3f173606-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 07:16:32 crc kubenswrapper[4982]: I0122 07:16:32.158884 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vsm8s\" (UniqueName: \"kubernetes.io/projected/d96b74b5-6a64-4631-b1b1-c0bc3f173606-kube-api-access-vsm8s\") on node \"crc\" DevicePath \"\"" Jan 22 07:16:32 crc kubenswrapper[4982]: I0122 07:16:32.158895 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d96b74b5-6a64-4631-b1b1-c0bc3f173606-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 07:16:32 crc kubenswrapper[4982]: I0122 07:16:32.737333 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-756f47c947-jbv27" event={"ID":"d96b74b5-6a64-4631-b1b1-c0bc3f173606","Type":"ContainerDied","Data":"88b3add774a4aa3f20d0ae888ca0c1b509a8299f17370fa665ed73e8e5b7e2a0"} Jan 22 07:16:32 crc kubenswrapper[4982]: I0122 07:16:32.737393 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-756f47c947-jbv27" Jan 22 07:16:32 crc kubenswrapper[4982]: I0122 07:16:32.737401 4982 scope.go:117] "RemoveContainer" containerID="0ae8f24d6862b809e54ac6c0411cf7967192f507288f2b580c27258ef40a2ae8" Jan 22 07:16:32 crc kubenswrapper[4982]: I0122 07:16:32.765239 4982 scope.go:117] "RemoveContainer" containerID="3df9fa92a5f6b608fd7673474a3f3adc9a195ea15bf5c3d69d06532992843bf2" Jan 22 07:16:32 crc kubenswrapper[4982]: I0122 07:16:32.814183 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-756f47c947-jbv27"] Jan 22 07:16:32 crc kubenswrapper[4982]: I0122 07:16:32.846305 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-756f47c947-jbv27"] Jan 22 07:16:33 crc kubenswrapper[4982]: I0122 07:16:33.062083 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-856577cd68-rkqcj" Jan 22 07:16:33 crc kubenswrapper[4982]: I0122 07:16:33.166229 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-856577cd68-rkqcj" Jan 22 07:16:33 crc kubenswrapper[4982]: I0122 07:16:33.728381 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d96b74b5-6a64-4631-b1b1-c0bc3f173606" path="/var/lib/kubelet/pods/d96b74b5-6a64-4631-b1b1-c0bc3f173606/volumes" Jan 22 07:16:34 crc kubenswrapper[4982]: I0122 07:16:34.719786 4982 scope.go:117] "RemoveContainer" containerID="dd1b1f68267d9d0fc8f31fc896f788451cccffbbd47d3265a31a9c82c393becc" Jan 22 07:16:34 crc kubenswrapper[4982]: E0122 07:16:34.720332 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:16:44 crc kubenswrapper[4982]: I0122 07:16:44.587444 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2nmfq"] Jan 22 07:16:44 crc kubenswrapper[4982]: E0122 07:16:44.588989 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d96b74b5-6a64-4631-b1b1-c0bc3f173606" containerName="init" Jan 22 07:16:44 crc kubenswrapper[4982]: I0122 07:16:44.589025 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d96b74b5-6a64-4631-b1b1-c0bc3f173606" containerName="init" Jan 22 07:16:44 crc kubenswrapper[4982]: E0122 07:16:44.589056 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d96b74b5-6a64-4631-b1b1-c0bc3f173606" containerName="dnsmasq-dns" Jan 22 07:16:44 crc kubenswrapper[4982]: I0122 07:16:44.589074 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d96b74b5-6a64-4631-b1b1-c0bc3f173606" containerName="dnsmasq-dns" Jan 22 07:16:44 crc kubenswrapper[4982]: I0122 07:16:44.589490 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d96b74b5-6a64-4631-b1b1-c0bc3f173606" containerName="dnsmasq-dns" Jan 22 07:16:44 crc kubenswrapper[4982]: I0122 07:16:44.592532 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2nmfq" Jan 22 07:16:44 crc kubenswrapper[4982]: I0122 07:16:44.601832 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2nmfq"] Jan 22 07:16:44 crc kubenswrapper[4982]: I0122 07:16:44.720503 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4qjn\" (UniqueName: \"kubernetes.io/projected/c7836b01-b9cf-4458-b512-51332cb48b71-kube-api-access-t4qjn\") pod \"redhat-operators-2nmfq\" (UID: \"c7836b01-b9cf-4458-b512-51332cb48b71\") " pod="openshift-marketplace/redhat-operators-2nmfq" Jan 22 07:16:44 crc kubenswrapper[4982]: I0122 07:16:44.721032 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7836b01-b9cf-4458-b512-51332cb48b71-catalog-content\") pod \"redhat-operators-2nmfq\" (UID: \"c7836b01-b9cf-4458-b512-51332cb48b71\") " pod="openshift-marketplace/redhat-operators-2nmfq" Jan 22 07:16:44 crc kubenswrapper[4982]: I0122 07:16:44.721162 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7836b01-b9cf-4458-b512-51332cb48b71-utilities\") pod \"redhat-operators-2nmfq\" (UID: \"c7836b01-b9cf-4458-b512-51332cb48b71\") " pod="openshift-marketplace/redhat-operators-2nmfq" Jan 22 07:16:44 crc kubenswrapper[4982]: I0122 07:16:44.823775 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7836b01-b9cf-4458-b512-51332cb48b71-catalog-content\") pod \"redhat-operators-2nmfq\" (UID: \"c7836b01-b9cf-4458-b512-51332cb48b71\") " pod="openshift-marketplace/redhat-operators-2nmfq" Jan 22 07:16:44 crc kubenswrapper[4982]: I0122 07:16:44.824084 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7836b01-b9cf-4458-b512-51332cb48b71-utilities\") pod \"redhat-operators-2nmfq\" (UID: \"c7836b01-b9cf-4458-b512-51332cb48b71\") " pod="openshift-marketplace/redhat-operators-2nmfq" Jan 22 07:16:44 crc kubenswrapper[4982]: I0122 07:16:44.824195 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4qjn\" (UniqueName: \"kubernetes.io/projected/c7836b01-b9cf-4458-b512-51332cb48b71-kube-api-access-t4qjn\") pod \"redhat-operators-2nmfq\" (UID: \"c7836b01-b9cf-4458-b512-51332cb48b71\") " pod="openshift-marketplace/redhat-operators-2nmfq" Jan 22 07:16:44 crc kubenswrapper[4982]: I0122 07:16:44.825155 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7836b01-b9cf-4458-b512-51332cb48b71-utilities\") pod \"redhat-operators-2nmfq\" (UID: \"c7836b01-b9cf-4458-b512-51332cb48b71\") " pod="openshift-marketplace/redhat-operators-2nmfq" Jan 22 07:16:44 crc kubenswrapper[4982]: I0122 07:16:44.825300 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7836b01-b9cf-4458-b512-51332cb48b71-catalog-content\") pod \"redhat-operators-2nmfq\" (UID: \"c7836b01-b9cf-4458-b512-51332cb48b71\") " pod="openshift-marketplace/redhat-operators-2nmfq" Jan 22 07:16:44 crc kubenswrapper[4982]: I0122 07:16:44.853194 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-t4qjn\" (UniqueName: \"kubernetes.io/projected/c7836b01-b9cf-4458-b512-51332cb48b71-kube-api-access-t4qjn\") pod \"redhat-operators-2nmfq\" (UID: \"c7836b01-b9cf-4458-b512-51332cb48b71\") " pod="openshift-marketplace/redhat-operators-2nmfq" Jan 22 07:16:44 crc kubenswrapper[4982]: I0122 07:16:44.925331 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2nmfq" Jan 22 07:16:45 crc kubenswrapper[4982]: I0122 07:16:45.218975 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2nmfq"] Jan 22 07:16:45 crc kubenswrapper[4982]: I0122 07:16:45.487742 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-ff6jz"] Jan 22 07:16:45 crc kubenswrapper[4982]: I0122 07:16:45.489293 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-ff6jz" Jan 22 07:16:45 crc kubenswrapper[4982]: I0122 07:16:45.495603 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-ff6jz"] Jan 22 07:16:45 crc kubenswrapper[4982]: I0122 07:16:45.590142 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-13ab-account-create-update-bksl7"] Jan 22 07:16:45 crc kubenswrapper[4982]: I0122 07:16:45.591125 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-13ab-account-create-update-bksl7" Jan 22 07:16:45 crc kubenswrapper[4982]: I0122 07:16:45.593876 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Jan 22 07:16:45 crc kubenswrapper[4982]: I0122 07:16:45.605925 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-13ab-account-create-update-bksl7"] Jan 22 07:16:45 crc kubenswrapper[4982]: I0122 07:16:45.645777 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7dc8696d-82e4-4a14-a3c0-b265a7f39b62-operator-scripts\") pod \"neutron-db-create-ff6jz\" (UID: \"7dc8696d-82e4-4a14-a3c0-b265a7f39b62\") " pod="openstack/neutron-db-create-ff6jz" Jan 22 07:16:45 crc kubenswrapper[4982]: I0122 07:16:45.645821 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqtw6\" (UniqueName: \"kubernetes.io/projected/7dc8696d-82e4-4a14-a3c0-b265a7f39b62-kube-api-access-wqtw6\") pod \"neutron-db-create-ff6jz\" (UID: \"7dc8696d-82e4-4a14-a3c0-b265a7f39b62\") " pod="openstack/neutron-db-create-ff6jz" Jan 22 07:16:45 crc kubenswrapper[4982]: I0122 07:16:45.645924 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d616c023-d46f-43c4-bd46-a854aa01403c-operator-scripts\") pod \"neutron-13ab-account-create-update-bksl7\" (UID: \"d616c023-d46f-43c4-bd46-a854aa01403c\") " pod="openstack/neutron-13ab-account-create-update-bksl7" Jan 22 07:16:45 crc kubenswrapper[4982]: I0122 07:16:45.645952 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kflcc\" (UniqueName: \"kubernetes.io/projected/d616c023-d46f-43c4-bd46-a854aa01403c-kube-api-access-kflcc\") pod \"neutron-13ab-account-create-update-bksl7\" (UID: \"d616c023-d46f-43c4-bd46-a854aa01403c\") " pod="openstack/neutron-13ab-account-create-update-bksl7" Jan 22 07:16:45 crc 
kubenswrapper[4982]: I0122 07:16:45.748497 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7dc8696d-82e4-4a14-a3c0-b265a7f39b62-operator-scripts\") pod \"neutron-db-create-ff6jz\" (UID: \"7dc8696d-82e4-4a14-a3c0-b265a7f39b62\") " pod="openstack/neutron-db-create-ff6jz" Jan 22 07:16:45 crc kubenswrapper[4982]: I0122 07:16:45.748581 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqtw6\" (UniqueName: \"kubernetes.io/projected/7dc8696d-82e4-4a14-a3c0-b265a7f39b62-kube-api-access-wqtw6\") pod \"neutron-db-create-ff6jz\" (UID: \"7dc8696d-82e4-4a14-a3c0-b265a7f39b62\") " pod="openstack/neutron-db-create-ff6jz" Jan 22 07:16:45 crc kubenswrapper[4982]: I0122 07:16:45.749326 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d616c023-d46f-43c4-bd46-a854aa01403c-operator-scripts\") pod \"neutron-13ab-account-create-update-bksl7\" (UID: \"d616c023-d46f-43c4-bd46-a854aa01403c\") " pod="openstack/neutron-13ab-account-create-update-bksl7" Jan 22 07:16:45 crc kubenswrapper[4982]: I0122 07:16:45.749429 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kflcc\" (UniqueName: \"kubernetes.io/projected/d616c023-d46f-43c4-bd46-a854aa01403c-kube-api-access-kflcc\") pod \"neutron-13ab-account-create-update-bksl7\" (UID: \"d616c023-d46f-43c4-bd46-a854aa01403c\") " pod="openstack/neutron-13ab-account-create-update-bksl7" Jan 22 07:16:45 crc kubenswrapper[4982]: I0122 07:16:45.750194 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d616c023-d46f-43c4-bd46-a854aa01403c-operator-scripts\") pod \"neutron-13ab-account-create-update-bksl7\" (UID: \"d616c023-d46f-43c4-bd46-a854aa01403c\") " pod="openstack/neutron-13ab-account-create-update-bksl7" Jan 22 07:16:45 crc kubenswrapper[4982]: I0122 07:16:45.750798 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7dc8696d-82e4-4a14-a3c0-b265a7f39b62-operator-scripts\") pod \"neutron-db-create-ff6jz\" (UID: \"7dc8696d-82e4-4a14-a3c0-b265a7f39b62\") " pod="openstack/neutron-db-create-ff6jz" Jan 22 07:16:45 crc kubenswrapper[4982]: I0122 07:16:45.770712 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kflcc\" (UniqueName: \"kubernetes.io/projected/d616c023-d46f-43c4-bd46-a854aa01403c-kube-api-access-kflcc\") pod \"neutron-13ab-account-create-update-bksl7\" (UID: \"d616c023-d46f-43c4-bd46-a854aa01403c\") " pod="openstack/neutron-13ab-account-create-update-bksl7" Jan 22 07:16:45 crc kubenswrapper[4982]: I0122 07:16:45.773369 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqtw6\" (UniqueName: \"kubernetes.io/projected/7dc8696d-82e4-4a14-a3c0-b265a7f39b62-kube-api-access-wqtw6\") pod \"neutron-db-create-ff6jz\" (UID: \"7dc8696d-82e4-4a14-a3c0-b265a7f39b62\") " pod="openstack/neutron-db-create-ff6jz" Jan 22 07:16:45 crc kubenswrapper[4982]: I0122 07:16:45.806719 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-ff6jz" Jan 22 07:16:45 crc kubenswrapper[4982]: I0122 07:16:45.861215 4982 generic.go:334] "Generic (PLEG): container finished" podID="c7836b01-b9cf-4458-b512-51332cb48b71" containerID="3450d218a40b65efd7a58f8617391b105704de25c1cea7f826eb46587429421c" exitCode=0 Jan 22 07:16:45 crc kubenswrapper[4982]: I0122 07:16:45.861259 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2nmfq" event={"ID":"c7836b01-b9cf-4458-b512-51332cb48b71","Type":"ContainerDied","Data":"3450d218a40b65efd7a58f8617391b105704de25c1cea7f826eb46587429421c"} Jan 22 07:16:45 crc kubenswrapper[4982]: I0122 07:16:45.861285 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2nmfq" event={"ID":"c7836b01-b9cf-4458-b512-51332cb48b71","Type":"ContainerStarted","Data":"462c85947b7ffb57067d8fed12b24e110a3e836b9576b94f4d7146281924474a"} Jan 22 07:16:45 crc kubenswrapper[4982]: I0122 07:16:45.965949 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-13ab-account-create-update-bksl7" Jan 22 07:16:46 crc kubenswrapper[4982]: I0122 07:16:46.318391 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-ff6jz"] Jan 22 07:16:46 crc kubenswrapper[4982]: I0122 07:16:46.486399 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-13ab-account-create-update-bksl7"] Jan 22 07:16:46 crc kubenswrapper[4982]: I0122 07:16:46.875121 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2nmfq" event={"ID":"c7836b01-b9cf-4458-b512-51332cb48b71","Type":"ContainerStarted","Data":"448ba8a28e6e56af56060f7c4ec0bba7bd88ce46f899034c0d2d459eecbe4712"} Jan 22 07:16:46 crc kubenswrapper[4982]: I0122 07:16:46.878077 4982 generic.go:334] "Generic (PLEG): container finished" podID="7dc8696d-82e4-4a14-a3c0-b265a7f39b62" containerID="623a8e9c01aad45ebea0f753127bd0ad71cfd276ef4199ddefaeca0304df9e6f" exitCode=0 Jan 22 07:16:46 crc kubenswrapper[4982]: I0122 07:16:46.878235 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-ff6jz" event={"ID":"7dc8696d-82e4-4a14-a3c0-b265a7f39b62","Type":"ContainerDied","Data":"623a8e9c01aad45ebea0f753127bd0ad71cfd276ef4199ddefaeca0304df9e6f"} Jan 22 07:16:46 crc kubenswrapper[4982]: I0122 07:16:46.878270 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-ff6jz" event={"ID":"7dc8696d-82e4-4a14-a3c0-b265a7f39b62","Type":"ContainerStarted","Data":"b329958aa9a293074daed9f148fd200859f2827990e43b1470b3b53039e798a2"} Jan 22 07:16:46 crc kubenswrapper[4982]: I0122 07:16:46.881371 4982 generic.go:334] "Generic (PLEG): container finished" podID="d616c023-d46f-43c4-bd46-a854aa01403c" containerID="d1cda6062ceb7c495ef42bee5a9cc32e73da65d43a1664cdd8d25126479b1d35" exitCode=0 Jan 22 07:16:46 crc kubenswrapper[4982]: I0122 07:16:46.881406 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-13ab-account-create-update-bksl7" event={"ID":"d616c023-d46f-43c4-bd46-a854aa01403c","Type":"ContainerDied","Data":"d1cda6062ceb7c495ef42bee5a9cc32e73da65d43a1664cdd8d25126479b1d35"} Jan 22 07:16:46 crc kubenswrapper[4982]: I0122 07:16:46.881427 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-13ab-account-create-update-bksl7" 
event={"ID":"d616c023-d46f-43c4-bd46-a854aa01403c","Type":"ContainerStarted","Data":"3736ad5ce178380de693f535966122d56669a2a487de36ca714e8e24e3a3fa1c"} Jan 22 07:16:47 crc kubenswrapper[4982]: I0122 07:16:47.892522 4982 generic.go:334] "Generic (PLEG): container finished" podID="c7836b01-b9cf-4458-b512-51332cb48b71" containerID="448ba8a28e6e56af56060f7c4ec0bba7bd88ce46f899034c0d2d459eecbe4712" exitCode=0 Jan 22 07:16:47 crc kubenswrapper[4982]: I0122 07:16:47.892578 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2nmfq" event={"ID":"c7836b01-b9cf-4458-b512-51332cb48b71","Type":"ContainerDied","Data":"448ba8a28e6e56af56060f7c4ec0bba7bd88ce46f899034c0d2d459eecbe4712"} Jan 22 07:16:48 crc kubenswrapper[4982]: I0122 07:16:48.276084 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-13ab-account-create-update-bksl7" Jan 22 07:16:48 crc kubenswrapper[4982]: I0122 07:16:48.285290 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-ff6jz" Jan 22 07:16:48 crc kubenswrapper[4982]: I0122 07:16:48.397418 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kflcc\" (UniqueName: \"kubernetes.io/projected/d616c023-d46f-43c4-bd46-a854aa01403c-kube-api-access-kflcc\") pod \"d616c023-d46f-43c4-bd46-a854aa01403c\" (UID: \"d616c023-d46f-43c4-bd46-a854aa01403c\") " Jan 22 07:16:48 crc kubenswrapper[4982]: I0122 07:16:48.397519 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wqtw6\" (UniqueName: \"kubernetes.io/projected/7dc8696d-82e4-4a14-a3c0-b265a7f39b62-kube-api-access-wqtw6\") pod \"7dc8696d-82e4-4a14-a3c0-b265a7f39b62\" (UID: \"7dc8696d-82e4-4a14-a3c0-b265a7f39b62\") " Jan 22 07:16:48 crc kubenswrapper[4982]: I0122 07:16:48.397631 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7dc8696d-82e4-4a14-a3c0-b265a7f39b62-operator-scripts\") pod \"7dc8696d-82e4-4a14-a3c0-b265a7f39b62\" (UID: \"7dc8696d-82e4-4a14-a3c0-b265a7f39b62\") " Jan 22 07:16:48 crc kubenswrapper[4982]: I0122 07:16:48.398033 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d616c023-d46f-43c4-bd46-a854aa01403c-operator-scripts\") pod \"d616c023-d46f-43c4-bd46-a854aa01403c\" (UID: \"d616c023-d46f-43c4-bd46-a854aa01403c\") " Jan 22 07:16:48 crc kubenswrapper[4982]: I0122 07:16:48.398406 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7dc8696d-82e4-4a14-a3c0-b265a7f39b62-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7dc8696d-82e4-4a14-a3c0-b265a7f39b62" (UID: "7dc8696d-82e4-4a14-a3c0-b265a7f39b62"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:16:48 crc kubenswrapper[4982]: I0122 07:16:48.398519 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d616c023-d46f-43c4-bd46-a854aa01403c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d616c023-d46f-43c4-bd46-a854aa01403c" (UID: "d616c023-d46f-43c4-bd46-a854aa01403c"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:16:48 crc kubenswrapper[4982]: I0122 07:16:48.404269 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d616c023-d46f-43c4-bd46-a854aa01403c-kube-api-access-kflcc" (OuterVolumeSpecName: "kube-api-access-kflcc") pod "d616c023-d46f-43c4-bd46-a854aa01403c" (UID: "d616c023-d46f-43c4-bd46-a854aa01403c"). InnerVolumeSpecName "kube-api-access-kflcc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:16:48 crc kubenswrapper[4982]: I0122 07:16:48.404371 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7dc8696d-82e4-4a14-a3c0-b265a7f39b62-kube-api-access-wqtw6" (OuterVolumeSpecName: "kube-api-access-wqtw6") pod "7dc8696d-82e4-4a14-a3c0-b265a7f39b62" (UID: "7dc8696d-82e4-4a14-a3c0-b265a7f39b62"). InnerVolumeSpecName "kube-api-access-wqtw6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:16:48 crc kubenswrapper[4982]: I0122 07:16:48.499236 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d616c023-d46f-43c4-bd46-a854aa01403c-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:16:48 crc kubenswrapper[4982]: I0122 07:16:48.499265 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kflcc\" (UniqueName: \"kubernetes.io/projected/d616c023-d46f-43c4-bd46-a854aa01403c-kube-api-access-kflcc\") on node \"crc\" DevicePath \"\"" Jan 22 07:16:48 crc kubenswrapper[4982]: I0122 07:16:48.499279 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wqtw6\" (UniqueName: \"kubernetes.io/projected/7dc8696d-82e4-4a14-a3c0-b265a7f39b62-kube-api-access-wqtw6\") on node \"crc\" DevicePath \"\"" Jan 22 07:16:48 crc kubenswrapper[4982]: I0122 07:16:48.499293 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7dc8696d-82e4-4a14-a3c0-b265a7f39b62-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:16:48 crc kubenswrapper[4982]: I0122 07:16:48.719967 4982 scope.go:117] "RemoveContainer" containerID="dd1b1f68267d9d0fc8f31fc896f788451cccffbbd47d3265a31a9c82c393becc" Jan 22 07:16:48 crc kubenswrapper[4982]: E0122 07:16:48.720209 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:16:48 crc kubenswrapper[4982]: I0122 07:16:48.902496 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-13ab-account-create-update-bksl7" Jan 22 07:16:48 crc kubenswrapper[4982]: I0122 07:16:48.902707 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-13ab-account-create-update-bksl7" event={"ID":"d616c023-d46f-43c4-bd46-a854aa01403c","Type":"ContainerDied","Data":"3736ad5ce178380de693f535966122d56669a2a487de36ca714e8e24e3a3fa1c"} Jan 22 07:16:48 crc kubenswrapper[4982]: I0122 07:16:48.903190 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3736ad5ce178380de693f535966122d56669a2a487de36ca714e8e24e3a3fa1c" Jan 22 07:16:48 crc kubenswrapper[4982]: I0122 07:16:48.905681 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-ff6jz" Jan 22 07:16:48 crc kubenswrapper[4982]: I0122 07:16:48.906653 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-ff6jz" event={"ID":"7dc8696d-82e4-4a14-a3c0-b265a7f39b62","Type":"ContainerDied","Data":"b329958aa9a293074daed9f148fd200859f2827990e43b1470b3b53039e798a2"} Jan 22 07:16:48 crc kubenswrapper[4982]: I0122 07:16:48.906693 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b329958aa9a293074daed9f148fd200859f2827990e43b1470b3b53039e798a2" Jan 22 07:16:48 crc kubenswrapper[4982]: I0122 07:16:48.911720 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2nmfq" event={"ID":"c7836b01-b9cf-4458-b512-51332cb48b71","Type":"ContainerStarted","Data":"5d51c76577eff3e6e59f4de2c7b51eb8b5dbd7232c3e6b60aab3a2658be522b6"} Jan 22 07:16:48 crc kubenswrapper[4982]: I0122 07:16:48.935148 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2nmfq" podStartSLOduration=2.451062393 podStartE2EDuration="4.935129849s" podCreationTimestamp="2026-01-22 07:16:44 +0000 UTC" firstStartedPulling="2026-01-22 07:16:45.864116611 +0000 UTC m=+5466.702754614" lastFinishedPulling="2026-01-22 07:16:48.348184067 +0000 UTC m=+5469.186822070" observedRunningTime="2026-01-22 07:16:48.929433315 +0000 UTC m=+5469.768071319" watchObservedRunningTime="2026-01-22 07:16:48.935129849 +0000 UTC m=+5469.773767842" Jan 22 07:16:50 crc kubenswrapper[4982]: I0122 07:16:50.825490 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-ssz98"] Jan 22 07:16:50 crc kubenswrapper[4982]: E0122 07:16:50.825827 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dc8696d-82e4-4a14-a3c0-b265a7f39b62" containerName="mariadb-database-create" Jan 22 07:16:50 crc kubenswrapper[4982]: I0122 07:16:50.825838 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dc8696d-82e4-4a14-a3c0-b265a7f39b62" containerName="mariadb-database-create" Jan 22 07:16:50 crc kubenswrapper[4982]: E0122 07:16:50.825847 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d616c023-d46f-43c4-bd46-a854aa01403c" containerName="mariadb-account-create-update" Jan 22 07:16:50 crc kubenswrapper[4982]: I0122 07:16:50.825872 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d616c023-d46f-43c4-bd46-a854aa01403c" containerName="mariadb-account-create-update" Jan 22 07:16:50 crc kubenswrapper[4982]: I0122 07:16:50.826060 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d616c023-d46f-43c4-bd46-a854aa01403c" containerName="mariadb-account-create-update" Jan 22 07:16:50 crc kubenswrapper[4982]: I0122 
07:16:50.826078 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dc8696d-82e4-4a14-a3c0-b265a7f39b62" containerName="mariadb-database-create" Jan 22 07:16:50 crc kubenswrapper[4982]: I0122 07:16:50.826612 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-ssz98" Jan 22 07:16:50 crc kubenswrapper[4982]: I0122 07:16:50.828558 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Jan 22 07:16:50 crc kubenswrapper[4982]: I0122 07:16:50.828770 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Jan 22 07:16:50 crc kubenswrapper[4982]: I0122 07:16:50.830315 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-lpjr5" Jan 22 07:16:50 crc kubenswrapper[4982]: I0122 07:16:50.846966 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-ssz98"] Jan 22 07:16:50 crc kubenswrapper[4982]: I0122 07:16:50.941238 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe56fa86-31ae-4c80-b1e5-ae439cd33588-combined-ca-bundle\") pod \"neutron-db-sync-ssz98\" (UID: \"fe56fa86-31ae-4c80-b1e5-ae439cd33588\") " pod="openstack/neutron-db-sync-ssz98" Jan 22 07:16:50 crc kubenswrapper[4982]: I0122 07:16:50.941338 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/fe56fa86-31ae-4c80-b1e5-ae439cd33588-config\") pod \"neutron-db-sync-ssz98\" (UID: \"fe56fa86-31ae-4c80-b1e5-ae439cd33588\") " pod="openstack/neutron-db-sync-ssz98" Jan 22 07:16:50 crc kubenswrapper[4982]: I0122 07:16:50.941570 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dn6f\" (UniqueName: \"kubernetes.io/projected/fe56fa86-31ae-4c80-b1e5-ae439cd33588-kube-api-access-2dn6f\") pod \"neutron-db-sync-ssz98\" (UID: \"fe56fa86-31ae-4c80-b1e5-ae439cd33588\") " pod="openstack/neutron-db-sync-ssz98" Jan 22 07:16:51 crc kubenswrapper[4982]: I0122 07:16:51.043521 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dn6f\" (UniqueName: \"kubernetes.io/projected/fe56fa86-31ae-4c80-b1e5-ae439cd33588-kube-api-access-2dn6f\") pod \"neutron-db-sync-ssz98\" (UID: \"fe56fa86-31ae-4c80-b1e5-ae439cd33588\") " pod="openstack/neutron-db-sync-ssz98" Jan 22 07:16:51 crc kubenswrapper[4982]: I0122 07:16:51.043634 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe56fa86-31ae-4c80-b1e5-ae439cd33588-combined-ca-bundle\") pod \"neutron-db-sync-ssz98\" (UID: \"fe56fa86-31ae-4c80-b1e5-ae439cd33588\") " pod="openstack/neutron-db-sync-ssz98" Jan 22 07:16:51 crc kubenswrapper[4982]: I0122 07:16:51.043716 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/fe56fa86-31ae-4c80-b1e5-ae439cd33588-config\") pod \"neutron-db-sync-ssz98\" (UID: \"fe56fa86-31ae-4c80-b1e5-ae439cd33588\") " pod="openstack/neutron-db-sync-ssz98" Jan 22 07:16:51 crc kubenswrapper[4982]: I0122 07:16:51.049227 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/fe56fa86-31ae-4c80-b1e5-ae439cd33588-config\") pod 
\"neutron-db-sync-ssz98\" (UID: \"fe56fa86-31ae-4c80-b1e5-ae439cd33588\") " pod="openstack/neutron-db-sync-ssz98" Jan 22 07:16:51 crc kubenswrapper[4982]: I0122 07:16:51.050045 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe56fa86-31ae-4c80-b1e5-ae439cd33588-combined-ca-bundle\") pod \"neutron-db-sync-ssz98\" (UID: \"fe56fa86-31ae-4c80-b1e5-ae439cd33588\") " pod="openstack/neutron-db-sync-ssz98" Jan 22 07:16:51 crc kubenswrapper[4982]: I0122 07:16:51.068176 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2dn6f\" (UniqueName: \"kubernetes.io/projected/fe56fa86-31ae-4c80-b1e5-ae439cd33588-kube-api-access-2dn6f\") pod \"neutron-db-sync-ssz98\" (UID: \"fe56fa86-31ae-4c80-b1e5-ae439cd33588\") " pod="openstack/neutron-db-sync-ssz98" Jan 22 07:16:51 crc kubenswrapper[4982]: I0122 07:16:51.154754 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-ssz98" Jan 22 07:16:51 crc kubenswrapper[4982]: I0122 07:16:51.599975 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-ssz98"] Jan 22 07:16:51 crc kubenswrapper[4982]: W0122 07:16:51.605103 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfe56fa86_31ae_4c80_b1e5_ae439cd33588.slice/crio-26617d378f29a6153a2bcc2cb901b5a35ddd56c2bb4727f02cd8481523f571f7 WatchSource:0}: Error finding container 26617d378f29a6153a2bcc2cb901b5a35ddd56c2bb4727f02cd8481523f571f7: Status 404 returned error can't find the container with id 26617d378f29a6153a2bcc2cb901b5a35ddd56c2bb4727f02cd8481523f571f7 Jan 22 07:16:51 crc kubenswrapper[4982]: I0122 07:16:51.934012 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-ssz98" event={"ID":"fe56fa86-31ae-4c80-b1e5-ae439cd33588","Type":"ContainerStarted","Data":"26617d378f29a6153a2bcc2cb901b5a35ddd56c2bb4727f02cd8481523f571f7"} Jan 22 07:16:52 crc kubenswrapper[4982]: I0122 07:16:52.943173 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-ssz98" event={"ID":"fe56fa86-31ae-4c80-b1e5-ae439cd33588","Type":"ContainerStarted","Data":"81f6c7aced0303c529992bf97c04ef9a3ba3c8ea445cdb1aa7dd5a731632d16f"} Jan 22 07:16:52 crc kubenswrapper[4982]: I0122 07:16:52.975549 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-ssz98" podStartSLOduration=2.975529585 podStartE2EDuration="2.975529585s" podCreationTimestamp="2026-01-22 07:16:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:16:52.962051912 +0000 UTC m=+5473.800689915" watchObservedRunningTime="2026-01-22 07:16:52.975529585 +0000 UTC m=+5473.814167578" Jan 22 07:16:54 crc kubenswrapper[4982]: I0122 07:16:54.926119 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2nmfq" Jan 22 07:16:54 crc kubenswrapper[4982]: I0122 07:16:54.926537 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2nmfq" Jan 22 07:16:54 crc kubenswrapper[4982]: I0122 07:16:54.971364 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2nmfq" Jan 22 07:16:56 crc kubenswrapper[4982]: I0122 07:16:56.012943 4982 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2nmfq" Jan 22 07:16:56 crc kubenswrapper[4982]: I0122 07:16:56.094886 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2nmfq"] Jan 22 07:16:57 crc kubenswrapper[4982]: I0122 07:16:57.984546 4982 generic.go:334] "Generic (PLEG): container finished" podID="fe56fa86-31ae-4c80-b1e5-ae439cd33588" containerID="81f6c7aced0303c529992bf97c04ef9a3ba3c8ea445cdb1aa7dd5a731632d16f" exitCode=0 Jan 22 07:16:57 crc kubenswrapper[4982]: I0122 07:16:57.984635 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-ssz98" event={"ID":"fe56fa86-31ae-4c80-b1e5-ae439cd33588","Type":"ContainerDied","Data":"81f6c7aced0303c529992bf97c04ef9a3ba3c8ea445cdb1aa7dd5a731632d16f"} Jan 22 07:16:57 crc kubenswrapper[4982]: I0122 07:16:57.985019 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-2nmfq" podUID="c7836b01-b9cf-4458-b512-51332cb48b71" containerName="registry-server" containerID="cri-o://5d51c76577eff3e6e59f4de2c7b51eb8b5dbd7232c3e6b60aab3a2658be522b6" gracePeriod=2 Jan 22 07:16:59 crc kubenswrapper[4982]: I0122 07:16:59.291424 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-ssz98" Jan 22 07:16:59 crc kubenswrapper[4982]: I0122 07:16:59.302942 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2dn6f\" (UniqueName: \"kubernetes.io/projected/fe56fa86-31ae-4c80-b1e5-ae439cd33588-kube-api-access-2dn6f\") pod \"fe56fa86-31ae-4c80-b1e5-ae439cd33588\" (UID: \"fe56fa86-31ae-4c80-b1e5-ae439cd33588\") " Jan 22 07:16:59 crc kubenswrapper[4982]: I0122 07:16:59.303042 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe56fa86-31ae-4c80-b1e5-ae439cd33588-combined-ca-bundle\") pod \"fe56fa86-31ae-4c80-b1e5-ae439cd33588\" (UID: \"fe56fa86-31ae-4c80-b1e5-ae439cd33588\") " Jan 22 07:16:59 crc kubenswrapper[4982]: I0122 07:16:59.303127 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/fe56fa86-31ae-4c80-b1e5-ae439cd33588-config\") pod \"fe56fa86-31ae-4c80-b1e5-ae439cd33588\" (UID: \"fe56fa86-31ae-4c80-b1e5-ae439cd33588\") " Jan 22 07:16:59 crc kubenswrapper[4982]: I0122 07:16:59.312011 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe56fa86-31ae-4c80-b1e5-ae439cd33588-kube-api-access-2dn6f" (OuterVolumeSpecName: "kube-api-access-2dn6f") pod "fe56fa86-31ae-4c80-b1e5-ae439cd33588" (UID: "fe56fa86-31ae-4c80-b1e5-ae439cd33588"). InnerVolumeSpecName "kube-api-access-2dn6f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:16:59 crc kubenswrapper[4982]: I0122 07:16:59.331567 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe56fa86-31ae-4c80-b1e5-ae439cd33588-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fe56fa86-31ae-4c80-b1e5-ae439cd33588" (UID: "fe56fa86-31ae-4c80-b1e5-ae439cd33588"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:16:59 crc kubenswrapper[4982]: I0122 07:16:59.332237 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe56fa86-31ae-4c80-b1e5-ae439cd33588-config" (OuterVolumeSpecName: "config") pod "fe56fa86-31ae-4c80-b1e5-ae439cd33588" (UID: "fe56fa86-31ae-4c80-b1e5-ae439cd33588"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:16:59 crc kubenswrapper[4982]: I0122 07:16:59.404733 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe56fa86-31ae-4c80-b1e5-ae439cd33588-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:16:59 crc kubenswrapper[4982]: I0122 07:16:59.404774 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/fe56fa86-31ae-4c80-b1e5-ae439cd33588-config\") on node \"crc\" DevicePath \"\"" Jan 22 07:16:59 crc kubenswrapper[4982]: I0122 07:16:59.404790 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2dn6f\" (UniqueName: \"kubernetes.io/projected/fe56fa86-31ae-4c80-b1e5-ae439cd33588-kube-api-access-2dn6f\") on node \"crc\" DevicePath \"\"" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.001490 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-ssz98" event={"ID":"fe56fa86-31ae-4c80-b1e5-ae439cd33588","Type":"ContainerDied","Data":"26617d378f29a6153a2bcc2cb901b5a35ddd56c2bb4727f02cd8481523f571f7"} Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.002047 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="26617d378f29a6153a2bcc2cb901b5a35ddd56c2bb4727f02cd8481523f571f7" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.001535 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-ssz98" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.135594 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5ffdc4f547-pqtdn"] Jan 22 07:17:00 crc kubenswrapper[4982]: E0122 07:17:00.136189 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe56fa86-31ae-4c80-b1e5-ae439cd33588" containerName="neutron-db-sync" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.136207 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe56fa86-31ae-4c80-b1e5-ae439cd33588" containerName="neutron-db-sync" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.136485 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe56fa86-31ae-4c80-b1e5-ae439cd33588" containerName="neutron-db-sync" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.137740 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5ffdc4f547-pqtdn" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.162320 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ffdc4f547-pqtdn"] Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.218463 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmdf8\" (UniqueName: \"kubernetes.io/projected/adb4e59f-e146-4cfc-bc8e-00220637684c-kube-api-access-fmdf8\") pod \"dnsmasq-dns-5ffdc4f547-pqtdn\" (UID: \"adb4e59f-e146-4cfc-bc8e-00220637684c\") " pod="openstack/dnsmasq-dns-5ffdc4f547-pqtdn" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.218539 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/adb4e59f-e146-4cfc-bc8e-00220637684c-ovsdbserver-sb\") pod \"dnsmasq-dns-5ffdc4f547-pqtdn\" (UID: \"adb4e59f-e146-4cfc-bc8e-00220637684c\") " pod="openstack/dnsmasq-dns-5ffdc4f547-pqtdn" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.218634 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/adb4e59f-e146-4cfc-bc8e-00220637684c-dns-svc\") pod \"dnsmasq-dns-5ffdc4f547-pqtdn\" (UID: \"adb4e59f-e146-4cfc-bc8e-00220637684c\") " pod="openstack/dnsmasq-dns-5ffdc4f547-pqtdn" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.218709 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/adb4e59f-e146-4cfc-bc8e-00220637684c-ovsdbserver-nb\") pod \"dnsmasq-dns-5ffdc4f547-pqtdn\" (UID: \"adb4e59f-e146-4cfc-bc8e-00220637684c\") " pod="openstack/dnsmasq-dns-5ffdc4f547-pqtdn" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.218740 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/adb4e59f-e146-4cfc-bc8e-00220637684c-config\") pod \"dnsmasq-dns-5ffdc4f547-pqtdn\" (UID: \"adb4e59f-e146-4cfc-bc8e-00220637684c\") " pod="openstack/dnsmasq-dns-5ffdc4f547-pqtdn" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.231551 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-57b5cdf6b5-f7gh7"] Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.233797 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-57b5cdf6b5-f7gh7" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.235633 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-lpjr5" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.235880 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.237249 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.239642 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-57b5cdf6b5-f7gh7"] Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.319616 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/adb4e59f-e146-4cfc-bc8e-00220637684c-config\") pod \"dnsmasq-dns-5ffdc4f547-pqtdn\" (UID: \"adb4e59f-e146-4cfc-bc8e-00220637684c\") " pod="openstack/dnsmasq-dns-5ffdc4f547-pqtdn" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.319714 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b2809521-5cf5-49c3-bf1c-b8eefa79cd1b-httpd-config\") pod \"neutron-57b5cdf6b5-f7gh7\" (UID: \"b2809521-5cf5-49c3-bf1c-b8eefa79cd1b\") " pod="openstack/neutron-57b5cdf6b5-f7gh7" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.319747 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmdf8\" (UniqueName: \"kubernetes.io/projected/adb4e59f-e146-4cfc-bc8e-00220637684c-kube-api-access-fmdf8\") pod \"dnsmasq-dns-5ffdc4f547-pqtdn\" (UID: \"adb4e59f-e146-4cfc-bc8e-00220637684c\") " pod="openstack/dnsmasq-dns-5ffdc4f547-pqtdn" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.319768 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b2809521-5cf5-49c3-bf1c-b8eefa79cd1b-config\") pod \"neutron-57b5cdf6b5-f7gh7\" (UID: \"b2809521-5cf5-49c3-bf1c-b8eefa79cd1b\") " pod="openstack/neutron-57b5cdf6b5-f7gh7" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.319797 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmjl8\" (UniqueName: \"kubernetes.io/projected/b2809521-5cf5-49c3-bf1c-b8eefa79cd1b-kube-api-access-mmjl8\") pod \"neutron-57b5cdf6b5-f7gh7\" (UID: \"b2809521-5cf5-49c3-bf1c-b8eefa79cd1b\") " pod="openstack/neutron-57b5cdf6b5-f7gh7" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.319833 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/adb4e59f-e146-4cfc-bc8e-00220637684c-ovsdbserver-sb\") pod \"dnsmasq-dns-5ffdc4f547-pqtdn\" (UID: \"adb4e59f-e146-4cfc-bc8e-00220637684c\") " pod="openstack/dnsmasq-dns-5ffdc4f547-pqtdn" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.319981 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2809521-5cf5-49c3-bf1c-b8eefa79cd1b-combined-ca-bundle\") pod \"neutron-57b5cdf6b5-f7gh7\" (UID: \"b2809521-5cf5-49c3-bf1c-b8eefa79cd1b\") " pod="openstack/neutron-57b5cdf6b5-f7gh7" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.320095 4982 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/adb4e59f-e146-4cfc-bc8e-00220637684c-dns-svc\") pod \"dnsmasq-dns-5ffdc4f547-pqtdn\" (UID: \"adb4e59f-e146-4cfc-bc8e-00220637684c\") " pod="openstack/dnsmasq-dns-5ffdc4f547-pqtdn" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.320215 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/adb4e59f-e146-4cfc-bc8e-00220637684c-ovsdbserver-nb\") pod \"dnsmasq-dns-5ffdc4f547-pqtdn\" (UID: \"adb4e59f-e146-4cfc-bc8e-00220637684c\") " pod="openstack/dnsmasq-dns-5ffdc4f547-pqtdn" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.320661 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/adb4e59f-e146-4cfc-bc8e-00220637684c-config\") pod \"dnsmasq-dns-5ffdc4f547-pqtdn\" (UID: \"adb4e59f-e146-4cfc-bc8e-00220637684c\") " pod="openstack/dnsmasq-dns-5ffdc4f547-pqtdn" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.322128 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/adb4e59f-e146-4cfc-bc8e-00220637684c-ovsdbserver-sb\") pod \"dnsmasq-dns-5ffdc4f547-pqtdn\" (UID: \"adb4e59f-e146-4cfc-bc8e-00220637684c\") " pod="openstack/dnsmasq-dns-5ffdc4f547-pqtdn" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.322414 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/adb4e59f-e146-4cfc-bc8e-00220637684c-ovsdbserver-nb\") pod \"dnsmasq-dns-5ffdc4f547-pqtdn\" (UID: \"adb4e59f-e146-4cfc-bc8e-00220637684c\") " pod="openstack/dnsmasq-dns-5ffdc4f547-pqtdn" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.322765 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/adb4e59f-e146-4cfc-bc8e-00220637684c-dns-svc\") pod \"dnsmasq-dns-5ffdc4f547-pqtdn\" (UID: \"adb4e59f-e146-4cfc-bc8e-00220637684c\") " pod="openstack/dnsmasq-dns-5ffdc4f547-pqtdn" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.340260 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fmdf8\" (UniqueName: \"kubernetes.io/projected/adb4e59f-e146-4cfc-bc8e-00220637684c-kube-api-access-fmdf8\") pod \"dnsmasq-dns-5ffdc4f547-pqtdn\" (UID: \"adb4e59f-e146-4cfc-bc8e-00220637684c\") " pod="openstack/dnsmasq-dns-5ffdc4f547-pqtdn" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.421297 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b2809521-5cf5-49c3-bf1c-b8eefa79cd1b-httpd-config\") pod \"neutron-57b5cdf6b5-f7gh7\" (UID: \"b2809521-5cf5-49c3-bf1c-b8eefa79cd1b\") " pod="openstack/neutron-57b5cdf6b5-f7gh7" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.421342 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b2809521-5cf5-49c3-bf1c-b8eefa79cd1b-config\") pod \"neutron-57b5cdf6b5-f7gh7\" (UID: \"b2809521-5cf5-49c3-bf1c-b8eefa79cd1b\") " pod="openstack/neutron-57b5cdf6b5-f7gh7" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.421362 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmjl8\" (UniqueName: 
\"kubernetes.io/projected/b2809521-5cf5-49c3-bf1c-b8eefa79cd1b-kube-api-access-mmjl8\") pod \"neutron-57b5cdf6b5-f7gh7\" (UID: \"b2809521-5cf5-49c3-bf1c-b8eefa79cd1b\") " pod="openstack/neutron-57b5cdf6b5-f7gh7" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.421406 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2809521-5cf5-49c3-bf1c-b8eefa79cd1b-combined-ca-bundle\") pod \"neutron-57b5cdf6b5-f7gh7\" (UID: \"b2809521-5cf5-49c3-bf1c-b8eefa79cd1b\") " pod="openstack/neutron-57b5cdf6b5-f7gh7" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.424713 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b2809521-5cf5-49c3-bf1c-b8eefa79cd1b-httpd-config\") pod \"neutron-57b5cdf6b5-f7gh7\" (UID: \"b2809521-5cf5-49c3-bf1c-b8eefa79cd1b\") " pod="openstack/neutron-57b5cdf6b5-f7gh7" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.425918 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2809521-5cf5-49c3-bf1c-b8eefa79cd1b-combined-ca-bundle\") pod \"neutron-57b5cdf6b5-f7gh7\" (UID: \"b2809521-5cf5-49c3-bf1c-b8eefa79cd1b\") " pod="openstack/neutron-57b5cdf6b5-f7gh7" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.430391 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/b2809521-5cf5-49c3-bf1c-b8eefa79cd1b-config\") pod \"neutron-57b5cdf6b5-f7gh7\" (UID: \"b2809521-5cf5-49c3-bf1c-b8eefa79cd1b\") " pod="openstack/neutron-57b5cdf6b5-f7gh7" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.441825 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmjl8\" (UniqueName: \"kubernetes.io/projected/b2809521-5cf5-49c3-bf1c-b8eefa79cd1b-kube-api-access-mmjl8\") pod \"neutron-57b5cdf6b5-f7gh7\" (UID: \"b2809521-5cf5-49c3-bf1c-b8eefa79cd1b\") " pod="openstack/neutron-57b5cdf6b5-f7gh7" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.463804 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ffdc4f547-pqtdn" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.552785 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-57b5cdf6b5-f7gh7" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.720974 4982 scope.go:117] "RemoveContainer" containerID="dd1b1f68267d9d0fc8f31fc896f788451cccffbbd47d3265a31a9c82c393becc" Jan 22 07:17:00 crc kubenswrapper[4982]: E0122 07:17:00.722365 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:17:00 crc kubenswrapper[4982]: I0122 07:17:00.926358 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ffdc4f547-pqtdn"] Jan 22 07:17:01 crc kubenswrapper[4982]: I0122 07:17:01.011595 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ffdc4f547-pqtdn" event={"ID":"adb4e59f-e146-4cfc-bc8e-00220637684c","Type":"ContainerStarted","Data":"922b88867ba1087eb09370e8e32283cc9b272ce390c7d9fc914ced4edb5ade96"} Jan 22 07:17:01 crc kubenswrapper[4982]: I0122 07:17:01.017042 4982 generic.go:334] "Generic (PLEG): container finished" podID="c7836b01-b9cf-4458-b512-51332cb48b71" containerID="5d51c76577eff3e6e59f4de2c7b51eb8b5dbd7232c3e6b60aab3a2658be522b6" exitCode=0 Jan 22 07:17:01 crc kubenswrapper[4982]: I0122 07:17:01.017088 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2nmfq" event={"ID":"c7836b01-b9cf-4458-b512-51332cb48b71","Type":"ContainerDied","Data":"5d51c76577eff3e6e59f4de2c7b51eb8b5dbd7232c3e6b60aab3a2658be522b6"} Jan 22 07:17:01 crc kubenswrapper[4982]: I0122 07:17:01.051839 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2nmfq" Jan 22 07:17:01 crc kubenswrapper[4982]: I0122 07:17:01.133971 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-57b5cdf6b5-f7gh7"] Jan 22 07:17:01 crc kubenswrapper[4982]: W0122 07:17:01.135681 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb2809521_5cf5_49c3_bf1c_b8eefa79cd1b.slice/crio-44c3aab6660b8448b085c50675bc3319dca7406dfb73a88812e04e3bcd0da255 WatchSource:0}: Error finding container 44c3aab6660b8448b085c50675bc3319dca7406dfb73a88812e04e3bcd0da255: Status 404 returned error can't find the container with id 44c3aab6660b8448b085c50675bc3319dca7406dfb73a88812e04e3bcd0da255 Jan 22 07:17:01 crc kubenswrapper[4982]: I0122 07:17:01.147913 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7836b01-b9cf-4458-b512-51332cb48b71-catalog-content\") pod \"c7836b01-b9cf-4458-b512-51332cb48b71\" (UID: \"c7836b01-b9cf-4458-b512-51332cb48b71\") " Jan 22 07:17:01 crc kubenswrapper[4982]: I0122 07:17:01.147980 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t4qjn\" (UniqueName: \"kubernetes.io/projected/c7836b01-b9cf-4458-b512-51332cb48b71-kube-api-access-t4qjn\") pod \"c7836b01-b9cf-4458-b512-51332cb48b71\" (UID: \"c7836b01-b9cf-4458-b512-51332cb48b71\") " Jan 22 07:17:01 crc kubenswrapper[4982]: I0122 07:17:01.148138 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7836b01-b9cf-4458-b512-51332cb48b71-utilities\") pod \"c7836b01-b9cf-4458-b512-51332cb48b71\" (UID: \"c7836b01-b9cf-4458-b512-51332cb48b71\") " Jan 22 07:17:01 crc kubenswrapper[4982]: I0122 07:17:01.149400 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c7836b01-b9cf-4458-b512-51332cb48b71-utilities" (OuterVolumeSpecName: "utilities") pod "c7836b01-b9cf-4458-b512-51332cb48b71" (UID: "c7836b01-b9cf-4458-b512-51332cb48b71"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:17:01 crc kubenswrapper[4982]: I0122 07:17:01.152769 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7836b01-b9cf-4458-b512-51332cb48b71-kube-api-access-t4qjn" (OuterVolumeSpecName: "kube-api-access-t4qjn") pod "c7836b01-b9cf-4458-b512-51332cb48b71" (UID: "c7836b01-b9cf-4458-b512-51332cb48b71"). InnerVolumeSpecName "kube-api-access-t4qjn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:17:01 crc kubenswrapper[4982]: I0122 07:17:01.249614 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t4qjn\" (UniqueName: \"kubernetes.io/projected/c7836b01-b9cf-4458-b512-51332cb48b71-kube-api-access-t4qjn\") on node \"crc\" DevicePath \"\"" Jan 22 07:17:01 crc kubenswrapper[4982]: I0122 07:17:01.249652 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7836b01-b9cf-4458-b512-51332cb48b71-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 07:17:01 crc kubenswrapper[4982]: I0122 07:17:01.269320 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c7836b01-b9cf-4458-b512-51332cb48b71-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c7836b01-b9cf-4458-b512-51332cb48b71" (UID: "c7836b01-b9cf-4458-b512-51332cb48b71"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:17:01 crc kubenswrapper[4982]: I0122 07:17:01.351086 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7836b01-b9cf-4458-b512-51332cb48b71-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 07:17:02 crc kubenswrapper[4982]: I0122 07:17:02.026350 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2nmfq" Jan 22 07:17:02 crc kubenswrapper[4982]: I0122 07:17:02.026418 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2nmfq" event={"ID":"c7836b01-b9cf-4458-b512-51332cb48b71","Type":"ContainerDied","Data":"462c85947b7ffb57067d8fed12b24e110a3e836b9576b94f4d7146281924474a"} Jan 22 07:17:02 crc kubenswrapper[4982]: I0122 07:17:02.026890 4982 scope.go:117] "RemoveContainer" containerID="5d51c76577eff3e6e59f4de2c7b51eb8b5dbd7232c3e6b60aab3a2658be522b6" Jan 22 07:17:02 crc kubenswrapper[4982]: I0122 07:17:02.029494 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-57b5cdf6b5-f7gh7" event={"ID":"b2809521-5cf5-49c3-bf1c-b8eefa79cd1b","Type":"ContainerStarted","Data":"48214344709dcd0618961ae4278df54b0868be4322c41c0b0fd3b6982931ec35"} Jan 22 07:17:02 crc kubenswrapper[4982]: I0122 07:17:02.029541 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-57b5cdf6b5-f7gh7" event={"ID":"b2809521-5cf5-49c3-bf1c-b8eefa79cd1b","Type":"ContainerStarted","Data":"76747c7410229289c19cd879a23b2cc141b88a66e3c4e1f14e1bb2e109086531"} Jan 22 07:17:02 crc kubenswrapper[4982]: I0122 07:17:02.029556 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-57b5cdf6b5-f7gh7" event={"ID":"b2809521-5cf5-49c3-bf1c-b8eefa79cd1b","Type":"ContainerStarted","Data":"44c3aab6660b8448b085c50675bc3319dca7406dfb73a88812e04e3bcd0da255"} Jan 22 07:17:02 crc kubenswrapper[4982]: I0122 07:17:02.029676 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-57b5cdf6b5-f7gh7" Jan 22 07:17:02 crc kubenswrapper[4982]: I0122 07:17:02.032126 4982 generic.go:334] "Generic (PLEG): container finished" podID="adb4e59f-e146-4cfc-bc8e-00220637684c" containerID="9c6f628a1c24a8f22565d2859a9071568b0025ff75ecc4fa1412098d776952ea" exitCode=0 Jan 22 07:17:02 crc kubenswrapper[4982]: I0122 07:17:02.032167 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ffdc4f547-pqtdn" 
event={"ID":"adb4e59f-e146-4cfc-bc8e-00220637684c","Type":"ContainerDied","Data":"9c6f628a1c24a8f22565d2859a9071568b0025ff75ecc4fa1412098d776952ea"} Jan 22 07:17:02 crc kubenswrapper[4982]: I0122 07:17:02.057482 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2nmfq"] Jan 22 07:17:02 crc kubenswrapper[4982]: I0122 07:17:02.059276 4982 scope.go:117] "RemoveContainer" containerID="448ba8a28e6e56af56060f7c4ec0bba7bd88ce46f899034c0d2d459eecbe4712" Jan 22 07:17:02 crc kubenswrapper[4982]: I0122 07:17:02.068577 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-2nmfq"] Jan 22 07:17:02 crc kubenswrapper[4982]: I0122 07:17:02.082186 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-57b5cdf6b5-f7gh7" podStartSLOduration=2.082161819 podStartE2EDuration="2.082161819s" podCreationTimestamp="2026-01-22 07:17:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:17:02.075067408 +0000 UTC m=+5482.913705431" watchObservedRunningTime="2026-01-22 07:17:02.082161819 +0000 UTC m=+5482.920799822" Jan 22 07:17:02 crc kubenswrapper[4982]: I0122 07:17:02.111895 4982 scope.go:117] "RemoveContainer" containerID="3450d218a40b65efd7a58f8617391b105704de25c1cea7f826eb46587429421c" Jan 22 07:17:03 crc kubenswrapper[4982]: I0122 07:17:03.042607 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ffdc4f547-pqtdn" event={"ID":"adb4e59f-e146-4cfc-bc8e-00220637684c","Type":"ContainerStarted","Data":"c8228336227b7a3d2e8faef5ac2a4295c92f6d202c766e47884e68d8ab2d7628"} Jan 22 07:17:03 crc kubenswrapper[4982]: I0122 07:17:03.067831 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5ffdc4f547-pqtdn" podStartSLOduration=3.067813916 podStartE2EDuration="3.067813916s" podCreationTimestamp="2026-01-22 07:17:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:17:03.061041314 +0000 UTC m=+5483.899679327" watchObservedRunningTime="2026-01-22 07:17:03.067813916 +0000 UTC m=+5483.906451919" Jan 22 07:17:03 crc kubenswrapper[4982]: I0122 07:17:03.730265 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7836b01-b9cf-4458-b512-51332cb48b71" path="/var/lib/kubelet/pods/c7836b01-b9cf-4458-b512-51332cb48b71/volumes" Jan 22 07:17:04 crc kubenswrapper[4982]: I0122 07:17:04.049407 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5ffdc4f547-pqtdn" Jan 22 07:17:10 crc kubenswrapper[4982]: I0122 07:17:10.464997 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5ffdc4f547-pqtdn" Jan 22 07:17:10 crc kubenswrapper[4982]: I0122 07:17:10.539658 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-566bf687f9-kdl4x"] Jan 22 07:17:10 crc kubenswrapper[4982]: I0122 07:17:10.540247 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-566bf687f9-kdl4x" podUID="51525290-f872-41f4-98b9-544b54873ed6" containerName="dnsmasq-dns" containerID="cri-o://1d726e5c87d406debddacf37fd4783e07da4bb01befd2f943d8e9aad18b7eb11" gracePeriod=10 Jan 22 07:17:11 crc kubenswrapper[4982]: I0122 07:17:11.022004 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-566bf687f9-kdl4x" Jan 22 07:17:11 crc kubenswrapper[4982]: I0122 07:17:11.106159 4982 generic.go:334] "Generic (PLEG): container finished" podID="51525290-f872-41f4-98b9-544b54873ed6" containerID="1d726e5c87d406debddacf37fd4783e07da4bb01befd2f943d8e9aad18b7eb11" exitCode=0 Jan 22 07:17:11 crc kubenswrapper[4982]: I0122 07:17:11.106207 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-566bf687f9-kdl4x" event={"ID":"51525290-f872-41f4-98b9-544b54873ed6","Type":"ContainerDied","Data":"1d726e5c87d406debddacf37fd4783e07da4bb01befd2f943d8e9aad18b7eb11"} Jan 22 07:17:11 crc kubenswrapper[4982]: I0122 07:17:11.106240 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-566bf687f9-kdl4x" event={"ID":"51525290-f872-41f4-98b9-544b54873ed6","Type":"ContainerDied","Data":"5f9b2126d11b43aade73fdd41a88d1560de8119400693b6a49407b6c7a66e866"} Jan 22 07:17:11 crc kubenswrapper[4982]: I0122 07:17:11.106260 4982 scope.go:117] "RemoveContainer" containerID="1d726e5c87d406debddacf37fd4783e07da4bb01befd2f943d8e9aad18b7eb11" Jan 22 07:17:11 crc kubenswrapper[4982]: I0122 07:17:11.106402 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-566bf687f9-kdl4x" Jan 22 07:17:11 crc kubenswrapper[4982]: I0122 07:17:11.115716 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/51525290-f872-41f4-98b9-544b54873ed6-ovsdbserver-sb\") pod \"51525290-f872-41f4-98b9-544b54873ed6\" (UID: \"51525290-f872-41f4-98b9-544b54873ed6\") " Jan 22 07:17:11 crc kubenswrapper[4982]: I0122 07:17:11.116516 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xr5rb\" (UniqueName: \"kubernetes.io/projected/51525290-f872-41f4-98b9-544b54873ed6-kube-api-access-xr5rb\") pod \"51525290-f872-41f4-98b9-544b54873ed6\" (UID: \"51525290-f872-41f4-98b9-544b54873ed6\") " Jan 22 07:17:11 crc kubenswrapper[4982]: I0122 07:17:11.116626 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51525290-f872-41f4-98b9-544b54873ed6-config\") pod \"51525290-f872-41f4-98b9-544b54873ed6\" (UID: \"51525290-f872-41f4-98b9-544b54873ed6\") " Jan 22 07:17:11 crc kubenswrapper[4982]: I0122 07:17:11.116671 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/51525290-f872-41f4-98b9-544b54873ed6-ovsdbserver-nb\") pod \"51525290-f872-41f4-98b9-544b54873ed6\" (UID: \"51525290-f872-41f4-98b9-544b54873ed6\") " Jan 22 07:17:11 crc kubenswrapper[4982]: I0122 07:17:11.117224 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/51525290-f872-41f4-98b9-544b54873ed6-dns-svc\") pod \"51525290-f872-41f4-98b9-544b54873ed6\" (UID: \"51525290-f872-41f4-98b9-544b54873ed6\") " Jan 22 07:17:11 crc kubenswrapper[4982]: I0122 07:17:11.141236 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51525290-f872-41f4-98b9-544b54873ed6-kube-api-access-xr5rb" (OuterVolumeSpecName: "kube-api-access-xr5rb") pod "51525290-f872-41f4-98b9-544b54873ed6" (UID: "51525290-f872-41f4-98b9-544b54873ed6"). InnerVolumeSpecName "kube-api-access-xr5rb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:17:11 crc kubenswrapper[4982]: I0122 07:17:11.148330 4982 scope.go:117] "RemoveContainer" containerID="fe4a157c3029e1a89eabd0280096004b265a3e76aa1dc145209ac488cd88bfea" Jan 22 07:17:11 crc kubenswrapper[4982]: I0122 07:17:11.166776 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51525290-f872-41f4-98b9-544b54873ed6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "51525290-f872-41f4-98b9-544b54873ed6" (UID: "51525290-f872-41f4-98b9-544b54873ed6"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:17:11 crc kubenswrapper[4982]: I0122 07:17:11.172005 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51525290-f872-41f4-98b9-544b54873ed6-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "51525290-f872-41f4-98b9-544b54873ed6" (UID: "51525290-f872-41f4-98b9-544b54873ed6"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:17:11 crc kubenswrapper[4982]: I0122 07:17:11.174049 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51525290-f872-41f4-98b9-544b54873ed6-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "51525290-f872-41f4-98b9-544b54873ed6" (UID: "51525290-f872-41f4-98b9-544b54873ed6"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:17:11 crc kubenswrapper[4982]: I0122 07:17:11.195275 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51525290-f872-41f4-98b9-544b54873ed6-config" (OuterVolumeSpecName: "config") pod "51525290-f872-41f4-98b9-544b54873ed6" (UID: "51525290-f872-41f4-98b9-544b54873ed6"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:17:11 crc kubenswrapper[4982]: I0122 07:17:11.208522 4982 scope.go:117] "RemoveContainer" containerID="1d726e5c87d406debddacf37fd4783e07da4bb01befd2f943d8e9aad18b7eb11" Jan 22 07:17:11 crc kubenswrapper[4982]: E0122 07:17:11.208970 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d726e5c87d406debddacf37fd4783e07da4bb01befd2f943d8e9aad18b7eb11\": container with ID starting with 1d726e5c87d406debddacf37fd4783e07da4bb01befd2f943d8e9aad18b7eb11 not found: ID does not exist" containerID="1d726e5c87d406debddacf37fd4783e07da4bb01befd2f943d8e9aad18b7eb11" Jan 22 07:17:11 crc kubenswrapper[4982]: I0122 07:17:11.209023 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d726e5c87d406debddacf37fd4783e07da4bb01befd2f943d8e9aad18b7eb11"} err="failed to get container status \"1d726e5c87d406debddacf37fd4783e07da4bb01befd2f943d8e9aad18b7eb11\": rpc error: code = NotFound desc = could not find container \"1d726e5c87d406debddacf37fd4783e07da4bb01befd2f943d8e9aad18b7eb11\": container with ID starting with 1d726e5c87d406debddacf37fd4783e07da4bb01befd2f943d8e9aad18b7eb11 not found: ID does not exist" Jan 22 07:17:11 crc kubenswrapper[4982]: I0122 07:17:11.209046 4982 scope.go:117] "RemoveContainer" containerID="fe4a157c3029e1a89eabd0280096004b265a3e76aa1dc145209ac488cd88bfea" Jan 22 07:17:11 crc kubenswrapper[4982]: E0122 07:17:11.209421 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe4a157c3029e1a89eabd0280096004b265a3e76aa1dc145209ac488cd88bfea\": container with ID starting with fe4a157c3029e1a89eabd0280096004b265a3e76aa1dc145209ac488cd88bfea not found: ID does not exist" containerID="fe4a157c3029e1a89eabd0280096004b265a3e76aa1dc145209ac488cd88bfea" Jan 22 07:17:11 crc kubenswrapper[4982]: I0122 07:17:11.209441 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe4a157c3029e1a89eabd0280096004b265a3e76aa1dc145209ac488cd88bfea"} err="failed to get container status \"fe4a157c3029e1a89eabd0280096004b265a3e76aa1dc145209ac488cd88bfea\": rpc error: code = NotFound desc = could not find container \"fe4a157c3029e1a89eabd0280096004b265a3e76aa1dc145209ac488cd88bfea\": container with ID starting with fe4a157c3029e1a89eabd0280096004b265a3e76aa1dc145209ac488cd88bfea not found: ID does not exist" Jan 22 07:17:11 crc kubenswrapper[4982]: I0122 07:17:11.220056 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xr5rb\" (UniqueName: \"kubernetes.io/projected/51525290-f872-41f4-98b9-544b54873ed6-kube-api-access-xr5rb\") on node \"crc\" DevicePath \"\"" Jan 22 07:17:11 crc kubenswrapper[4982]: I0122 07:17:11.220121 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51525290-f872-41f4-98b9-544b54873ed6-config\") on node \"crc\" DevicePath \"\"" Jan 22 07:17:11 crc kubenswrapper[4982]: I0122 07:17:11.220136 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/51525290-f872-41f4-98b9-544b54873ed6-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 07:17:11 crc kubenswrapper[4982]: I0122 07:17:11.220146 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/51525290-f872-41f4-98b9-544b54873ed6-dns-svc\") 
on node \"crc\" DevicePath \"\"" Jan 22 07:17:11 crc kubenswrapper[4982]: I0122 07:17:11.220156 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/51525290-f872-41f4-98b9-544b54873ed6-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 07:17:11 crc kubenswrapper[4982]: I0122 07:17:11.438386 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-566bf687f9-kdl4x"] Jan 22 07:17:11 crc kubenswrapper[4982]: I0122 07:17:11.446234 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-566bf687f9-kdl4x"] Jan 22 07:17:11 crc kubenswrapper[4982]: I0122 07:17:11.728831 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51525290-f872-41f4-98b9-544b54873ed6" path="/var/lib/kubelet/pods/51525290-f872-41f4-98b9-544b54873ed6/volumes" Jan 22 07:17:13 crc kubenswrapper[4982]: I0122 07:17:13.719674 4982 scope.go:117] "RemoveContainer" containerID="dd1b1f68267d9d0fc8f31fc896f788451cccffbbd47d3265a31a9c82c393becc" Jan 22 07:17:13 crc kubenswrapper[4982]: E0122 07:17:13.720179 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:17:28 crc kubenswrapper[4982]: I0122 07:17:28.730461 4982 scope.go:117] "RemoveContainer" containerID="dd1b1f68267d9d0fc8f31fc896f788451cccffbbd47d3265a31a9c82c393becc" Jan 22 07:17:28 crc kubenswrapper[4982]: E0122 07:17:28.731442 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:17:30 crc kubenswrapper[4982]: I0122 07:17:30.561682 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-57b5cdf6b5-f7gh7" Jan 22 07:17:30 crc kubenswrapper[4982]: I0122 07:17:30.799390 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8kmtn"] Jan 22 07:17:30 crc kubenswrapper[4982]: E0122 07:17:30.799749 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7836b01-b9cf-4458-b512-51332cb48b71" containerName="extract-utilities" Jan 22 07:17:30 crc kubenswrapper[4982]: I0122 07:17:30.799769 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7836b01-b9cf-4458-b512-51332cb48b71" containerName="extract-utilities" Jan 22 07:17:30 crc kubenswrapper[4982]: E0122 07:17:30.799785 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51525290-f872-41f4-98b9-544b54873ed6" containerName="init" Jan 22 07:17:30 crc kubenswrapper[4982]: I0122 07:17:30.799793 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="51525290-f872-41f4-98b9-544b54873ed6" containerName="init" Jan 22 07:17:30 crc kubenswrapper[4982]: E0122 07:17:30.799802 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7836b01-b9cf-4458-b512-51332cb48b71" containerName="extract-content" Jan 
22 07:17:30 crc kubenswrapper[4982]: I0122 07:17:30.799808 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7836b01-b9cf-4458-b512-51332cb48b71" containerName="extract-content" Jan 22 07:17:30 crc kubenswrapper[4982]: E0122 07:17:30.799823 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7836b01-b9cf-4458-b512-51332cb48b71" containerName="registry-server" Jan 22 07:17:30 crc kubenswrapper[4982]: I0122 07:17:30.799831 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7836b01-b9cf-4458-b512-51332cb48b71" containerName="registry-server" Jan 22 07:17:30 crc kubenswrapper[4982]: E0122 07:17:30.799870 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51525290-f872-41f4-98b9-544b54873ed6" containerName="dnsmasq-dns" Jan 22 07:17:30 crc kubenswrapper[4982]: I0122 07:17:30.799879 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="51525290-f872-41f4-98b9-544b54873ed6" containerName="dnsmasq-dns" Jan 22 07:17:30 crc kubenswrapper[4982]: I0122 07:17:30.800044 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7836b01-b9cf-4458-b512-51332cb48b71" containerName="registry-server" Jan 22 07:17:30 crc kubenswrapper[4982]: I0122 07:17:30.800074 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="51525290-f872-41f4-98b9-544b54873ed6" containerName="dnsmasq-dns" Jan 22 07:17:30 crc kubenswrapper[4982]: I0122 07:17:30.801570 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8kmtn" Jan 22 07:17:30 crc kubenswrapper[4982]: I0122 07:17:30.812802 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8kmtn"] Jan 22 07:17:30 crc kubenswrapper[4982]: I0122 07:17:30.853750 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28c49df5-d523-4628-a571-736e542243bc-utilities\") pod \"redhat-marketplace-8kmtn\" (UID: \"28c49df5-d523-4628-a571-736e542243bc\") " pod="openshift-marketplace/redhat-marketplace-8kmtn" Jan 22 07:17:30 crc kubenswrapper[4982]: I0122 07:17:30.853901 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcdz7\" (UniqueName: \"kubernetes.io/projected/28c49df5-d523-4628-a571-736e542243bc-kube-api-access-fcdz7\") pod \"redhat-marketplace-8kmtn\" (UID: \"28c49df5-d523-4628-a571-736e542243bc\") " pod="openshift-marketplace/redhat-marketplace-8kmtn" Jan 22 07:17:30 crc kubenswrapper[4982]: I0122 07:17:30.854300 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28c49df5-d523-4628-a571-736e542243bc-catalog-content\") pod \"redhat-marketplace-8kmtn\" (UID: \"28c49df5-d523-4628-a571-736e542243bc\") " pod="openshift-marketplace/redhat-marketplace-8kmtn" Jan 22 07:17:30 crc kubenswrapper[4982]: I0122 07:17:30.955529 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28c49df5-d523-4628-a571-736e542243bc-utilities\") pod \"redhat-marketplace-8kmtn\" (UID: \"28c49df5-d523-4628-a571-736e542243bc\") " pod="openshift-marketplace/redhat-marketplace-8kmtn" Jan 22 07:17:30 crc kubenswrapper[4982]: I0122 07:17:30.955599 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcdz7\" (UniqueName: 
\"kubernetes.io/projected/28c49df5-d523-4628-a571-736e542243bc-kube-api-access-fcdz7\") pod \"redhat-marketplace-8kmtn\" (UID: \"28c49df5-d523-4628-a571-736e542243bc\") " pod="openshift-marketplace/redhat-marketplace-8kmtn" Jan 22 07:17:30 crc kubenswrapper[4982]: I0122 07:17:30.955748 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28c49df5-d523-4628-a571-736e542243bc-catalog-content\") pod \"redhat-marketplace-8kmtn\" (UID: \"28c49df5-d523-4628-a571-736e542243bc\") " pod="openshift-marketplace/redhat-marketplace-8kmtn" Jan 22 07:17:30 crc kubenswrapper[4982]: I0122 07:17:30.955998 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28c49df5-d523-4628-a571-736e542243bc-utilities\") pod \"redhat-marketplace-8kmtn\" (UID: \"28c49df5-d523-4628-a571-736e542243bc\") " pod="openshift-marketplace/redhat-marketplace-8kmtn" Jan 22 07:17:30 crc kubenswrapper[4982]: I0122 07:17:30.956115 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28c49df5-d523-4628-a571-736e542243bc-catalog-content\") pod \"redhat-marketplace-8kmtn\" (UID: \"28c49df5-d523-4628-a571-736e542243bc\") " pod="openshift-marketplace/redhat-marketplace-8kmtn" Jan 22 07:17:30 crc kubenswrapper[4982]: I0122 07:17:30.977651 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcdz7\" (UniqueName: \"kubernetes.io/projected/28c49df5-d523-4628-a571-736e542243bc-kube-api-access-fcdz7\") pod \"redhat-marketplace-8kmtn\" (UID: \"28c49df5-d523-4628-a571-736e542243bc\") " pod="openshift-marketplace/redhat-marketplace-8kmtn" Jan 22 07:17:31 crc kubenswrapper[4982]: I0122 07:17:31.134500 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8kmtn" Jan 22 07:17:31 crc kubenswrapper[4982]: I0122 07:17:31.579319 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8kmtn"] Jan 22 07:17:32 crc kubenswrapper[4982]: I0122 07:17:32.279607 4982 generic.go:334] "Generic (PLEG): container finished" podID="28c49df5-d523-4628-a571-736e542243bc" containerID="6619cbc97b8c114d4f8a71cd99da0f99d1e9f6911878688c0fc9c322c2b7a9fe" exitCode=0 Jan 22 07:17:32 crc kubenswrapper[4982]: I0122 07:17:32.279707 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8kmtn" event={"ID":"28c49df5-d523-4628-a571-736e542243bc","Type":"ContainerDied","Data":"6619cbc97b8c114d4f8a71cd99da0f99d1e9f6911878688c0fc9c322c2b7a9fe"} Jan 22 07:17:32 crc kubenswrapper[4982]: I0122 07:17:32.279897 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8kmtn" event={"ID":"28c49df5-d523-4628-a571-736e542243bc","Type":"ContainerStarted","Data":"e0cd5cef6b131d85089a651076e0350d63690531832d73ac98fb64c080c6e9e8"} Jan 22 07:17:33 crc kubenswrapper[4982]: I0122 07:17:33.292164 4982 generic.go:334] "Generic (PLEG): container finished" podID="28c49df5-d523-4628-a571-736e542243bc" containerID="9e616a547579f1a3497721b81bba13c31e12483b40c1fdf85b88fe3e3556880d" exitCode=0 Jan 22 07:17:33 crc kubenswrapper[4982]: I0122 07:17:33.292226 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8kmtn" event={"ID":"28c49df5-d523-4628-a571-736e542243bc","Type":"ContainerDied","Data":"9e616a547579f1a3497721b81bba13c31e12483b40c1fdf85b88fe3e3556880d"} Jan 22 07:17:34 crc kubenswrapper[4982]: I0122 07:17:34.306390 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8kmtn" event={"ID":"28c49df5-d523-4628-a571-736e542243bc","Type":"ContainerStarted","Data":"e5b4ddb114882fc5f92de7582e4bfe19025a3a891a7eb4cdd357672916fc28a3"} Jan 22 07:17:34 crc kubenswrapper[4982]: I0122 07:17:34.336571 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8kmtn" podStartSLOduration=2.9216275830000003 podStartE2EDuration="4.336553109s" podCreationTimestamp="2026-01-22 07:17:30 +0000 UTC" firstStartedPulling="2026-01-22 07:17:32.28128962 +0000 UTC m=+5513.119927623" lastFinishedPulling="2026-01-22 07:17:33.696215146 +0000 UTC m=+5514.534853149" observedRunningTime="2026-01-22 07:17:34.327346681 +0000 UTC m=+5515.165984684" watchObservedRunningTime="2026-01-22 07:17:34.336553109 +0000 UTC m=+5515.175191112" Jan 22 07:17:37 crc kubenswrapper[4982]: I0122 07:17:37.741967 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-rpjv8"] Jan 22 07:17:37 crc kubenswrapper[4982]: I0122 07:17:37.744730 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-rpjv8"] Jan 22 07:17:37 crc kubenswrapper[4982]: I0122 07:17:37.744920 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-rpjv8" Jan 22 07:17:37 crc kubenswrapper[4982]: I0122 07:17:37.808628 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-60c8-account-create-update-tww9j"] Jan 22 07:17:37 crc kubenswrapper[4982]: I0122 07:17:37.809782 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-60c8-account-create-update-tww9j" Jan 22 07:17:37 crc kubenswrapper[4982]: I0122 07:17:37.811423 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Jan 22 07:17:37 crc kubenswrapper[4982]: I0122 07:17:37.814363 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f867a75e-cf76-4cd4-8358-cccc26e0eb1d-operator-scripts\") pod \"glance-db-create-rpjv8\" (UID: \"f867a75e-cf76-4cd4-8358-cccc26e0eb1d\") " pod="openstack/glance-db-create-rpjv8" Jan 22 07:17:37 crc kubenswrapper[4982]: I0122 07:17:37.814588 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94x6j\" (UniqueName: \"kubernetes.io/projected/f867a75e-cf76-4cd4-8358-cccc26e0eb1d-kube-api-access-94x6j\") pod \"glance-db-create-rpjv8\" (UID: \"f867a75e-cf76-4cd4-8358-cccc26e0eb1d\") " pod="openstack/glance-db-create-rpjv8" Jan 22 07:17:37 crc kubenswrapper[4982]: I0122 07:17:37.819586 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-60c8-account-create-update-tww9j"] Jan 22 07:17:37 crc kubenswrapper[4982]: I0122 07:17:37.915576 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f867a75e-cf76-4cd4-8358-cccc26e0eb1d-operator-scripts\") pod \"glance-db-create-rpjv8\" (UID: \"f867a75e-cf76-4cd4-8358-cccc26e0eb1d\") " pod="openstack/glance-db-create-rpjv8" Jan 22 07:17:37 crc kubenswrapper[4982]: I0122 07:17:37.915628 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjt5s\" (UniqueName: \"kubernetes.io/projected/7bd7c64b-dbde-48c4-9660-973f7d090527-kube-api-access-zjt5s\") pod \"glance-60c8-account-create-update-tww9j\" (UID: \"7bd7c64b-dbde-48c4-9660-973f7d090527\") " pod="openstack/glance-60c8-account-create-update-tww9j" Jan 22 07:17:37 crc kubenswrapper[4982]: I0122 07:17:37.915712 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94x6j\" (UniqueName: \"kubernetes.io/projected/f867a75e-cf76-4cd4-8358-cccc26e0eb1d-kube-api-access-94x6j\") pod \"glance-db-create-rpjv8\" (UID: \"f867a75e-cf76-4cd4-8358-cccc26e0eb1d\") " pod="openstack/glance-db-create-rpjv8" Jan 22 07:17:37 crc kubenswrapper[4982]: I0122 07:17:37.915754 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7bd7c64b-dbde-48c4-9660-973f7d090527-operator-scripts\") pod \"glance-60c8-account-create-update-tww9j\" (UID: \"7bd7c64b-dbde-48c4-9660-973f7d090527\") " pod="openstack/glance-60c8-account-create-update-tww9j" Jan 22 07:17:37 crc kubenswrapper[4982]: I0122 07:17:37.917144 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f867a75e-cf76-4cd4-8358-cccc26e0eb1d-operator-scripts\") pod \"glance-db-create-rpjv8\" (UID: \"f867a75e-cf76-4cd4-8358-cccc26e0eb1d\") " pod="openstack/glance-db-create-rpjv8" Jan 22 07:17:37 crc kubenswrapper[4982]: I0122 07:17:37.940579 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94x6j\" (UniqueName: \"kubernetes.io/projected/f867a75e-cf76-4cd4-8358-cccc26e0eb1d-kube-api-access-94x6j\") pod \"glance-db-create-rpjv8\" (UID: 
\"f867a75e-cf76-4cd4-8358-cccc26e0eb1d\") " pod="openstack/glance-db-create-rpjv8" Jan 22 07:17:38 crc kubenswrapper[4982]: I0122 07:17:38.017126 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7bd7c64b-dbde-48c4-9660-973f7d090527-operator-scripts\") pod \"glance-60c8-account-create-update-tww9j\" (UID: \"7bd7c64b-dbde-48c4-9660-973f7d090527\") " pod="openstack/glance-60c8-account-create-update-tww9j" Jan 22 07:17:38 crc kubenswrapper[4982]: I0122 07:17:38.017236 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjt5s\" (UniqueName: \"kubernetes.io/projected/7bd7c64b-dbde-48c4-9660-973f7d090527-kube-api-access-zjt5s\") pod \"glance-60c8-account-create-update-tww9j\" (UID: \"7bd7c64b-dbde-48c4-9660-973f7d090527\") " pod="openstack/glance-60c8-account-create-update-tww9j" Jan 22 07:17:38 crc kubenswrapper[4982]: I0122 07:17:38.017899 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7bd7c64b-dbde-48c4-9660-973f7d090527-operator-scripts\") pod \"glance-60c8-account-create-update-tww9j\" (UID: \"7bd7c64b-dbde-48c4-9660-973f7d090527\") " pod="openstack/glance-60c8-account-create-update-tww9j" Jan 22 07:17:38 crc kubenswrapper[4982]: I0122 07:17:38.047530 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjt5s\" (UniqueName: \"kubernetes.io/projected/7bd7c64b-dbde-48c4-9660-973f7d090527-kube-api-access-zjt5s\") pod \"glance-60c8-account-create-update-tww9j\" (UID: \"7bd7c64b-dbde-48c4-9660-973f7d090527\") " pod="openstack/glance-60c8-account-create-update-tww9j" Jan 22 07:17:38 crc kubenswrapper[4982]: I0122 07:17:38.074096 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-rpjv8" Jan 22 07:17:38 crc kubenswrapper[4982]: I0122 07:17:38.127353 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-60c8-account-create-update-tww9j" Jan 22 07:17:38 crc kubenswrapper[4982]: I0122 07:17:38.557256 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-rpjv8"] Jan 22 07:17:38 crc kubenswrapper[4982]: I0122 07:17:38.615788 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-60c8-account-create-update-tww9j"] Jan 22 07:17:38 crc kubenswrapper[4982]: W0122 07:17:38.620813 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7bd7c64b_dbde_48c4_9660_973f7d090527.slice/crio-247561ea58aef4680f48ff70f6f41e899dc14be9586cbf3356a9693c4ffad11b WatchSource:0}: Error finding container 247561ea58aef4680f48ff70f6f41e899dc14be9586cbf3356a9693c4ffad11b: Status 404 returned error can't find the container with id 247561ea58aef4680f48ff70f6f41e899dc14be9586cbf3356a9693c4ffad11b Jan 22 07:17:39 crc kubenswrapper[4982]: I0122 07:17:39.357192 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-rpjv8" event={"ID":"f867a75e-cf76-4cd4-8358-cccc26e0eb1d","Type":"ContainerStarted","Data":"7b92b28a084b034c412278e3958497b45e7762b3d89d63eb03b38924c58fbb03"} Jan 22 07:17:39 crc kubenswrapper[4982]: I0122 07:17:39.357226 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-rpjv8" event={"ID":"f867a75e-cf76-4cd4-8358-cccc26e0eb1d","Type":"ContainerStarted","Data":"7e5924c523f334e5e47486b8310671e1ac3e018b476e2938d659d5c563417cd9"} Jan 22 07:17:39 crc kubenswrapper[4982]: I0122 07:17:39.359008 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-60c8-account-create-update-tww9j" event={"ID":"7bd7c64b-dbde-48c4-9660-973f7d090527","Type":"ContainerStarted","Data":"ef3061a05dc05af79a876f392559e59ab6dda6e222e3af92a09956d034be7137"} Jan 22 07:17:39 crc kubenswrapper[4982]: I0122 07:17:39.359033 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-60c8-account-create-update-tww9j" event={"ID":"7bd7c64b-dbde-48c4-9660-973f7d090527","Type":"ContainerStarted","Data":"247561ea58aef4680f48ff70f6f41e899dc14be9586cbf3356a9693c4ffad11b"} Jan 22 07:17:39 crc kubenswrapper[4982]: I0122 07:17:39.397756 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-create-rpjv8" podStartSLOduration=2.397721189 podStartE2EDuration="2.397721189s" podCreationTimestamp="2026-01-22 07:17:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:17:39.383976989 +0000 UTC m=+5520.222615002" watchObservedRunningTime="2026-01-22 07:17:39.397721189 +0000 UTC m=+5520.236359232" Jan 22 07:17:39 crc kubenswrapper[4982]: I0122 07:17:39.416012 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-60c8-account-create-update-tww9j" podStartSLOduration=2.415983912 podStartE2EDuration="2.415983912s" podCreationTimestamp="2026-01-22 07:17:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:17:39.408363276 +0000 UTC m=+5520.247001309" watchObservedRunningTime="2026-01-22 07:17:39.415983912 +0000 UTC m=+5520.254621915" Jan 22 07:17:40 crc kubenswrapper[4982]: I0122 07:17:40.370130 4982 generic.go:334] "Generic (PLEG): container finished" podID="f867a75e-cf76-4cd4-8358-cccc26e0eb1d" 
containerID="7b92b28a084b034c412278e3958497b45e7762b3d89d63eb03b38924c58fbb03" exitCode=0 Jan 22 07:17:40 crc kubenswrapper[4982]: I0122 07:17:40.370193 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-rpjv8" event={"ID":"f867a75e-cf76-4cd4-8358-cccc26e0eb1d","Type":"ContainerDied","Data":"7b92b28a084b034c412278e3958497b45e7762b3d89d63eb03b38924c58fbb03"} Jan 22 07:17:40 crc kubenswrapper[4982]: I0122 07:17:40.371711 4982 generic.go:334] "Generic (PLEG): container finished" podID="7bd7c64b-dbde-48c4-9660-973f7d090527" containerID="ef3061a05dc05af79a876f392559e59ab6dda6e222e3af92a09956d034be7137" exitCode=0 Jan 22 07:17:40 crc kubenswrapper[4982]: I0122 07:17:40.371742 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-60c8-account-create-update-tww9j" event={"ID":"7bd7c64b-dbde-48c4-9660-973f7d090527","Type":"ContainerDied","Data":"ef3061a05dc05af79a876f392559e59ab6dda6e222e3af92a09956d034be7137"} Jan 22 07:17:41 crc kubenswrapper[4982]: I0122 07:17:41.134730 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8kmtn" Jan 22 07:17:41 crc kubenswrapper[4982]: I0122 07:17:41.134811 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8kmtn" Jan 22 07:17:41 crc kubenswrapper[4982]: I0122 07:17:41.186771 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8kmtn" Jan 22 07:17:41 crc kubenswrapper[4982]: I0122 07:17:41.452919 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8kmtn" Jan 22 07:17:41 crc kubenswrapper[4982]: I0122 07:17:41.519486 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8kmtn"] Jan 22 07:17:41 crc kubenswrapper[4982]: I0122 07:17:41.791478 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-60c8-account-create-update-tww9j" Jan 22 07:17:41 crc kubenswrapper[4982]: I0122 07:17:41.796680 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-rpjv8" Jan 22 07:17:41 crc kubenswrapper[4982]: I0122 07:17:41.881578 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zjt5s\" (UniqueName: \"kubernetes.io/projected/7bd7c64b-dbde-48c4-9660-973f7d090527-kube-api-access-zjt5s\") pod \"7bd7c64b-dbde-48c4-9660-973f7d090527\" (UID: \"7bd7c64b-dbde-48c4-9660-973f7d090527\") " Jan 22 07:17:41 crc kubenswrapper[4982]: I0122 07:17:41.881643 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7bd7c64b-dbde-48c4-9660-973f7d090527-operator-scripts\") pod \"7bd7c64b-dbde-48c4-9660-973f7d090527\" (UID: \"7bd7c64b-dbde-48c4-9660-973f7d090527\") " Jan 22 07:17:41 crc kubenswrapper[4982]: I0122 07:17:41.881769 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-94x6j\" (UniqueName: \"kubernetes.io/projected/f867a75e-cf76-4cd4-8358-cccc26e0eb1d-kube-api-access-94x6j\") pod \"f867a75e-cf76-4cd4-8358-cccc26e0eb1d\" (UID: \"f867a75e-cf76-4cd4-8358-cccc26e0eb1d\") " Jan 22 07:17:41 crc kubenswrapper[4982]: I0122 07:17:41.881831 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f867a75e-cf76-4cd4-8358-cccc26e0eb1d-operator-scripts\") pod \"f867a75e-cf76-4cd4-8358-cccc26e0eb1d\" (UID: \"f867a75e-cf76-4cd4-8358-cccc26e0eb1d\") " Jan 22 07:17:41 crc kubenswrapper[4982]: I0122 07:17:41.882909 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bd7c64b-dbde-48c4-9660-973f7d090527-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7bd7c64b-dbde-48c4-9660-973f7d090527" (UID: "7bd7c64b-dbde-48c4-9660-973f7d090527"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:17:41 crc kubenswrapper[4982]: I0122 07:17:41.883149 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f867a75e-cf76-4cd4-8358-cccc26e0eb1d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f867a75e-cf76-4cd4-8358-cccc26e0eb1d" (UID: "f867a75e-cf76-4cd4-8358-cccc26e0eb1d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:17:41 crc kubenswrapper[4982]: I0122 07:17:41.888682 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f867a75e-cf76-4cd4-8358-cccc26e0eb1d-kube-api-access-94x6j" (OuterVolumeSpecName: "kube-api-access-94x6j") pod "f867a75e-cf76-4cd4-8358-cccc26e0eb1d" (UID: "f867a75e-cf76-4cd4-8358-cccc26e0eb1d"). InnerVolumeSpecName "kube-api-access-94x6j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:17:41 crc kubenswrapper[4982]: I0122 07:17:41.889553 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bd7c64b-dbde-48c4-9660-973f7d090527-kube-api-access-zjt5s" (OuterVolumeSpecName: "kube-api-access-zjt5s") pod "7bd7c64b-dbde-48c4-9660-973f7d090527" (UID: "7bd7c64b-dbde-48c4-9660-973f7d090527"). InnerVolumeSpecName "kube-api-access-zjt5s". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:17:41 crc kubenswrapper[4982]: I0122 07:17:41.983710 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zjt5s\" (UniqueName: \"kubernetes.io/projected/7bd7c64b-dbde-48c4-9660-973f7d090527-kube-api-access-zjt5s\") on node \"crc\" DevicePath \"\"" Jan 22 07:17:41 crc kubenswrapper[4982]: I0122 07:17:41.983774 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7bd7c64b-dbde-48c4-9660-973f7d090527-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:17:41 crc kubenswrapper[4982]: I0122 07:17:41.983786 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-94x6j\" (UniqueName: \"kubernetes.io/projected/f867a75e-cf76-4cd4-8358-cccc26e0eb1d-kube-api-access-94x6j\") on node \"crc\" DevicePath \"\"" Jan 22 07:17:41 crc kubenswrapper[4982]: I0122 07:17:41.983797 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f867a75e-cf76-4cd4-8358-cccc26e0eb1d-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:17:42 crc kubenswrapper[4982]: I0122 07:17:42.398070 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-rpjv8" event={"ID":"f867a75e-cf76-4cd4-8358-cccc26e0eb1d","Type":"ContainerDied","Data":"7e5924c523f334e5e47486b8310671e1ac3e018b476e2938d659d5c563417cd9"} Jan 22 07:17:42 crc kubenswrapper[4982]: I0122 07:17:42.398390 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7e5924c523f334e5e47486b8310671e1ac3e018b476e2938d659d5c563417cd9" Jan 22 07:17:42 crc kubenswrapper[4982]: I0122 07:17:42.398108 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-rpjv8" Jan 22 07:17:42 crc kubenswrapper[4982]: I0122 07:17:42.401416 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-60c8-account-create-update-tww9j" event={"ID":"7bd7c64b-dbde-48c4-9660-973f7d090527","Type":"ContainerDied","Data":"247561ea58aef4680f48ff70f6f41e899dc14be9586cbf3356a9693c4ffad11b"} Jan 22 07:17:42 crc kubenswrapper[4982]: I0122 07:17:42.401470 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="247561ea58aef4680f48ff70f6f41e899dc14be9586cbf3356a9693c4ffad11b" Jan 22 07:17:42 crc kubenswrapper[4982]: I0122 07:17:42.401428 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-60c8-account-create-update-tww9j" Jan 22 07:17:43 crc kubenswrapper[4982]: I0122 07:17:43.407452 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-8kmtn" podUID="28c49df5-d523-4628-a571-736e542243bc" containerName="registry-server" containerID="cri-o://e5b4ddb114882fc5f92de7582e4bfe19025a3a891a7eb4cdd357672916fc28a3" gracePeriod=2 Jan 22 07:17:43 crc kubenswrapper[4982]: I0122 07:17:43.719037 4982 scope.go:117] "RemoveContainer" containerID="dd1b1f68267d9d0fc8f31fc896f788451cccffbbd47d3265a31a9c82c393becc" Jan 22 07:17:43 crc kubenswrapper[4982]: E0122 07:17:43.719369 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:17:44 crc kubenswrapper[4982]: I0122 07:17:44.380969 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8kmtn" Jan 22 07:17:44 crc kubenswrapper[4982]: I0122 07:17:44.422441 4982 generic.go:334] "Generic (PLEG): container finished" podID="28c49df5-d523-4628-a571-736e542243bc" containerID="e5b4ddb114882fc5f92de7582e4bfe19025a3a891a7eb4cdd357672916fc28a3" exitCode=0 Jan 22 07:17:44 crc kubenswrapper[4982]: I0122 07:17:44.422486 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8kmtn" event={"ID":"28c49df5-d523-4628-a571-736e542243bc","Type":"ContainerDied","Data":"e5b4ddb114882fc5f92de7582e4bfe19025a3a891a7eb4cdd357672916fc28a3"} Jan 22 07:17:44 crc kubenswrapper[4982]: I0122 07:17:44.422501 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8kmtn" Jan 22 07:17:44 crc kubenswrapper[4982]: I0122 07:17:44.422520 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8kmtn" event={"ID":"28c49df5-d523-4628-a571-736e542243bc","Type":"ContainerDied","Data":"e0cd5cef6b131d85089a651076e0350d63690531832d73ac98fb64c080c6e9e8"} Jan 22 07:17:44 crc kubenswrapper[4982]: I0122 07:17:44.422540 4982 scope.go:117] "RemoveContainer" containerID="e5b4ddb114882fc5f92de7582e4bfe19025a3a891a7eb4cdd357672916fc28a3" Jan 22 07:17:44 crc kubenswrapper[4982]: I0122 07:17:44.422826 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcdz7\" (UniqueName: \"kubernetes.io/projected/28c49df5-d523-4628-a571-736e542243bc-kube-api-access-fcdz7\") pod \"28c49df5-d523-4628-a571-736e542243bc\" (UID: \"28c49df5-d523-4628-a571-736e542243bc\") " Jan 22 07:17:44 crc kubenswrapper[4982]: I0122 07:17:44.422930 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28c49df5-d523-4628-a571-736e542243bc-utilities\") pod \"28c49df5-d523-4628-a571-736e542243bc\" (UID: \"28c49df5-d523-4628-a571-736e542243bc\") " Jan 22 07:17:44 crc kubenswrapper[4982]: I0122 07:17:44.422978 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28c49df5-d523-4628-a571-736e542243bc-catalog-content\") pod \"28c49df5-d523-4628-a571-736e542243bc\" (UID: \"28c49df5-d523-4628-a571-736e542243bc\") " Jan 22 07:17:44 crc kubenswrapper[4982]: I0122 07:17:44.423980 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28c49df5-d523-4628-a571-736e542243bc-utilities" (OuterVolumeSpecName: "utilities") pod "28c49df5-d523-4628-a571-736e542243bc" (UID: "28c49df5-d523-4628-a571-736e542243bc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:17:44 crc kubenswrapper[4982]: I0122 07:17:44.429937 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28c49df5-d523-4628-a571-736e542243bc-kube-api-access-fcdz7" (OuterVolumeSpecName: "kube-api-access-fcdz7") pod "28c49df5-d523-4628-a571-736e542243bc" (UID: "28c49df5-d523-4628-a571-736e542243bc"). InnerVolumeSpecName "kube-api-access-fcdz7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:17:44 crc kubenswrapper[4982]: I0122 07:17:44.451724 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28c49df5-d523-4628-a571-736e542243bc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "28c49df5-d523-4628-a571-736e542243bc" (UID: "28c49df5-d523-4628-a571-736e542243bc"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:17:44 crc kubenswrapper[4982]: I0122 07:17:44.477507 4982 scope.go:117] "RemoveContainer" containerID="9e616a547579f1a3497721b81bba13c31e12483b40c1fdf85b88fe3e3556880d" Jan 22 07:17:44 crc kubenswrapper[4982]: I0122 07:17:44.492468 4982 scope.go:117] "RemoveContainer" containerID="6619cbc97b8c114d4f8a71cd99da0f99d1e9f6911878688c0fc9c322c2b7a9fe" Jan 22 07:17:44 crc kubenswrapper[4982]: I0122 07:17:44.525872 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcdz7\" (UniqueName: \"kubernetes.io/projected/28c49df5-d523-4628-a571-736e542243bc-kube-api-access-fcdz7\") on node \"crc\" DevicePath \"\"" Jan 22 07:17:44 crc kubenswrapper[4982]: I0122 07:17:44.525915 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28c49df5-d523-4628-a571-736e542243bc-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 07:17:44 crc kubenswrapper[4982]: I0122 07:17:44.525927 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28c49df5-d523-4628-a571-736e542243bc-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 07:17:44 crc kubenswrapper[4982]: I0122 07:17:44.529929 4982 scope.go:117] "RemoveContainer" containerID="e5b4ddb114882fc5f92de7582e4bfe19025a3a891a7eb4cdd357672916fc28a3" Jan 22 07:17:44 crc kubenswrapper[4982]: E0122 07:17:44.530404 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5b4ddb114882fc5f92de7582e4bfe19025a3a891a7eb4cdd357672916fc28a3\": container with ID starting with e5b4ddb114882fc5f92de7582e4bfe19025a3a891a7eb4cdd357672916fc28a3 not found: ID does not exist" containerID="e5b4ddb114882fc5f92de7582e4bfe19025a3a891a7eb4cdd357672916fc28a3" Jan 22 07:17:44 crc kubenswrapper[4982]: I0122 07:17:44.530457 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5b4ddb114882fc5f92de7582e4bfe19025a3a891a7eb4cdd357672916fc28a3"} err="failed to get container status \"e5b4ddb114882fc5f92de7582e4bfe19025a3a891a7eb4cdd357672916fc28a3\": rpc error: code = NotFound desc = could not find container \"e5b4ddb114882fc5f92de7582e4bfe19025a3a891a7eb4cdd357672916fc28a3\": container with ID starting with e5b4ddb114882fc5f92de7582e4bfe19025a3a891a7eb4cdd357672916fc28a3 not found: ID does not exist" Jan 22 07:17:44 crc kubenswrapper[4982]: I0122 07:17:44.530489 4982 scope.go:117] "RemoveContainer" containerID="9e616a547579f1a3497721b81bba13c31e12483b40c1fdf85b88fe3e3556880d" Jan 22 07:17:44 crc kubenswrapper[4982]: E0122 07:17:44.531040 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e616a547579f1a3497721b81bba13c31e12483b40c1fdf85b88fe3e3556880d\": container with ID starting with 9e616a547579f1a3497721b81bba13c31e12483b40c1fdf85b88fe3e3556880d not found: ID does not exist" containerID="9e616a547579f1a3497721b81bba13c31e12483b40c1fdf85b88fe3e3556880d" Jan 22 07:17:44 crc kubenswrapper[4982]: I0122 07:17:44.531095 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e616a547579f1a3497721b81bba13c31e12483b40c1fdf85b88fe3e3556880d"} err="failed to get container status \"9e616a547579f1a3497721b81bba13c31e12483b40c1fdf85b88fe3e3556880d\": rpc error: code = NotFound desc = could not find container 
\"9e616a547579f1a3497721b81bba13c31e12483b40c1fdf85b88fe3e3556880d\": container with ID starting with 9e616a547579f1a3497721b81bba13c31e12483b40c1fdf85b88fe3e3556880d not found: ID does not exist" Jan 22 07:17:44 crc kubenswrapper[4982]: I0122 07:17:44.531124 4982 scope.go:117] "RemoveContainer" containerID="6619cbc97b8c114d4f8a71cd99da0f99d1e9f6911878688c0fc9c322c2b7a9fe" Jan 22 07:17:44 crc kubenswrapper[4982]: E0122 07:17:44.531523 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6619cbc97b8c114d4f8a71cd99da0f99d1e9f6911878688c0fc9c322c2b7a9fe\": container with ID starting with 6619cbc97b8c114d4f8a71cd99da0f99d1e9f6911878688c0fc9c322c2b7a9fe not found: ID does not exist" containerID="6619cbc97b8c114d4f8a71cd99da0f99d1e9f6911878688c0fc9c322c2b7a9fe" Jan 22 07:17:44 crc kubenswrapper[4982]: I0122 07:17:44.531565 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6619cbc97b8c114d4f8a71cd99da0f99d1e9f6911878688c0fc9c322c2b7a9fe"} err="failed to get container status \"6619cbc97b8c114d4f8a71cd99da0f99d1e9f6911878688c0fc9c322c2b7a9fe\": rpc error: code = NotFound desc = could not find container \"6619cbc97b8c114d4f8a71cd99da0f99d1e9f6911878688c0fc9c322c2b7a9fe\": container with ID starting with 6619cbc97b8c114d4f8a71cd99da0f99d1e9f6911878688c0fc9c322c2b7a9fe not found: ID does not exist" Jan 22 07:17:44 crc kubenswrapper[4982]: I0122 07:17:44.786584 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8kmtn"] Jan 22 07:17:44 crc kubenswrapper[4982]: I0122 07:17:44.792509 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-8kmtn"] Jan 22 07:17:45 crc kubenswrapper[4982]: I0122 07:17:45.731996 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28c49df5-d523-4628-a571-736e542243bc" path="/var/lib/kubelet/pods/28c49df5-d523-4628-a571-736e542243bc/volumes" Jan 22 07:17:47 crc kubenswrapper[4982]: I0122 07:17:47.758209 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-rznxl"] Jan 22 07:17:47 crc kubenswrapper[4982]: E0122 07:17:47.758797 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bd7c64b-dbde-48c4-9660-973f7d090527" containerName="mariadb-account-create-update" Jan 22 07:17:47 crc kubenswrapper[4982]: I0122 07:17:47.758809 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bd7c64b-dbde-48c4-9660-973f7d090527" containerName="mariadb-account-create-update" Jan 22 07:17:47 crc kubenswrapper[4982]: E0122 07:17:47.758821 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28c49df5-d523-4628-a571-736e542243bc" containerName="extract-content" Jan 22 07:17:47 crc kubenswrapper[4982]: I0122 07:17:47.758827 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="28c49df5-d523-4628-a571-736e542243bc" containerName="extract-content" Jan 22 07:17:47 crc kubenswrapper[4982]: E0122 07:17:47.758860 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f867a75e-cf76-4cd4-8358-cccc26e0eb1d" containerName="mariadb-database-create" Jan 22 07:17:47 crc kubenswrapper[4982]: I0122 07:17:47.758880 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f867a75e-cf76-4cd4-8358-cccc26e0eb1d" containerName="mariadb-database-create" Jan 22 07:17:47 crc kubenswrapper[4982]: E0122 07:17:47.758889 4982 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="28c49df5-d523-4628-a571-736e542243bc" containerName="registry-server" Jan 22 07:17:47 crc kubenswrapper[4982]: I0122 07:17:47.758895 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="28c49df5-d523-4628-a571-736e542243bc" containerName="registry-server" Jan 22 07:17:47 crc kubenswrapper[4982]: E0122 07:17:47.758903 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28c49df5-d523-4628-a571-736e542243bc" containerName="extract-utilities" Jan 22 07:17:47 crc kubenswrapper[4982]: I0122 07:17:47.758909 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="28c49df5-d523-4628-a571-736e542243bc" containerName="extract-utilities" Jan 22 07:17:47 crc kubenswrapper[4982]: I0122 07:17:47.759063 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="28c49df5-d523-4628-a571-736e542243bc" containerName="registry-server" Jan 22 07:17:47 crc kubenswrapper[4982]: I0122 07:17:47.759083 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f867a75e-cf76-4cd4-8358-cccc26e0eb1d" containerName="mariadb-database-create" Jan 22 07:17:47 crc kubenswrapper[4982]: I0122 07:17:47.759102 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bd7c64b-dbde-48c4-9660-973f7d090527" containerName="mariadb-account-create-update" Jan 22 07:17:47 crc kubenswrapper[4982]: I0122 07:17:47.759721 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-rznxl" Jan 22 07:17:47 crc kubenswrapper[4982]: I0122 07:17:47.767054 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-4n6ff" Jan 22 07:17:47 crc kubenswrapper[4982]: I0122 07:17:47.767094 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Jan 22 07:17:47 crc kubenswrapper[4982]: I0122 07:17:47.775285 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-rznxl"] Jan 22 07:17:47 crc kubenswrapper[4982]: I0122 07:17:47.794067 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b2118180-20f1-4fc4-ba92-ee7b23e3d082-db-sync-config-data\") pod \"glance-db-sync-rznxl\" (UID: \"b2118180-20f1-4fc4-ba92-ee7b23e3d082\") " pod="openstack/glance-db-sync-rznxl" Jan 22 07:17:47 crc kubenswrapper[4982]: I0122 07:17:47.794148 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxcnm\" (UniqueName: \"kubernetes.io/projected/b2118180-20f1-4fc4-ba92-ee7b23e3d082-kube-api-access-qxcnm\") pod \"glance-db-sync-rznxl\" (UID: \"b2118180-20f1-4fc4-ba92-ee7b23e3d082\") " pod="openstack/glance-db-sync-rznxl" Jan 22 07:17:47 crc kubenswrapper[4982]: I0122 07:17:47.794341 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2118180-20f1-4fc4-ba92-ee7b23e3d082-combined-ca-bundle\") pod \"glance-db-sync-rznxl\" (UID: \"b2118180-20f1-4fc4-ba92-ee7b23e3d082\") " pod="openstack/glance-db-sync-rznxl" Jan 22 07:17:47 crc kubenswrapper[4982]: I0122 07:17:47.794470 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2118180-20f1-4fc4-ba92-ee7b23e3d082-config-data\") pod \"glance-db-sync-rznxl\" (UID: \"b2118180-20f1-4fc4-ba92-ee7b23e3d082\") " 
pod="openstack/glance-db-sync-rznxl" Jan 22 07:17:47 crc kubenswrapper[4982]: I0122 07:17:47.896140 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2118180-20f1-4fc4-ba92-ee7b23e3d082-combined-ca-bundle\") pod \"glance-db-sync-rznxl\" (UID: \"b2118180-20f1-4fc4-ba92-ee7b23e3d082\") " pod="openstack/glance-db-sync-rznxl" Jan 22 07:17:47 crc kubenswrapper[4982]: I0122 07:17:47.896266 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2118180-20f1-4fc4-ba92-ee7b23e3d082-config-data\") pod \"glance-db-sync-rznxl\" (UID: \"b2118180-20f1-4fc4-ba92-ee7b23e3d082\") " pod="openstack/glance-db-sync-rznxl" Jan 22 07:17:47 crc kubenswrapper[4982]: I0122 07:17:47.896328 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b2118180-20f1-4fc4-ba92-ee7b23e3d082-db-sync-config-data\") pod \"glance-db-sync-rznxl\" (UID: \"b2118180-20f1-4fc4-ba92-ee7b23e3d082\") " pod="openstack/glance-db-sync-rznxl" Jan 22 07:17:47 crc kubenswrapper[4982]: I0122 07:17:47.896363 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxcnm\" (UniqueName: \"kubernetes.io/projected/b2118180-20f1-4fc4-ba92-ee7b23e3d082-kube-api-access-qxcnm\") pod \"glance-db-sync-rznxl\" (UID: \"b2118180-20f1-4fc4-ba92-ee7b23e3d082\") " pod="openstack/glance-db-sync-rznxl" Jan 22 07:17:47 crc kubenswrapper[4982]: I0122 07:17:47.902636 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2118180-20f1-4fc4-ba92-ee7b23e3d082-config-data\") pod \"glance-db-sync-rznxl\" (UID: \"b2118180-20f1-4fc4-ba92-ee7b23e3d082\") " pod="openstack/glance-db-sync-rznxl" Jan 22 07:17:47 crc kubenswrapper[4982]: I0122 07:17:47.916460 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b2118180-20f1-4fc4-ba92-ee7b23e3d082-db-sync-config-data\") pod \"glance-db-sync-rznxl\" (UID: \"b2118180-20f1-4fc4-ba92-ee7b23e3d082\") " pod="openstack/glance-db-sync-rznxl" Jan 22 07:17:47 crc kubenswrapper[4982]: I0122 07:17:47.916524 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2118180-20f1-4fc4-ba92-ee7b23e3d082-combined-ca-bundle\") pod \"glance-db-sync-rznxl\" (UID: \"b2118180-20f1-4fc4-ba92-ee7b23e3d082\") " pod="openstack/glance-db-sync-rznxl" Jan 22 07:17:47 crc kubenswrapper[4982]: I0122 07:17:47.921290 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxcnm\" (UniqueName: \"kubernetes.io/projected/b2118180-20f1-4fc4-ba92-ee7b23e3d082-kube-api-access-qxcnm\") pod \"glance-db-sync-rznxl\" (UID: \"b2118180-20f1-4fc4-ba92-ee7b23e3d082\") " pod="openstack/glance-db-sync-rznxl" Jan 22 07:17:48 crc kubenswrapper[4982]: I0122 07:17:48.084709 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-rznxl" Jan 22 07:17:48 crc kubenswrapper[4982]: I0122 07:17:48.627908 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-rznxl"] Jan 22 07:17:49 crc kubenswrapper[4982]: I0122 07:17:49.465040 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-rznxl" event={"ID":"b2118180-20f1-4fc4-ba92-ee7b23e3d082","Type":"ContainerStarted","Data":"2a43c1eddb99759bb7dd764fb472489872dd2e099731e3117c3f5c2aa223d9ff"} Jan 22 07:17:49 crc kubenswrapper[4982]: I0122 07:17:49.465355 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-rznxl" event={"ID":"b2118180-20f1-4fc4-ba92-ee7b23e3d082","Type":"ContainerStarted","Data":"c7c42527cc62aa3661d6b5c0dbee0b23128428551871cde764da5fbe8885820d"} Jan 22 07:17:49 crc kubenswrapper[4982]: I0122 07:17:49.483623 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-rznxl" podStartSLOduration=2.483602688 podStartE2EDuration="2.483602688s" podCreationTimestamp="2026-01-22 07:17:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:17:49.478502691 +0000 UTC m=+5530.317140724" watchObservedRunningTime="2026-01-22 07:17:49.483602688 +0000 UTC m=+5530.322240691" Jan 22 07:17:53 crc kubenswrapper[4982]: I0122 07:17:53.495265 4982 generic.go:334] "Generic (PLEG): container finished" podID="b2118180-20f1-4fc4-ba92-ee7b23e3d082" containerID="2a43c1eddb99759bb7dd764fb472489872dd2e099731e3117c3f5c2aa223d9ff" exitCode=0 Jan 22 07:17:53 crc kubenswrapper[4982]: I0122 07:17:53.495377 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-rznxl" event={"ID":"b2118180-20f1-4fc4-ba92-ee7b23e3d082","Type":"ContainerDied","Data":"2a43c1eddb99759bb7dd764fb472489872dd2e099731e3117c3f5c2aa223d9ff"} Jan 22 07:17:54 crc kubenswrapper[4982]: I0122 07:17:54.720086 4982 scope.go:117] "RemoveContainer" containerID="dd1b1f68267d9d0fc8f31fc896f788451cccffbbd47d3265a31a9c82c393becc" Jan 22 07:17:54 crc kubenswrapper[4982]: E0122 07:17:54.720751 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:17:54 crc kubenswrapper[4982]: I0122 07:17:54.862444 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-rznxl" Jan 22 07:17:54 crc kubenswrapper[4982]: I0122 07:17:54.996547 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2118180-20f1-4fc4-ba92-ee7b23e3d082-config-data\") pod \"b2118180-20f1-4fc4-ba92-ee7b23e3d082\" (UID: \"b2118180-20f1-4fc4-ba92-ee7b23e3d082\") " Jan 22 07:17:54 crc kubenswrapper[4982]: I0122 07:17:54.996987 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b2118180-20f1-4fc4-ba92-ee7b23e3d082-db-sync-config-data\") pod \"b2118180-20f1-4fc4-ba92-ee7b23e3d082\" (UID: \"b2118180-20f1-4fc4-ba92-ee7b23e3d082\") " Jan 22 07:17:54 crc kubenswrapper[4982]: I0122 07:17:54.997099 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2118180-20f1-4fc4-ba92-ee7b23e3d082-combined-ca-bundle\") pod \"b2118180-20f1-4fc4-ba92-ee7b23e3d082\" (UID: \"b2118180-20f1-4fc4-ba92-ee7b23e3d082\") " Jan 22 07:17:54 crc kubenswrapper[4982]: I0122 07:17:54.997143 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qxcnm\" (UniqueName: \"kubernetes.io/projected/b2118180-20f1-4fc4-ba92-ee7b23e3d082-kube-api-access-qxcnm\") pod \"b2118180-20f1-4fc4-ba92-ee7b23e3d082\" (UID: \"b2118180-20f1-4fc4-ba92-ee7b23e3d082\") " Jan 22 07:17:55 crc kubenswrapper[4982]: I0122 07:17:55.002192 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2118180-20f1-4fc4-ba92-ee7b23e3d082-kube-api-access-qxcnm" (OuterVolumeSpecName: "kube-api-access-qxcnm") pod "b2118180-20f1-4fc4-ba92-ee7b23e3d082" (UID: "b2118180-20f1-4fc4-ba92-ee7b23e3d082"). InnerVolumeSpecName "kube-api-access-qxcnm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:17:55 crc kubenswrapper[4982]: I0122 07:17:55.010098 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2118180-20f1-4fc4-ba92-ee7b23e3d082-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "b2118180-20f1-4fc4-ba92-ee7b23e3d082" (UID: "b2118180-20f1-4fc4-ba92-ee7b23e3d082"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:17:55 crc kubenswrapper[4982]: I0122 07:17:55.023287 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2118180-20f1-4fc4-ba92-ee7b23e3d082-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b2118180-20f1-4fc4-ba92-ee7b23e3d082" (UID: "b2118180-20f1-4fc4-ba92-ee7b23e3d082"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:17:55 crc kubenswrapper[4982]: I0122 07:17:55.039741 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2118180-20f1-4fc4-ba92-ee7b23e3d082-config-data" (OuterVolumeSpecName: "config-data") pod "b2118180-20f1-4fc4-ba92-ee7b23e3d082" (UID: "b2118180-20f1-4fc4-ba92-ee7b23e3d082"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:17:55 crc kubenswrapper[4982]: I0122 07:17:55.099611 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2118180-20f1-4fc4-ba92-ee7b23e3d082-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:17:55 crc kubenswrapper[4982]: I0122 07:17:55.099655 4982 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b2118180-20f1-4fc4-ba92-ee7b23e3d082-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:17:55 crc kubenswrapper[4982]: I0122 07:17:55.099675 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2118180-20f1-4fc4-ba92-ee7b23e3d082-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:17:55 crc kubenswrapper[4982]: I0122 07:17:55.099688 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qxcnm\" (UniqueName: \"kubernetes.io/projected/b2118180-20f1-4fc4-ba92-ee7b23e3d082-kube-api-access-qxcnm\") on node \"crc\" DevicePath \"\"" Jan 22 07:17:55 crc kubenswrapper[4982]: I0122 07:17:55.514143 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-rznxl" event={"ID":"b2118180-20f1-4fc4-ba92-ee7b23e3d082","Type":"ContainerDied","Data":"c7c42527cc62aa3661d6b5c0dbee0b23128428551871cde764da5fbe8885820d"} Jan 22 07:17:55 crc kubenswrapper[4982]: I0122 07:17:55.514183 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c7c42527cc62aa3661d6b5c0dbee0b23128428551871cde764da5fbe8885820d" Jan 22 07:17:55 crc kubenswrapper[4982]: I0122 07:17:55.514244 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-rznxl" Jan 22 07:17:55 crc kubenswrapper[4982]: I0122 07:17:55.910339 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 07:17:55 crc kubenswrapper[4982]: E0122 07:17:55.910688 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2118180-20f1-4fc4-ba92-ee7b23e3d082" containerName="glance-db-sync" Jan 22 07:17:55 crc kubenswrapper[4982]: I0122 07:17:55.910704 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2118180-20f1-4fc4-ba92-ee7b23e3d082" containerName="glance-db-sync" Jan 22 07:17:55 crc kubenswrapper[4982]: I0122 07:17:55.910930 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2118180-20f1-4fc4-ba92-ee7b23e3d082" containerName="glance-db-sync" Jan 22 07:17:55 crc kubenswrapper[4982]: I0122 07:17:55.912029 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 07:17:55 crc kubenswrapper[4982]: I0122 07:17:55.921637 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Jan 22 07:17:55 crc kubenswrapper[4982]: I0122 07:17:55.921993 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Jan 22 07:17:55 crc kubenswrapper[4982]: I0122 07:17:55.922021 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Jan 22 07:17:55 crc kubenswrapper[4982]: I0122 07:17:55.923717 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-4n6ff" Jan 22 07:17:55 crc kubenswrapper[4982]: I0122 07:17:55.927955 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-c4944f4c9-czwn4"] Jan 22 07:17:55 crc kubenswrapper[4982]: I0122 07:17:55.929304 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c4944f4c9-czwn4" Jan 22 07:17:55 crc kubenswrapper[4982]: I0122 07:17:55.946972 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 07:17:55 crc kubenswrapper[4982]: I0122 07:17:55.968995 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-c4944f4c9-czwn4"] Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.021265 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b33a718-1da3-4ab4-a5bd-dcc7886a4944-config\") pod \"dnsmasq-dns-c4944f4c9-czwn4\" (UID: \"7b33a718-1da3-4ab4-a5bd-dcc7886a4944\") " pod="openstack/dnsmasq-dns-c4944f4c9-czwn4" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.021327 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b33a718-1da3-4ab4-a5bd-dcc7886a4944-ovsdbserver-sb\") pod \"dnsmasq-dns-c4944f4c9-czwn4\" (UID: \"7b33a718-1da3-4ab4-a5bd-dcc7886a4944\") " pod="openstack/dnsmasq-dns-c4944f4c9-czwn4" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.021358 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/89032892-d10b-436c-b756-92b5fbd4294d-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"89032892-d10b-436c-b756-92b5fbd4294d\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.021412 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/89032892-d10b-436c-b756-92b5fbd4294d-logs\") pod \"glance-default-external-api-0\" (UID: \"89032892-d10b-436c-b756-92b5fbd4294d\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.021429 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89032892-d10b-436c-b756-92b5fbd4294d-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"89032892-d10b-436c-b756-92b5fbd4294d\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.021473 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b33a718-1da3-4ab4-a5bd-dcc7886a4944-dns-svc\") pod \"dnsmasq-dns-c4944f4c9-czwn4\" (UID: \"7b33a718-1da3-4ab4-a5bd-dcc7886a4944\") " pod="openstack/dnsmasq-dns-c4944f4c9-czwn4" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.021490 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xg8f5\" (UniqueName: \"kubernetes.io/projected/7b33a718-1da3-4ab4-a5bd-dcc7886a4944-kube-api-access-xg8f5\") pod \"dnsmasq-dns-c4944f4c9-czwn4\" (UID: \"7b33a718-1da3-4ab4-a5bd-dcc7886a4944\") " pod="openstack/dnsmasq-dns-c4944f4c9-czwn4" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.021505 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b33a718-1da3-4ab4-a5bd-dcc7886a4944-ovsdbserver-nb\") pod \"dnsmasq-dns-c4944f4c9-czwn4\" (UID: \"7b33a718-1da3-4ab4-a5bd-dcc7886a4944\") " pod="openstack/dnsmasq-dns-c4944f4c9-czwn4" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.021525 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8vqj\" (UniqueName: \"kubernetes.io/projected/89032892-d10b-436c-b756-92b5fbd4294d-kube-api-access-q8vqj\") pod \"glance-default-external-api-0\" (UID: \"89032892-d10b-436c-b756-92b5fbd4294d\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.021603 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89032892-d10b-436c-b756-92b5fbd4294d-config-data\") pod \"glance-default-external-api-0\" (UID: \"89032892-d10b-436c-b756-92b5fbd4294d\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.021838 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/89032892-d10b-436c-b756-92b5fbd4294d-ceph\") pod \"glance-default-external-api-0\" (UID: \"89032892-d10b-436c-b756-92b5fbd4294d\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.021970 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89032892-d10b-436c-b756-92b5fbd4294d-scripts\") pod \"glance-default-external-api-0\" (UID: \"89032892-d10b-436c-b756-92b5fbd4294d\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.107312 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.108589 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.117979 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.123982 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/89032892-d10b-436c-b756-92b5fbd4294d-logs\") pod \"glance-default-external-api-0\" (UID: \"89032892-d10b-436c-b756-92b5fbd4294d\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.124034 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89032892-d10b-436c-b756-92b5fbd4294d-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"89032892-d10b-436c-b756-92b5fbd4294d\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.124093 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b33a718-1da3-4ab4-a5bd-dcc7886a4944-dns-svc\") pod \"dnsmasq-dns-c4944f4c9-czwn4\" (UID: \"7b33a718-1da3-4ab4-a5bd-dcc7886a4944\") " pod="openstack/dnsmasq-dns-c4944f4c9-czwn4" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.124115 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xg8f5\" (UniqueName: \"kubernetes.io/projected/7b33a718-1da3-4ab4-a5bd-dcc7886a4944-kube-api-access-xg8f5\") pod \"dnsmasq-dns-c4944f4c9-czwn4\" (UID: \"7b33a718-1da3-4ab4-a5bd-dcc7886a4944\") " pod="openstack/dnsmasq-dns-c4944f4c9-czwn4" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.124137 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b33a718-1da3-4ab4-a5bd-dcc7886a4944-ovsdbserver-nb\") pod \"dnsmasq-dns-c4944f4c9-czwn4\" (UID: \"7b33a718-1da3-4ab4-a5bd-dcc7886a4944\") " pod="openstack/dnsmasq-dns-c4944f4c9-czwn4" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.124158 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8vqj\" (UniqueName: \"kubernetes.io/projected/89032892-d10b-436c-b756-92b5fbd4294d-kube-api-access-q8vqj\") pod \"glance-default-external-api-0\" (UID: \"89032892-d10b-436c-b756-92b5fbd4294d\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.124200 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89032892-d10b-436c-b756-92b5fbd4294d-config-data\") pod \"glance-default-external-api-0\" (UID: \"89032892-d10b-436c-b756-92b5fbd4294d\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.124243 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/89032892-d10b-436c-b756-92b5fbd4294d-ceph\") pod \"glance-default-external-api-0\" (UID: \"89032892-d10b-436c-b756-92b5fbd4294d\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.124269 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/89032892-d10b-436c-b756-92b5fbd4294d-scripts\") pod \"glance-default-external-api-0\" (UID: \"89032892-d10b-436c-b756-92b5fbd4294d\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.124299 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b33a718-1da3-4ab4-a5bd-dcc7886a4944-config\") pod \"dnsmasq-dns-c4944f4c9-czwn4\" (UID: \"7b33a718-1da3-4ab4-a5bd-dcc7886a4944\") " pod="openstack/dnsmasq-dns-c4944f4c9-czwn4" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.124339 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b33a718-1da3-4ab4-a5bd-dcc7886a4944-ovsdbserver-sb\") pod \"dnsmasq-dns-c4944f4c9-czwn4\" (UID: \"7b33a718-1da3-4ab4-a5bd-dcc7886a4944\") " pod="openstack/dnsmasq-dns-c4944f4c9-czwn4" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.124365 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/89032892-d10b-436c-b756-92b5fbd4294d-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"89032892-d10b-436c-b756-92b5fbd4294d\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.124919 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/89032892-d10b-436c-b756-92b5fbd4294d-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"89032892-d10b-436c-b756-92b5fbd4294d\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.125214 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/89032892-d10b-436c-b756-92b5fbd4294d-logs\") pod \"glance-default-external-api-0\" (UID: \"89032892-d10b-436c-b756-92b5fbd4294d\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.126315 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b33a718-1da3-4ab4-a5bd-dcc7886a4944-ovsdbserver-nb\") pod \"dnsmasq-dns-c4944f4c9-czwn4\" (UID: \"7b33a718-1da3-4ab4-a5bd-dcc7886a4944\") " pod="openstack/dnsmasq-dns-c4944f4c9-czwn4" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.127104 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b33a718-1da3-4ab4-a5bd-dcc7886a4944-dns-svc\") pod \"dnsmasq-dns-c4944f4c9-czwn4\" (UID: \"7b33a718-1da3-4ab4-a5bd-dcc7886a4944\") " pod="openstack/dnsmasq-dns-c4944f4c9-czwn4" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.128221 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b33a718-1da3-4ab4-a5bd-dcc7886a4944-config\") pod \"dnsmasq-dns-c4944f4c9-czwn4\" (UID: \"7b33a718-1da3-4ab4-a5bd-dcc7886a4944\") " pod="openstack/dnsmasq-dns-c4944f4c9-czwn4" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.130417 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b33a718-1da3-4ab4-a5bd-dcc7886a4944-ovsdbserver-sb\") pod \"dnsmasq-dns-c4944f4c9-czwn4\" (UID: \"7b33a718-1da3-4ab4-a5bd-dcc7886a4944\") " 
pod="openstack/dnsmasq-dns-c4944f4c9-czwn4" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.133103 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89032892-d10b-436c-b756-92b5fbd4294d-config-data\") pod \"glance-default-external-api-0\" (UID: \"89032892-d10b-436c-b756-92b5fbd4294d\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.133911 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89032892-d10b-436c-b756-92b5fbd4294d-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"89032892-d10b-436c-b756-92b5fbd4294d\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.133978 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89032892-d10b-436c-b756-92b5fbd4294d-scripts\") pod \"glance-default-external-api-0\" (UID: \"89032892-d10b-436c-b756-92b5fbd4294d\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.135477 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/89032892-d10b-436c-b756-92b5fbd4294d-ceph\") pod \"glance-default-external-api-0\" (UID: \"89032892-d10b-436c-b756-92b5fbd4294d\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.143464 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.155727 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xg8f5\" (UniqueName: \"kubernetes.io/projected/7b33a718-1da3-4ab4-a5bd-dcc7886a4944-kube-api-access-xg8f5\") pod \"dnsmasq-dns-c4944f4c9-czwn4\" (UID: \"7b33a718-1da3-4ab4-a5bd-dcc7886a4944\") " pod="openstack/dnsmasq-dns-c4944f4c9-czwn4" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.178589 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8vqj\" (UniqueName: \"kubernetes.io/projected/89032892-d10b-436c-b756-92b5fbd4294d-kube-api-access-q8vqj\") pod \"glance-default-external-api-0\" (UID: \"89032892-d10b-436c-b756-92b5fbd4294d\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.225893 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6b55c1e4-1277-43d4-8e95-16a906977646-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6b55c1e4-1277-43d4-8e95-16a906977646\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.225939 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58wcc\" (UniqueName: \"kubernetes.io/projected/6b55c1e4-1277-43d4-8e95-16a906977646-kube-api-access-58wcc\") pod \"glance-default-internal-api-0\" (UID: \"6b55c1e4-1277-43d4-8e95-16a906977646\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.225966 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/6b55c1e4-1277-43d4-8e95-16a906977646-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6b55c1e4-1277-43d4-8e95-16a906977646\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.225986 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/6b55c1e4-1277-43d4-8e95-16a906977646-ceph\") pod \"glance-default-internal-api-0\" (UID: \"6b55c1e4-1277-43d4-8e95-16a906977646\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.226037 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b55c1e4-1277-43d4-8e95-16a906977646-logs\") pod \"glance-default-internal-api-0\" (UID: \"6b55c1e4-1277-43d4-8e95-16a906977646\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.226074 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b55c1e4-1277-43d4-8e95-16a906977646-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6b55c1e4-1277-43d4-8e95-16a906977646\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.226096 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b55c1e4-1277-43d4-8e95-16a906977646-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6b55c1e4-1277-43d4-8e95-16a906977646\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.240306 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.254228 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-c4944f4c9-czwn4" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.327035 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b55c1e4-1277-43d4-8e95-16a906977646-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6b55c1e4-1277-43d4-8e95-16a906977646\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.327081 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b55c1e4-1277-43d4-8e95-16a906977646-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6b55c1e4-1277-43d4-8e95-16a906977646\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.327144 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6b55c1e4-1277-43d4-8e95-16a906977646-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6b55c1e4-1277-43d4-8e95-16a906977646\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.327168 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58wcc\" (UniqueName: \"kubernetes.io/projected/6b55c1e4-1277-43d4-8e95-16a906977646-kube-api-access-58wcc\") pod \"glance-default-internal-api-0\" (UID: \"6b55c1e4-1277-43d4-8e95-16a906977646\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.327189 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b55c1e4-1277-43d4-8e95-16a906977646-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6b55c1e4-1277-43d4-8e95-16a906977646\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.327209 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/6b55c1e4-1277-43d4-8e95-16a906977646-ceph\") pod \"glance-default-internal-api-0\" (UID: \"6b55c1e4-1277-43d4-8e95-16a906977646\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.327254 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b55c1e4-1277-43d4-8e95-16a906977646-logs\") pod \"glance-default-internal-api-0\" (UID: \"6b55c1e4-1277-43d4-8e95-16a906977646\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.327870 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b55c1e4-1277-43d4-8e95-16a906977646-logs\") pod \"glance-default-internal-api-0\" (UID: \"6b55c1e4-1277-43d4-8e95-16a906977646\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.328168 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6b55c1e4-1277-43d4-8e95-16a906977646-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6b55c1e4-1277-43d4-8e95-16a906977646\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.334255 4982 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b55c1e4-1277-43d4-8e95-16a906977646-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6b55c1e4-1277-43d4-8e95-16a906977646\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.334311 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b55c1e4-1277-43d4-8e95-16a906977646-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6b55c1e4-1277-43d4-8e95-16a906977646\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.336719 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b55c1e4-1277-43d4-8e95-16a906977646-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6b55c1e4-1277-43d4-8e95-16a906977646\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.338212 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/6b55c1e4-1277-43d4-8e95-16a906977646-ceph\") pod \"glance-default-internal-api-0\" (UID: \"6b55c1e4-1277-43d4-8e95-16a906977646\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.355369 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58wcc\" (UniqueName: \"kubernetes.io/projected/6b55c1e4-1277-43d4-8e95-16a906977646-kube-api-access-58wcc\") pod \"glance-default-internal-api-0\" (UID: \"6b55c1e4-1277-43d4-8e95-16a906977646\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.433157 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.794081 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-c4944f4c9-czwn4"] Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.857618 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 07:17:56 crc kubenswrapper[4982]: I0122 07:17:56.987462 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 07:17:57 crc kubenswrapper[4982]: I0122 07:17:57.033939 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 07:17:57 crc kubenswrapper[4982]: W0122 07:17:57.109060 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b55c1e4_1277_43d4_8e95_16a906977646.slice/crio-5adef92374e5842a097467af231b5f4f7f394be012ef2650c4a06f2d0d57b8f8 WatchSource:0}: Error finding container 5adef92374e5842a097467af231b5f4f7f394be012ef2650c4a06f2d0d57b8f8: Status 404 returned error can't find the container with id 5adef92374e5842a097467af231b5f4f7f394be012ef2650c4a06f2d0d57b8f8 Jan 22 07:17:57 crc kubenswrapper[4982]: I0122 07:17:57.554664 4982 generic.go:334] "Generic (PLEG): container finished" podID="7b33a718-1da3-4ab4-a5bd-dcc7886a4944" containerID="f97d3993d601001ab575d379b4562718280b50003880533c1bd0658d92a1b909" exitCode=0 Jan 22 07:17:57 crc kubenswrapper[4982]: I0122 07:17:57.554749 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c4944f4c9-czwn4" event={"ID":"7b33a718-1da3-4ab4-a5bd-dcc7886a4944","Type":"ContainerDied","Data":"f97d3993d601001ab575d379b4562718280b50003880533c1bd0658d92a1b909"} Jan 22 07:17:57 crc kubenswrapper[4982]: I0122 07:17:57.555119 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c4944f4c9-czwn4" event={"ID":"7b33a718-1da3-4ab4-a5bd-dcc7886a4944","Type":"ContainerStarted","Data":"ab709b124b8551649656ff704a30dff73d065eb9355eb8957ee290a7f297caae"} Jan 22 07:17:57 crc kubenswrapper[4982]: I0122 07:17:57.558737 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"89032892-d10b-436c-b756-92b5fbd4294d","Type":"ContainerStarted","Data":"9eaf5abe550779974d8c068a38dffdafe8da3c78c9f156feb2bf8711cf57a8c2"} Jan 22 07:17:57 crc kubenswrapper[4982]: I0122 07:17:57.558771 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"89032892-d10b-436c-b756-92b5fbd4294d","Type":"ContainerStarted","Data":"03d0899830b0359e616630153c4fdbf4427aef7984e7aec4d9631eaf2f68248e"} Jan 22 07:17:57 crc kubenswrapper[4982]: I0122 07:17:57.559676 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6b55c1e4-1277-43d4-8e95-16a906977646","Type":"ContainerStarted","Data":"5adef92374e5842a097467af231b5f4f7f394be012ef2650c4a06f2d0d57b8f8"} Jan 22 07:17:58 crc kubenswrapper[4982]: I0122 07:17:58.571597 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c4944f4c9-czwn4" event={"ID":"7b33a718-1da3-4ab4-a5bd-dcc7886a4944","Type":"ContainerStarted","Data":"873376eef954133691a67bf10e722bf4ca08692abdb9a335d998553e60673c56"} Jan 22 07:17:58 crc kubenswrapper[4982]: I0122 07:17:58.572237 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/dnsmasq-dns-c4944f4c9-czwn4" Jan 22 07:17:58 crc kubenswrapper[4982]: I0122 07:17:58.582890 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="89032892-d10b-436c-b756-92b5fbd4294d" containerName="glance-log" containerID="cri-o://9eaf5abe550779974d8c068a38dffdafe8da3c78c9f156feb2bf8711cf57a8c2" gracePeriod=30 Jan 22 07:17:58 crc kubenswrapper[4982]: I0122 07:17:58.582977 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"89032892-d10b-436c-b756-92b5fbd4294d","Type":"ContainerStarted","Data":"78e098f94b560073af2a649d8e2e761ef3e45797e86c1de9ed39295c17038805"} Jan 22 07:17:58 crc kubenswrapper[4982]: I0122 07:17:58.583070 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="89032892-d10b-436c-b756-92b5fbd4294d" containerName="glance-httpd" containerID="cri-o://78e098f94b560073af2a649d8e2e761ef3e45797e86c1de9ed39295c17038805" gracePeriod=30 Jan 22 07:17:58 crc kubenswrapper[4982]: I0122 07:17:58.587758 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6b55c1e4-1277-43d4-8e95-16a906977646","Type":"ContainerStarted","Data":"6414d90723e0d9e51a8c8496be0845c0aa6660a4f6713c191dbf227d6468c431"} Jan 22 07:17:58 crc kubenswrapper[4982]: I0122 07:17:58.587806 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6b55c1e4-1277-43d4-8e95-16a906977646","Type":"ContainerStarted","Data":"d1dbe4c114ef8f4abaf65e63e8a401fa6ba0ebc188ad0ac6e385b3c0460907e3"} Jan 22 07:17:58 crc kubenswrapper[4982]: I0122 07:17:58.597760 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-c4944f4c9-czwn4" podStartSLOduration=3.597739056 podStartE2EDuration="3.597739056s" podCreationTimestamp="2026-01-22 07:17:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:17:58.590021037 +0000 UTC m=+5539.428659060" watchObservedRunningTime="2026-01-22 07:17:58.597739056 +0000 UTC m=+5539.436377059" Jan 22 07:17:58 crc kubenswrapper[4982]: I0122 07:17:58.624140 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.624116168 podStartE2EDuration="3.624116168s" podCreationTimestamp="2026-01-22 07:17:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:17:58.610324986 +0000 UTC m=+5539.448962989" watchObservedRunningTime="2026-01-22 07:17:58.624116168 +0000 UTC m=+5539.462754181" Jan 22 07:17:58 crc kubenswrapper[4982]: I0122 07:17:58.638797 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=2.638763983 podStartE2EDuration="2.638763983s" podCreationTimestamp="2026-01-22 07:17:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:17:58.634358434 +0000 UTC m=+5539.472996437" watchObservedRunningTime="2026-01-22 07:17:58.638763983 +0000 UTC m=+5539.477401986" Jan 22 07:17:58 crc kubenswrapper[4982]: I0122 07:17:58.934112 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/glance-default-internal-api-0"] Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.259185 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.398173 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q8vqj\" (UniqueName: \"kubernetes.io/projected/89032892-d10b-436c-b756-92b5fbd4294d-kube-api-access-q8vqj\") pod \"89032892-d10b-436c-b756-92b5fbd4294d\" (UID: \"89032892-d10b-436c-b756-92b5fbd4294d\") " Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.398266 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/89032892-d10b-436c-b756-92b5fbd4294d-logs\") pod \"89032892-d10b-436c-b756-92b5fbd4294d\" (UID: \"89032892-d10b-436c-b756-92b5fbd4294d\") " Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.398283 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89032892-d10b-436c-b756-92b5fbd4294d-config-data\") pod \"89032892-d10b-436c-b756-92b5fbd4294d\" (UID: \"89032892-d10b-436c-b756-92b5fbd4294d\") " Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.398319 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89032892-d10b-436c-b756-92b5fbd4294d-combined-ca-bundle\") pod \"89032892-d10b-436c-b756-92b5fbd4294d\" (UID: \"89032892-d10b-436c-b756-92b5fbd4294d\") " Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.398401 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/89032892-d10b-436c-b756-92b5fbd4294d-ceph\") pod \"89032892-d10b-436c-b756-92b5fbd4294d\" (UID: \"89032892-d10b-436c-b756-92b5fbd4294d\") " Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.398452 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/89032892-d10b-436c-b756-92b5fbd4294d-httpd-run\") pod \"89032892-d10b-436c-b756-92b5fbd4294d\" (UID: \"89032892-d10b-436c-b756-92b5fbd4294d\") " Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.398469 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89032892-d10b-436c-b756-92b5fbd4294d-scripts\") pod \"89032892-d10b-436c-b756-92b5fbd4294d\" (UID: \"89032892-d10b-436c-b756-92b5fbd4294d\") " Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.398780 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89032892-d10b-436c-b756-92b5fbd4294d-logs" (OuterVolumeSpecName: "logs") pod "89032892-d10b-436c-b756-92b5fbd4294d" (UID: "89032892-d10b-436c-b756-92b5fbd4294d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.399474 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89032892-d10b-436c-b756-92b5fbd4294d-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "89032892-d10b-436c-b756-92b5fbd4294d" (UID: "89032892-d10b-436c-b756-92b5fbd4294d"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.403888 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89032892-d10b-436c-b756-92b5fbd4294d-kube-api-access-q8vqj" (OuterVolumeSpecName: "kube-api-access-q8vqj") pod "89032892-d10b-436c-b756-92b5fbd4294d" (UID: "89032892-d10b-436c-b756-92b5fbd4294d"). InnerVolumeSpecName "kube-api-access-q8vqj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.404305 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89032892-d10b-436c-b756-92b5fbd4294d-ceph" (OuterVolumeSpecName: "ceph") pod "89032892-d10b-436c-b756-92b5fbd4294d" (UID: "89032892-d10b-436c-b756-92b5fbd4294d"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.405949 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89032892-d10b-436c-b756-92b5fbd4294d-scripts" (OuterVolumeSpecName: "scripts") pod "89032892-d10b-436c-b756-92b5fbd4294d" (UID: "89032892-d10b-436c-b756-92b5fbd4294d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.423333 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89032892-d10b-436c-b756-92b5fbd4294d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "89032892-d10b-436c-b756-92b5fbd4294d" (UID: "89032892-d10b-436c-b756-92b5fbd4294d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.440670 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89032892-d10b-436c-b756-92b5fbd4294d-config-data" (OuterVolumeSpecName: "config-data") pod "89032892-d10b-436c-b756-92b5fbd4294d" (UID: "89032892-d10b-436c-b756-92b5fbd4294d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.500946 4982 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/89032892-d10b-436c-b756-92b5fbd4294d-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.500981 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89032892-d10b-436c-b756-92b5fbd4294d-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.500991 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q8vqj\" (UniqueName: \"kubernetes.io/projected/89032892-d10b-436c-b756-92b5fbd4294d-kube-api-access-q8vqj\") on node \"crc\" DevicePath \"\"" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.501003 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89032892-d10b-436c-b756-92b5fbd4294d-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.501013 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/89032892-d10b-436c-b756-92b5fbd4294d-logs\") on node \"crc\" DevicePath \"\"" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.501021 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89032892-d10b-436c-b756-92b5fbd4294d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.501030 4982 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/89032892-d10b-436c-b756-92b5fbd4294d-ceph\") on node \"crc\" DevicePath \"\"" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.608658 4982 generic.go:334] "Generic (PLEG): container finished" podID="89032892-d10b-436c-b756-92b5fbd4294d" containerID="78e098f94b560073af2a649d8e2e761ef3e45797e86c1de9ed39295c17038805" exitCode=0 Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.608698 4982 generic.go:334] "Generic (PLEG): container finished" podID="89032892-d10b-436c-b756-92b5fbd4294d" containerID="9eaf5abe550779974d8c068a38dffdafe8da3c78c9f156feb2bf8711cf57a8c2" exitCode=143 Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.609110 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.609288 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"89032892-d10b-436c-b756-92b5fbd4294d","Type":"ContainerDied","Data":"78e098f94b560073af2a649d8e2e761ef3e45797e86c1de9ed39295c17038805"} Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.609486 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"89032892-d10b-436c-b756-92b5fbd4294d","Type":"ContainerDied","Data":"9eaf5abe550779974d8c068a38dffdafe8da3c78c9f156feb2bf8711cf57a8c2"} Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.609504 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"89032892-d10b-436c-b756-92b5fbd4294d","Type":"ContainerDied","Data":"03d0899830b0359e616630153c4fdbf4427aef7984e7aec4d9631eaf2f68248e"} Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.609524 4982 scope.go:117] "RemoveContainer" containerID="78e098f94b560073af2a649d8e2e761ef3e45797e86c1de9ed39295c17038805" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.649687 4982 scope.go:117] "RemoveContainer" containerID="9eaf5abe550779974d8c068a38dffdafe8da3c78c9f156feb2bf8711cf57a8c2" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.678195 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.693521 4982 scope.go:117] "RemoveContainer" containerID="78e098f94b560073af2a649d8e2e761ef3e45797e86c1de9ed39295c17038805" Jan 22 07:17:59 crc kubenswrapper[4982]: E0122 07:17:59.694061 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"78e098f94b560073af2a649d8e2e761ef3e45797e86c1de9ed39295c17038805\": container with ID starting with 78e098f94b560073af2a649d8e2e761ef3e45797e86c1de9ed39295c17038805 not found: ID does not exist" containerID="78e098f94b560073af2a649d8e2e761ef3e45797e86c1de9ed39295c17038805" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.694101 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78e098f94b560073af2a649d8e2e761ef3e45797e86c1de9ed39295c17038805"} err="failed to get container status \"78e098f94b560073af2a649d8e2e761ef3e45797e86c1de9ed39295c17038805\": rpc error: code = NotFound desc = could not find container \"78e098f94b560073af2a649d8e2e761ef3e45797e86c1de9ed39295c17038805\": container with ID starting with 78e098f94b560073af2a649d8e2e761ef3e45797e86c1de9ed39295c17038805 not found: ID does not exist" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.694128 4982 scope.go:117] "RemoveContainer" containerID="9eaf5abe550779974d8c068a38dffdafe8da3c78c9f156feb2bf8711cf57a8c2" Jan 22 07:17:59 crc kubenswrapper[4982]: E0122 07:17:59.694482 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9eaf5abe550779974d8c068a38dffdafe8da3c78c9f156feb2bf8711cf57a8c2\": container with ID starting with 9eaf5abe550779974d8c068a38dffdafe8da3c78c9f156feb2bf8711cf57a8c2 not found: ID does not exist" containerID="9eaf5abe550779974d8c068a38dffdafe8da3c78c9f156feb2bf8711cf57a8c2" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.694526 4982 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"9eaf5abe550779974d8c068a38dffdafe8da3c78c9f156feb2bf8711cf57a8c2"} err="failed to get container status \"9eaf5abe550779974d8c068a38dffdafe8da3c78c9f156feb2bf8711cf57a8c2\": rpc error: code = NotFound desc = could not find container \"9eaf5abe550779974d8c068a38dffdafe8da3c78c9f156feb2bf8711cf57a8c2\": container with ID starting with 9eaf5abe550779974d8c068a38dffdafe8da3c78c9f156feb2bf8711cf57a8c2 not found: ID does not exist" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.694543 4982 scope.go:117] "RemoveContainer" containerID="78e098f94b560073af2a649d8e2e761ef3e45797e86c1de9ed39295c17038805" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.694537 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.695036 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78e098f94b560073af2a649d8e2e761ef3e45797e86c1de9ed39295c17038805"} err="failed to get container status \"78e098f94b560073af2a649d8e2e761ef3e45797e86c1de9ed39295c17038805\": rpc error: code = NotFound desc = could not find container \"78e098f94b560073af2a649d8e2e761ef3e45797e86c1de9ed39295c17038805\": container with ID starting with 78e098f94b560073af2a649d8e2e761ef3e45797e86c1de9ed39295c17038805 not found: ID does not exist" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.695061 4982 scope.go:117] "RemoveContainer" containerID="9eaf5abe550779974d8c068a38dffdafe8da3c78c9f156feb2bf8711cf57a8c2" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.695353 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9eaf5abe550779974d8c068a38dffdafe8da3c78c9f156feb2bf8711cf57a8c2"} err="failed to get container status \"9eaf5abe550779974d8c068a38dffdafe8da3c78c9f156feb2bf8711cf57a8c2\": rpc error: code = NotFound desc = could not find container \"9eaf5abe550779974d8c068a38dffdafe8da3c78c9f156feb2bf8711cf57a8c2\": container with ID starting with 9eaf5abe550779974d8c068a38dffdafe8da3c78c9f156feb2bf8711cf57a8c2 not found: ID does not exist" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.701461 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 07:17:59 crc kubenswrapper[4982]: E0122 07:17:59.701812 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89032892-d10b-436c-b756-92b5fbd4294d" containerName="glance-httpd" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.701833 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="89032892-d10b-436c-b756-92b5fbd4294d" containerName="glance-httpd" Jan 22 07:17:59 crc kubenswrapper[4982]: E0122 07:17:59.701876 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89032892-d10b-436c-b756-92b5fbd4294d" containerName="glance-log" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.701882 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="89032892-d10b-436c-b756-92b5fbd4294d" containerName="glance-log" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.702053 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="89032892-d10b-436c-b756-92b5fbd4294d" containerName="glance-log" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.702075 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="89032892-d10b-436c-b756-92b5fbd4294d" containerName="glance-httpd" Jan 22 07:17:59 crc kubenswrapper[4982]: 
I0122 07:17:59.702995 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.704989 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.709060 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.738356 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89032892-d10b-436c-b756-92b5fbd4294d" path="/var/lib/kubelet/pods/89032892-d10b-436c-b756-92b5fbd4294d/volumes" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.814361 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d54d454d-6a5e-44e2-bb24-ca5783182099-scripts\") pod \"glance-default-external-api-0\" (UID: \"d54d454d-6a5e-44e2-bb24-ca5783182099\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.814411 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d54d454d-6a5e-44e2-bb24-ca5783182099-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d54d454d-6a5e-44e2-bb24-ca5783182099\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.814439 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d54d454d-6a5e-44e2-bb24-ca5783182099-ceph\") pod \"glance-default-external-api-0\" (UID: \"d54d454d-6a5e-44e2-bb24-ca5783182099\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.814504 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d54d454d-6a5e-44e2-bb24-ca5783182099-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d54d454d-6a5e-44e2-bb24-ca5783182099\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.814692 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d54d454d-6a5e-44e2-bb24-ca5783182099-logs\") pod \"glance-default-external-api-0\" (UID: \"d54d454d-6a5e-44e2-bb24-ca5783182099\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.814746 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d54d454d-6a5e-44e2-bb24-ca5783182099-config-data\") pod \"glance-default-external-api-0\" (UID: \"d54d454d-6a5e-44e2-bb24-ca5783182099\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.814916 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2v8zh\" (UniqueName: \"kubernetes.io/projected/d54d454d-6a5e-44e2-bb24-ca5783182099-kube-api-access-2v8zh\") pod \"glance-default-external-api-0\" (UID: \"d54d454d-6a5e-44e2-bb24-ca5783182099\") " pod="openstack/glance-default-external-api-0" 
Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.917065 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d54d454d-6a5e-44e2-bb24-ca5783182099-scripts\") pod \"glance-default-external-api-0\" (UID: \"d54d454d-6a5e-44e2-bb24-ca5783182099\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.917127 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d54d454d-6a5e-44e2-bb24-ca5783182099-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d54d454d-6a5e-44e2-bb24-ca5783182099\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.917153 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d54d454d-6a5e-44e2-bb24-ca5783182099-ceph\") pod \"glance-default-external-api-0\" (UID: \"d54d454d-6a5e-44e2-bb24-ca5783182099\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.917205 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d54d454d-6a5e-44e2-bb24-ca5783182099-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d54d454d-6a5e-44e2-bb24-ca5783182099\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.917265 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d54d454d-6a5e-44e2-bb24-ca5783182099-logs\") pod \"glance-default-external-api-0\" (UID: \"d54d454d-6a5e-44e2-bb24-ca5783182099\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.917288 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d54d454d-6a5e-44e2-bb24-ca5783182099-config-data\") pod \"glance-default-external-api-0\" (UID: \"d54d454d-6a5e-44e2-bb24-ca5783182099\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.917334 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2v8zh\" (UniqueName: \"kubernetes.io/projected/d54d454d-6a5e-44e2-bb24-ca5783182099-kube-api-access-2v8zh\") pod \"glance-default-external-api-0\" (UID: \"d54d454d-6a5e-44e2-bb24-ca5783182099\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.918032 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d54d454d-6a5e-44e2-bb24-ca5783182099-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d54d454d-6a5e-44e2-bb24-ca5783182099\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.918402 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d54d454d-6a5e-44e2-bb24-ca5783182099-logs\") pod \"glance-default-external-api-0\" (UID: \"d54d454d-6a5e-44e2-bb24-ca5783182099\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.921886 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/d54d454d-6a5e-44e2-bb24-ca5783182099-scripts\") pod \"glance-default-external-api-0\" (UID: \"d54d454d-6a5e-44e2-bb24-ca5783182099\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.921919 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d54d454d-6a5e-44e2-bb24-ca5783182099-ceph\") pod \"glance-default-external-api-0\" (UID: \"d54d454d-6a5e-44e2-bb24-ca5783182099\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.922187 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d54d454d-6a5e-44e2-bb24-ca5783182099-config-data\") pod \"glance-default-external-api-0\" (UID: \"d54d454d-6a5e-44e2-bb24-ca5783182099\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.922619 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d54d454d-6a5e-44e2-bb24-ca5783182099-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d54d454d-6a5e-44e2-bb24-ca5783182099\") " pod="openstack/glance-default-external-api-0" Jan 22 07:17:59 crc kubenswrapper[4982]: I0122 07:17:59.935568 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2v8zh\" (UniqueName: \"kubernetes.io/projected/d54d454d-6a5e-44e2-bb24-ca5783182099-kube-api-access-2v8zh\") pod \"glance-default-external-api-0\" (UID: \"d54d454d-6a5e-44e2-bb24-ca5783182099\") " pod="openstack/glance-default-external-api-0" Jan 22 07:18:00 crc kubenswrapper[4982]: I0122 07:18:00.022831 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 07:18:00 crc kubenswrapper[4982]: I0122 07:18:00.502643 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 07:18:00 crc kubenswrapper[4982]: I0122 07:18:00.618811 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d54d454d-6a5e-44e2-bb24-ca5783182099","Type":"ContainerStarted","Data":"4d16fd7a879a515a6d7ed5e663523c4603b7428d44dd0f1d888e9fdb9a97c22e"} Jan 22 07:18:00 crc kubenswrapper[4982]: I0122 07:18:00.621623 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="6b55c1e4-1277-43d4-8e95-16a906977646" containerName="glance-httpd" containerID="cri-o://6414d90723e0d9e51a8c8496be0845c0aa6660a4f6713c191dbf227d6468c431" gracePeriod=30 Jan 22 07:18:00 crc kubenswrapper[4982]: I0122 07:18:00.622043 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="6b55c1e4-1277-43d4-8e95-16a906977646" containerName="glance-log" containerID="cri-o://d1dbe4c114ef8f4abaf65e63e8a401fa6ba0ebc188ad0ac6e385b3c0460907e3" gracePeriod=30 Jan 22 07:18:01 crc kubenswrapper[4982]: I0122 07:18:01.639174 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d54d454d-6a5e-44e2-bb24-ca5783182099","Type":"ContainerStarted","Data":"fb1a5a2e9419aadbe51b3516c9aaa69f2b8583709e065ace2eb4583bdae9b71a"} Jan 22 07:18:01 crc kubenswrapper[4982]: I0122 07:18:01.639671 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d54d454d-6a5e-44e2-bb24-ca5783182099","Type":"ContainerStarted","Data":"992a53867c4a533ddc7603f80e288115bbe56be21aa45483c21b4394c6ac9c6e"} Jan 22 07:18:01 crc kubenswrapper[4982]: I0122 07:18:01.640991 4982 generic.go:334] "Generic (PLEG): container finished" podID="6b55c1e4-1277-43d4-8e95-16a906977646" containerID="6414d90723e0d9e51a8c8496be0845c0aa6660a4f6713c191dbf227d6468c431" exitCode=0 Jan 22 07:18:01 crc kubenswrapper[4982]: I0122 07:18:01.641019 4982 generic.go:334] "Generic (PLEG): container finished" podID="6b55c1e4-1277-43d4-8e95-16a906977646" containerID="d1dbe4c114ef8f4abaf65e63e8a401fa6ba0ebc188ad0ac6e385b3c0460907e3" exitCode=143 Jan 22 07:18:01 crc kubenswrapper[4982]: I0122 07:18:01.641036 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6b55c1e4-1277-43d4-8e95-16a906977646","Type":"ContainerDied","Data":"6414d90723e0d9e51a8c8496be0845c0aa6660a4f6713c191dbf227d6468c431"} Jan 22 07:18:01 crc kubenswrapper[4982]: I0122 07:18:01.641056 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6b55c1e4-1277-43d4-8e95-16a906977646","Type":"ContainerDied","Data":"d1dbe4c114ef8f4abaf65e63e8a401fa6ba0ebc188ad0ac6e385b3c0460907e3"} Jan 22 07:18:01 crc kubenswrapper[4982]: I0122 07:18:01.802955 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 07:18:01 crc kubenswrapper[4982]: I0122 07:18:01.952529 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/6b55c1e4-1277-43d4-8e95-16a906977646-ceph\") pod \"6b55c1e4-1277-43d4-8e95-16a906977646\" (UID: \"6b55c1e4-1277-43d4-8e95-16a906977646\") " Jan 22 07:18:01 crc kubenswrapper[4982]: I0122 07:18:01.952591 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b55c1e4-1277-43d4-8e95-16a906977646-config-data\") pod \"6b55c1e4-1277-43d4-8e95-16a906977646\" (UID: \"6b55c1e4-1277-43d4-8e95-16a906977646\") " Jan 22 07:18:01 crc kubenswrapper[4982]: I0122 07:18:01.952657 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b55c1e4-1277-43d4-8e95-16a906977646-combined-ca-bundle\") pod \"6b55c1e4-1277-43d4-8e95-16a906977646\" (UID: \"6b55c1e4-1277-43d4-8e95-16a906977646\") " Jan 22 07:18:01 crc kubenswrapper[4982]: I0122 07:18:01.952675 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b55c1e4-1277-43d4-8e95-16a906977646-logs\") pod \"6b55c1e4-1277-43d4-8e95-16a906977646\" (UID: \"6b55c1e4-1277-43d4-8e95-16a906977646\") " Jan 22 07:18:01 crc kubenswrapper[4982]: I0122 07:18:01.952720 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6b55c1e4-1277-43d4-8e95-16a906977646-httpd-run\") pod \"6b55c1e4-1277-43d4-8e95-16a906977646\" (UID: \"6b55c1e4-1277-43d4-8e95-16a906977646\") " Jan 22 07:18:01 crc kubenswrapper[4982]: I0122 07:18:01.952799 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-58wcc\" (UniqueName: \"kubernetes.io/projected/6b55c1e4-1277-43d4-8e95-16a906977646-kube-api-access-58wcc\") pod \"6b55c1e4-1277-43d4-8e95-16a906977646\" (UID: \"6b55c1e4-1277-43d4-8e95-16a906977646\") " Jan 22 07:18:01 crc kubenswrapper[4982]: I0122 07:18:01.952862 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b55c1e4-1277-43d4-8e95-16a906977646-scripts\") pod \"6b55c1e4-1277-43d4-8e95-16a906977646\" (UID: \"6b55c1e4-1277-43d4-8e95-16a906977646\") " Jan 22 07:18:01 crc kubenswrapper[4982]: I0122 07:18:01.954127 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b55c1e4-1277-43d4-8e95-16a906977646-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "6b55c1e4-1277-43d4-8e95-16a906977646" (UID: "6b55c1e4-1277-43d4-8e95-16a906977646"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:18:01 crc kubenswrapper[4982]: I0122 07:18:01.954262 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b55c1e4-1277-43d4-8e95-16a906977646-logs" (OuterVolumeSpecName: "logs") pod "6b55c1e4-1277-43d4-8e95-16a906977646" (UID: "6b55c1e4-1277-43d4-8e95-16a906977646"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:18:01 crc kubenswrapper[4982]: I0122 07:18:01.957784 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b55c1e4-1277-43d4-8e95-16a906977646-scripts" (OuterVolumeSpecName: "scripts") pod "6b55c1e4-1277-43d4-8e95-16a906977646" (UID: "6b55c1e4-1277-43d4-8e95-16a906977646"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:18:01 crc kubenswrapper[4982]: I0122 07:18:01.961197 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b55c1e4-1277-43d4-8e95-16a906977646-ceph" (OuterVolumeSpecName: "ceph") pod "6b55c1e4-1277-43d4-8e95-16a906977646" (UID: "6b55c1e4-1277-43d4-8e95-16a906977646"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:18:01 crc kubenswrapper[4982]: I0122 07:18:01.961226 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b55c1e4-1277-43d4-8e95-16a906977646-kube-api-access-58wcc" (OuterVolumeSpecName: "kube-api-access-58wcc") pod "6b55c1e4-1277-43d4-8e95-16a906977646" (UID: "6b55c1e4-1277-43d4-8e95-16a906977646"). InnerVolumeSpecName "kube-api-access-58wcc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:18:01 crc kubenswrapper[4982]: I0122 07:18:01.976948 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b55c1e4-1277-43d4-8e95-16a906977646-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6b55c1e4-1277-43d4-8e95-16a906977646" (UID: "6b55c1e4-1277-43d4-8e95-16a906977646"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.015144 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b55c1e4-1277-43d4-8e95-16a906977646-config-data" (OuterVolumeSpecName: "config-data") pod "6b55c1e4-1277-43d4-8e95-16a906977646" (UID: "6b55c1e4-1277-43d4-8e95-16a906977646"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.055190 4982 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/6b55c1e4-1277-43d4-8e95-16a906977646-ceph\") on node \"crc\" DevicePath \"\"" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.055217 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b55c1e4-1277-43d4-8e95-16a906977646-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.055229 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b55c1e4-1277-43d4-8e95-16a906977646-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.055239 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b55c1e4-1277-43d4-8e95-16a906977646-logs\") on node \"crc\" DevicePath \"\"" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.055248 4982 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6b55c1e4-1277-43d4-8e95-16a906977646-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.055256 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-58wcc\" (UniqueName: \"kubernetes.io/projected/6b55c1e4-1277-43d4-8e95-16a906977646-kube-api-access-58wcc\") on node \"crc\" DevicePath \"\"" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.055264 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b55c1e4-1277-43d4-8e95-16a906977646-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.653353 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6b55c1e4-1277-43d4-8e95-16a906977646","Type":"ContainerDied","Data":"5adef92374e5842a097467af231b5f4f7f394be012ef2650c4a06f2d0d57b8f8"} Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.653383 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.653481 4982 scope.go:117] "RemoveContainer" containerID="6414d90723e0d9e51a8c8496be0845c0aa6660a4f6713c191dbf227d6468c431" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.691256 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.691230054 podStartE2EDuration="3.691230054s" podCreationTimestamp="2026-01-22 07:17:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:18:02.682101358 +0000 UTC m=+5543.520739381" watchObservedRunningTime="2026-01-22 07:18:02.691230054 +0000 UTC m=+5543.529868057" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.697489 4982 scope.go:117] "RemoveContainer" containerID="d1dbe4c114ef8f4abaf65e63e8a401fa6ba0ebc188ad0ac6e385b3c0460907e3" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.711167 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.720877 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.749395 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 07:18:02 crc kubenswrapper[4982]: E0122 07:18:02.749790 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b55c1e4-1277-43d4-8e95-16a906977646" containerName="glance-httpd" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.749811 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b55c1e4-1277-43d4-8e95-16a906977646" containerName="glance-httpd" Jan 22 07:18:02 crc kubenswrapper[4982]: E0122 07:18:02.750557 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b55c1e4-1277-43d4-8e95-16a906977646" containerName="glance-log" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.750578 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b55c1e4-1277-43d4-8e95-16a906977646" containerName="glance-log" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.750768 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b55c1e4-1277-43d4-8e95-16a906977646" containerName="glance-log" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.750791 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b55c1e4-1277-43d4-8e95-16a906977646" containerName="glance-httpd" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.751992 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.754813 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.759019 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.868611 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/056a895d-8530-4180-a7fb-42e6bc47a2b5-logs\") pod \"glance-default-internal-api-0\" (UID: \"056a895d-8530-4180-a7fb-42e6bc47a2b5\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.868653 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/056a895d-8530-4180-a7fb-42e6bc47a2b5-ceph\") pod \"glance-default-internal-api-0\" (UID: \"056a895d-8530-4180-a7fb-42e6bc47a2b5\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.868684 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/056a895d-8530-4180-a7fb-42e6bc47a2b5-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"056a895d-8530-4180-a7fb-42e6bc47a2b5\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.868705 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/056a895d-8530-4180-a7fb-42e6bc47a2b5-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"056a895d-8530-4180-a7fb-42e6bc47a2b5\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.868721 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/056a895d-8530-4180-a7fb-42e6bc47a2b5-config-data\") pod \"glance-default-internal-api-0\" (UID: \"056a895d-8530-4180-a7fb-42e6bc47a2b5\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.868788 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/056a895d-8530-4180-a7fb-42e6bc47a2b5-scripts\") pod \"glance-default-internal-api-0\" (UID: \"056a895d-8530-4180-a7fb-42e6bc47a2b5\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.868897 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxwpk\" (UniqueName: \"kubernetes.io/projected/056a895d-8530-4180-a7fb-42e6bc47a2b5-kube-api-access-sxwpk\") pod \"glance-default-internal-api-0\" (UID: \"056a895d-8530-4180-a7fb-42e6bc47a2b5\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.970720 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxwpk\" (UniqueName: \"kubernetes.io/projected/056a895d-8530-4180-a7fb-42e6bc47a2b5-kube-api-access-sxwpk\") pod \"glance-default-internal-api-0\" (UID: 
\"056a895d-8530-4180-a7fb-42e6bc47a2b5\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.970783 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/056a895d-8530-4180-a7fb-42e6bc47a2b5-logs\") pod \"glance-default-internal-api-0\" (UID: \"056a895d-8530-4180-a7fb-42e6bc47a2b5\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.970811 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/056a895d-8530-4180-a7fb-42e6bc47a2b5-ceph\") pod \"glance-default-internal-api-0\" (UID: \"056a895d-8530-4180-a7fb-42e6bc47a2b5\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.970837 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/056a895d-8530-4180-a7fb-42e6bc47a2b5-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"056a895d-8530-4180-a7fb-42e6bc47a2b5\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.970890 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/056a895d-8530-4180-a7fb-42e6bc47a2b5-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"056a895d-8530-4180-a7fb-42e6bc47a2b5\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.970908 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/056a895d-8530-4180-a7fb-42e6bc47a2b5-config-data\") pod \"glance-default-internal-api-0\" (UID: \"056a895d-8530-4180-a7fb-42e6bc47a2b5\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.970957 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/056a895d-8530-4180-a7fb-42e6bc47a2b5-scripts\") pod \"glance-default-internal-api-0\" (UID: \"056a895d-8530-4180-a7fb-42e6bc47a2b5\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.971870 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/056a895d-8530-4180-a7fb-42e6bc47a2b5-logs\") pod \"glance-default-internal-api-0\" (UID: \"056a895d-8530-4180-a7fb-42e6bc47a2b5\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.971975 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/056a895d-8530-4180-a7fb-42e6bc47a2b5-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"056a895d-8530-4180-a7fb-42e6bc47a2b5\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.975080 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/056a895d-8530-4180-a7fb-42e6bc47a2b5-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"056a895d-8530-4180-a7fb-42e6bc47a2b5\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.975269 
4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/056a895d-8530-4180-a7fb-42e6bc47a2b5-scripts\") pod \"glance-default-internal-api-0\" (UID: \"056a895d-8530-4180-a7fb-42e6bc47a2b5\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.975542 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/056a895d-8530-4180-a7fb-42e6bc47a2b5-ceph\") pod \"glance-default-internal-api-0\" (UID: \"056a895d-8530-4180-a7fb-42e6bc47a2b5\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.976391 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/056a895d-8530-4180-a7fb-42e6bc47a2b5-config-data\") pod \"glance-default-internal-api-0\" (UID: \"056a895d-8530-4180-a7fb-42e6bc47a2b5\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:18:02 crc kubenswrapper[4982]: I0122 07:18:02.986107 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxwpk\" (UniqueName: \"kubernetes.io/projected/056a895d-8530-4180-a7fb-42e6bc47a2b5-kube-api-access-sxwpk\") pod \"glance-default-internal-api-0\" (UID: \"056a895d-8530-4180-a7fb-42e6bc47a2b5\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:18:03 crc kubenswrapper[4982]: I0122 07:18:03.113158 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 07:18:03 crc kubenswrapper[4982]: I0122 07:18:03.159452 4982 scope.go:117] "RemoveContainer" containerID="989fad8b473b28c5abf128dd8119b0899546a4abd95f77a0b72ee8c974624505" Jan 22 07:18:03 crc kubenswrapper[4982]: I0122 07:18:03.351157 4982 scope.go:117] "RemoveContainer" containerID="45340d75c2c31c6f499c6b02f0c6bd9d3447cdbdcff688cd951806d16f19dae8" Jan 22 07:18:03 crc kubenswrapper[4982]: I0122 07:18:03.733815 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b55c1e4-1277-43d4-8e95-16a906977646" path="/var/lib/kubelet/pods/6b55c1e4-1277-43d4-8e95-16a906977646/volumes" Jan 22 07:18:03 crc kubenswrapper[4982]: I0122 07:18:03.773590 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 07:18:04 crc kubenswrapper[4982]: I0122 07:18:04.683082 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"056a895d-8530-4180-a7fb-42e6bc47a2b5","Type":"ContainerStarted","Data":"107d3502545453e5c563d9412a282714fd9d07c1c857236ba0ca9fb27a76bb74"} Jan 22 07:18:04 crc kubenswrapper[4982]: I0122 07:18:04.683120 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"056a895d-8530-4180-a7fb-42e6bc47a2b5","Type":"ContainerStarted","Data":"465b3a90e22df6991bd1c24e890238f6de119eb6bf93be1c265100bce3376731"} Jan 22 07:18:05 crc kubenswrapper[4982]: I0122 07:18:05.691364 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"056a895d-8530-4180-a7fb-42e6bc47a2b5","Type":"ContainerStarted","Data":"f9262139b9c3e5cfcce5a204b83a9c760bb2a4290a1e2b4102a25d2e366247ac"} Jan 22 07:18:05 crc kubenswrapper[4982]: I0122 07:18:05.715718 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" 
podStartSLOduration=3.7156940670000003 podStartE2EDuration="3.715694067s" podCreationTimestamp="2026-01-22 07:18:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:18:05.708789261 +0000 UTC m=+5546.547427264" watchObservedRunningTime="2026-01-22 07:18:05.715694067 +0000 UTC m=+5546.554332070" Jan 22 07:18:06 crc kubenswrapper[4982]: I0122 07:18:06.256132 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-c4944f4c9-czwn4" Jan 22 07:18:06 crc kubenswrapper[4982]: I0122 07:18:06.334347 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ffdc4f547-pqtdn"] Jan 22 07:18:06 crc kubenswrapper[4982]: I0122 07:18:06.335249 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5ffdc4f547-pqtdn" podUID="adb4e59f-e146-4cfc-bc8e-00220637684c" containerName="dnsmasq-dns" containerID="cri-o://c8228336227b7a3d2e8faef5ac2a4295c92f6d202c766e47884e68d8ab2d7628" gracePeriod=10 Jan 22 07:18:06 crc kubenswrapper[4982]: I0122 07:18:06.701712 4982 generic.go:334] "Generic (PLEG): container finished" podID="adb4e59f-e146-4cfc-bc8e-00220637684c" containerID="c8228336227b7a3d2e8faef5ac2a4295c92f6d202c766e47884e68d8ab2d7628" exitCode=0 Jan 22 07:18:06 crc kubenswrapper[4982]: I0122 07:18:06.702567 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ffdc4f547-pqtdn" event={"ID":"adb4e59f-e146-4cfc-bc8e-00220637684c","Type":"ContainerDied","Data":"c8228336227b7a3d2e8faef5ac2a4295c92f6d202c766e47884e68d8ab2d7628"} Jan 22 07:18:06 crc kubenswrapper[4982]: I0122 07:18:06.719872 4982 scope.go:117] "RemoveContainer" containerID="dd1b1f68267d9d0fc8f31fc896f788451cccffbbd47d3265a31a9c82c393becc" Jan 22 07:18:06 crc kubenswrapper[4982]: E0122 07:18:06.720212 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:18:06 crc kubenswrapper[4982]: I0122 07:18:06.829305 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5ffdc4f547-pqtdn" Jan 22 07:18:06 crc kubenswrapper[4982]: I0122 07:18:06.940351 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/adb4e59f-e146-4cfc-bc8e-00220637684c-dns-svc\") pod \"adb4e59f-e146-4cfc-bc8e-00220637684c\" (UID: \"adb4e59f-e146-4cfc-bc8e-00220637684c\") " Jan 22 07:18:06 crc kubenswrapper[4982]: I0122 07:18:06.940665 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fmdf8\" (UniqueName: \"kubernetes.io/projected/adb4e59f-e146-4cfc-bc8e-00220637684c-kube-api-access-fmdf8\") pod \"adb4e59f-e146-4cfc-bc8e-00220637684c\" (UID: \"adb4e59f-e146-4cfc-bc8e-00220637684c\") " Jan 22 07:18:06 crc kubenswrapper[4982]: I0122 07:18:06.940741 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/adb4e59f-e146-4cfc-bc8e-00220637684c-config\") pod \"adb4e59f-e146-4cfc-bc8e-00220637684c\" (UID: \"adb4e59f-e146-4cfc-bc8e-00220637684c\") " Jan 22 07:18:06 crc kubenswrapper[4982]: I0122 07:18:06.940769 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/adb4e59f-e146-4cfc-bc8e-00220637684c-ovsdbserver-nb\") pod \"adb4e59f-e146-4cfc-bc8e-00220637684c\" (UID: \"adb4e59f-e146-4cfc-bc8e-00220637684c\") " Jan 22 07:18:06 crc kubenswrapper[4982]: I0122 07:18:06.940837 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/adb4e59f-e146-4cfc-bc8e-00220637684c-ovsdbserver-sb\") pod \"adb4e59f-e146-4cfc-bc8e-00220637684c\" (UID: \"adb4e59f-e146-4cfc-bc8e-00220637684c\") " Jan 22 07:18:06 crc kubenswrapper[4982]: I0122 07:18:06.948777 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/adb4e59f-e146-4cfc-bc8e-00220637684c-kube-api-access-fmdf8" (OuterVolumeSpecName: "kube-api-access-fmdf8") pod "adb4e59f-e146-4cfc-bc8e-00220637684c" (UID: "adb4e59f-e146-4cfc-bc8e-00220637684c"). InnerVolumeSpecName "kube-api-access-fmdf8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:18:06 crc kubenswrapper[4982]: I0122 07:18:06.979070 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/adb4e59f-e146-4cfc-bc8e-00220637684c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "adb4e59f-e146-4cfc-bc8e-00220637684c" (UID: "adb4e59f-e146-4cfc-bc8e-00220637684c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:18:06 crc kubenswrapper[4982]: I0122 07:18:06.979665 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/adb4e59f-e146-4cfc-bc8e-00220637684c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "adb4e59f-e146-4cfc-bc8e-00220637684c" (UID: "adb4e59f-e146-4cfc-bc8e-00220637684c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:18:06 crc kubenswrapper[4982]: I0122 07:18:06.982290 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/adb4e59f-e146-4cfc-bc8e-00220637684c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "adb4e59f-e146-4cfc-bc8e-00220637684c" (UID: "adb4e59f-e146-4cfc-bc8e-00220637684c"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:18:06 crc kubenswrapper[4982]: I0122 07:18:06.983060 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/adb4e59f-e146-4cfc-bc8e-00220637684c-config" (OuterVolumeSpecName: "config") pod "adb4e59f-e146-4cfc-bc8e-00220637684c" (UID: "adb4e59f-e146-4cfc-bc8e-00220637684c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:18:07 crc kubenswrapper[4982]: I0122 07:18:07.042278 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/adb4e59f-e146-4cfc-bc8e-00220637684c-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 07:18:07 crc kubenswrapper[4982]: I0122 07:18:07.042310 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fmdf8\" (UniqueName: \"kubernetes.io/projected/adb4e59f-e146-4cfc-bc8e-00220637684c-kube-api-access-fmdf8\") on node \"crc\" DevicePath \"\"" Jan 22 07:18:07 crc kubenswrapper[4982]: I0122 07:18:07.042323 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/adb4e59f-e146-4cfc-bc8e-00220637684c-config\") on node \"crc\" DevicePath \"\"" Jan 22 07:18:07 crc kubenswrapper[4982]: I0122 07:18:07.042335 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/adb4e59f-e146-4cfc-bc8e-00220637684c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 07:18:07 crc kubenswrapper[4982]: I0122 07:18:07.042345 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/adb4e59f-e146-4cfc-bc8e-00220637684c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 07:18:07 crc kubenswrapper[4982]: I0122 07:18:07.713271 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ffdc4f547-pqtdn" event={"ID":"adb4e59f-e146-4cfc-bc8e-00220637684c","Type":"ContainerDied","Data":"922b88867ba1087eb09370e8e32283cc9b272ce390c7d9fc914ced4edb5ade96"} Jan 22 07:18:07 crc kubenswrapper[4982]: I0122 07:18:07.713701 4982 scope.go:117] "RemoveContainer" containerID="c8228336227b7a3d2e8faef5ac2a4295c92f6d202c766e47884e68d8ab2d7628" Jan 22 07:18:07 crc kubenswrapper[4982]: I0122 07:18:07.713333 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5ffdc4f547-pqtdn" Jan 22 07:18:07 crc kubenswrapper[4982]: I0122 07:18:07.736949 4982 scope.go:117] "RemoveContainer" containerID="9c6f628a1c24a8f22565d2859a9071568b0025ff75ecc4fa1412098d776952ea" Jan 22 07:18:07 crc kubenswrapper[4982]: I0122 07:18:07.752030 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ffdc4f547-pqtdn"] Jan 22 07:18:07 crc kubenswrapper[4982]: I0122 07:18:07.761026 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5ffdc4f547-pqtdn"] Jan 22 07:18:09 crc kubenswrapper[4982]: I0122 07:18:09.731748 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="adb4e59f-e146-4cfc-bc8e-00220637684c" path="/var/lib/kubelet/pods/adb4e59f-e146-4cfc-bc8e-00220637684c/volumes" Jan 22 07:18:10 crc kubenswrapper[4982]: I0122 07:18:10.023223 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 22 07:18:10 crc kubenswrapper[4982]: I0122 07:18:10.023558 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 22 07:18:10 crc kubenswrapper[4982]: I0122 07:18:10.054189 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 22 07:18:10 crc kubenswrapper[4982]: I0122 07:18:10.085727 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 22 07:18:10 crc kubenswrapper[4982]: I0122 07:18:10.760606 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 22 07:18:10 crc kubenswrapper[4982]: I0122 07:18:10.761496 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 22 07:18:12 crc kubenswrapper[4982]: I0122 07:18:12.795093 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 22 07:18:12 crc kubenswrapper[4982]: I0122 07:18:12.795613 4982 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 07:18:12 crc kubenswrapper[4982]: I0122 07:18:12.822347 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 22 07:18:13 crc kubenswrapper[4982]: I0122 07:18:13.114019 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 22 07:18:13 crc kubenswrapper[4982]: I0122 07:18:13.114415 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 22 07:18:13 crc kubenswrapper[4982]: I0122 07:18:13.147524 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 22 07:18:13 crc kubenswrapper[4982]: I0122 07:18:13.157612 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 22 07:18:13 crc kubenswrapper[4982]: I0122 07:18:13.785174 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 22 07:18:13 crc kubenswrapper[4982]: I0122 07:18:13.786210 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 22 07:18:15 crc 
kubenswrapper[4982]: I0122 07:18:15.798439 4982 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 07:18:15 crc kubenswrapper[4982]: I0122 07:18:15.798691 4982 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 07:18:15 crc kubenswrapper[4982]: I0122 07:18:15.902389 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 22 07:18:15 crc kubenswrapper[4982]: I0122 07:18:15.933151 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 22 07:18:18 crc kubenswrapper[4982]: I0122 07:18:18.719629 4982 scope.go:117] "RemoveContainer" containerID="dd1b1f68267d9d0fc8f31fc896f788451cccffbbd47d3265a31a9c82c393becc" Jan 22 07:18:18 crc kubenswrapper[4982]: E0122 07:18:18.720562 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:18:23 crc kubenswrapper[4982]: I0122 07:18:23.609433 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-ft4hj"] Jan 22 07:18:23 crc kubenswrapper[4982]: E0122 07:18:23.610194 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adb4e59f-e146-4cfc-bc8e-00220637684c" containerName="dnsmasq-dns" Jan 22 07:18:23 crc kubenswrapper[4982]: I0122 07:18:23.610212 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="adb4e59f-e146-4cfc-bc8e-00220637684c" containerName="dnsmasq-dns" Jan 22 07:18:23 crc kubenswrapper[4982]: E0122 07:18:23.610265 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adb4e59f-e146-4cfc-bc8e-00220637684c" containerName="init" Jan 22 07:18:23 crc kubenswrapper[4982]: I0122 07:18:23.610274 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="adb4e59f-e146-4cfc-bc8e-00220637684c" containerName="init" Jan 22 07:18:23 crc kubenswrapper[4982]: I0122 07:18:23.610500 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="adb4e59f-e146-4cfc-bc8e-00220637684c" containerName="dnsmasq-dns" Jan 22 07:18:23 crc kubenswrapper[4982]: I0122 07:18:23.611186 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-ft4hj" Jan 22 07:18:23 crc kubenswrapper[4982]: I0122 07:18:23.618914 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-ft4hj"] Jan 22 07:18:23 crc kubenswrapper[4982]: I0122 07:18:23.717364 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-ab6e-account-create-update-df4b7"] Jan 22 07:18:23 crc kubenswrapper[4982]: I0122 07:18:23.718458 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-ab6e-account-create-update-df4b7" Jan 22 07:18:23 crc kubenswrapper[4982]: I0122 07:18:23.722123 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Jan 22 07:18:23 crc kubenswrapper[4982]: I0122 07:18:23.765620 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-ab6e-account-create-update-df4b7"] Jan 22 07:18:23 crc kubenswrapper[4982]: I0122 07:18:23.777068 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dg5bf\" (UniqueName: \"kubernetes.io/projected/4863b84e-5bd6-49d3-9b48-33d8beaeccab-kube-api-access-dg5bf\") pod \"placement-db-create-ft4hj\" (UID: \"4863b84e-5bd6-49d3-9b48-33d8beaeccab\") " pod="openstack/placement-db-create-ft4hj" Jan 22 07:18:23 crc kubenswrapper[4982]: I0122 07:18:23.777262 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4863b84e-5bd6-49d3-9b48-33d8beaeccab-operator-scripts\") pod \"placement-db-create-ft4hj\" (UID: \"4863b84e-5bd6-49d3-9b48-33d8beaeccab\") " pod="openstack/placement-db-create-ft4hj" Jan 22 07:18:23 crc kubenswrapper[4982]: I0122 07:18:23.879413 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bxn9\" (UniqueName: \"kubernetes.io/projected/3d25ac88-1252-411d-97c7-a3516d6cf5a1-kube-api-access-9bxn9\") pod \"placement-ab6e-account-create-update-df4b7\" (UID: \"3d25ac88-1252-411d-97c7-a3516d6cf5a1\") " pod="openstack/placement-ab6e-account-create-update-df4b7" Jan 22 07:18:23 crc kubenswrapper[4982]: I0122 07:18:23.879531 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dg5bf\" (UniqueName: \"kubernetes.io/projected/4863b84e-5bd6-49d3-9b48-33d8beaeccab-kube-api-access-dg5bf\") pod \"placement-db-create-ft4hj\" (UID: \"4863b84e-5bd6-49d3-9b48-33d8beaeccab\") " pod="openstack/placement-db-create-ft4hj" Jan 22 07:18:23 crc kubenswrapper[4982]: I0122 07:18:23.879588 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3d25ac88-1252-411d-97c7-a3516d6cf5a1-operator-scripts\") pod \"placement-ab6e-account-create-update-df4b7\" (UID: \"3d25ac88-1252-411d-97c7-a3516d6cf5a1\") " pod="openstack/placement-ab6e-account-create-update-df4b7" Jan 22 07:18:23 crc kubenswrapper[4982]: I0122 07:18:23.879632 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4863b84e-5bd6-49d3-9b48-33d8beaeccab-operator-scripts\") pod \"placement-db-create-ft4hj\" (UID: \"4863b84e-5bd6-49d3-9b48-33d8beaeccab\") " pod="openstack/placement-db-create-ft4hj" Jan 22 07:18:23 crc kubenswrapper[4982]: I0122 07:18:23.881026 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4863b84e-5bd6-49d3-9b48-33d8beaeccab-operator-scripts\") pod \"placement-db-create-ft4hj\" (UID: \"4863b84e-5bd6-49d3-9b48-33d8beaeccab\") " pod="openstack/placement-db-create-ft4hj" Jan 22 07:18:23 crc kubenswrapper[4982]: I0122 07:18:23.901577 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dg5bf\" (UniqueName: 
\"kubernetes.io/projected/4863b84e-5bd6-49d3-9b48-33d8beaeccab-kube-api-access-dg5bf\") pod \"placement-db-create-ft4hj\" (UID: \"4863b84e-5bd6-49d3-9b48-33d8beaeccab\") " pod="openstack/placement-db-create-ft4hj" Jan 22 07:18:23 crc kubenswrapper[4982]: I0122 07:18:23.931083 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-ft4hj" Jan 22 07:18:23 crc kubenswrapper[4982]: I0122 07:18:23.981476 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bxn9\" (UniqueName: \"kubernetes.io/projected/3d25ac88-1252-411d-97c7-a3516d6cf5a1-kube-api-access-9bxn9\") pod \"placement-ab6e-account-create-update-df4b7\" (UID: \"3d25ac88-1252-411d-97c7-a3516d6cf5a1\") " pod="openstack/placement-ab6e-account-create-update-df4b7" Jan 22 07:18:23 crc kubenswrapper[4982]: I0122 07:18:23.981587 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3d25ac88-1252-411d-97c7-a3516d6cf5a1-operator-scripts\") pod \"placement-ab6e-account-create-update-df4b7\" (UID: \"3d25ac88-1252-411d-97c7-a3516d6cf5a1\") " pod="openstack/placement-ab6e-account-create-update-df4b7" Jan 22 07:18:23 crc kubenswrapper[4982]: I0122 07:18:23.982355 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3d25ac88-1252-411d-97c7-a3516d6cf5a1-operator-scripts\") pod \"placement-ab6e-account-create-update-df4b7\" (UID: \"3d25ac88-1252-411d-97c7-a3516d6cf5a1\") " pod="openstack/placement-ab6e-account-create-update-df4b7" Jan 22 07:18:23 crc kubenswrapper[4982]: I0122 07:18:23.999033 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bxn9\" (UniqueName: \"kubernetes.io/projected/3d25ac88-1252-411d-97c7-a3516d6cf5a1-kube-api-access-9bxn9\") pod \"placement-ab6e-account-create-update-df4b7\" (UID: \"3d25ac88-1252-411d-97c7-a3516d6cf5a1\") " pod="openstack/placement-ab6e-account-create-update-df4b7" Jan 22 07:18:24 crc kubenswrapper[4982]: I0122 07:18:24.067901 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-ab6e-account-create-update-df4b7" Jan 22 07:18:24 crc kubenswrapper[4982]: I0122 07:18:24.440766 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-ft4hj"] Jan 22 07:18:24 crc kubenswrapper[4982]: W0122 07:18:24.444772 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4863b84e_5bd6_49d3_9b48_33d8beaeccab.slice/crio-202812c84cafe712b51ffa3d6e17f224d2ca8ac46578556e63e66d5118a8263a WatchSource:0}: Error finding container 202812c84cafe712b51ffa3d6e17f224d2ca8ac46578556e63e66d5118a8263a: Status 404 returned error can't find the container with id 202812c84cafe712b51ffa3d6e17f224d2ca8ac46578556e63e66d5118a8263a Jan 22 07:18:24 crc kubenswrapper[4982]: I0122 07:18:24.556179 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-ab6e-account-create-update-df4b7"] Jan 22 07:18:24 crc kubenswrapper[4982]: I0122 07:18:24.903303 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-ft4hj" event={"ID":"4863b84e-5bd6-49d3-9b48-33d8beaeccab","Type":"ContainerStarted","Data":"af8101c6965c15ad53a74bcaca55d2812eb3729dc4e31240591b9a9e16ace35c"} Jan 22 07:18:24 crc kubenswrapper[4982]: I0122 07:18:24.903354 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-ft4hj" event={"ID":"4863b84e-5bd6-49d3-9b48-33d8beaeccab","Type":"ContainerStarted","Data":"202812c84cafe712b51ffa3d6e17f224d2ca8ac46578556e63e66d5118a8263a"} Jan 22 07:18:24 crc kubenswrapper[4982]: I0122 07:18:24.906754 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-ab6e-account-create-update-df4b7" event={"ID":"3d25ac88-1252-411d-97c7-a3516d6cf5a1","Type":"ContainerStarted","Data":"a3d01621d72192575e6823c1fb042d035d1e8a45e4efa60a65e9baaf5a78f9d5"} Jan 22 07:18:24 crc kubenswrapper[4982]: I0122 07:18:24.906839 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-ab6e-account-create-update-df4b7" event={"ID":"3d25ac88-1252-411d-97c7-a3516d6cf5a1","Type":"ContainerStarted","Data":"86e2a6d5edce3105d57935825cba1d208318f2e8df1ae50a0a09fa3328c27e3d"} Jan 22 07:18:24 crc kubenswrapper[4982]: I0122 07:18:24.919579 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-create-ft4hj" podStartSLOduration=1.9195597370000002 podStartE2EDuration="1.919559737s" podCreationTimestamp="2026-01-22 07:18:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:18:24.918945521 +0000 UTC m=+5565.757583534" watchObservedRunningTime="2026-01-22 07:18:24.919559737 +0000 UTC m=+5565.758197740" Jan 22 07:18:24 crc kubenswrapper[4982]: I0122 07:18:24.938730 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-ab6e-account-create-update-df4b7" podStartSLOduration=1.9387120740000001 podStartE2EDuration="1.938712074s" podCreationTimestamp="2026-01-22 07:18:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:18:24.931657034 +0000 UTC m=+5565.770295037" watchObservedRunningTime="2026-01-22 07:18:24.938712074 +0000 UTC m=+5565.777350077" Jan 22 07:18:25 crc kubenswrapper[4982]: I0122 07:18:25.915574 4982 generic.go:334] "Generic (PLEG): container finished" 
podID="4863b84e-5bd6-49d3-9b48-33d8beaeccab" containerID="af8101c6965c15ad53a74bcaca55d2812eb3729dc4e31240591b9a9e16ace35c" exitCode=0 Jan 22 07:18:25 crc kubenswrapper[4982]: I0122 07:18:25.915641 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-ft4hj" event={"ID":"4863b84e-5bd6-49d3-9b48-33d8beaeccab","Type":"ContainerDied","Data":"af8101c6965c15ad53a74bcaca55d2812eb3729dc4e31240591b9a9e16ace35c"} Jan 22 07:18:25 crc kubenswrapper[4982]: I0122 07:18:25.918646 4982 generic.go:334] "Generic (PLEG): container finished" podID="3d25ac88-1252-411d-97c7-a3516d6cf5a1" containerID="a3d01621d72192575e6823c1fb042d035d1e8a45e4efa60a65e9baaf5a78f9d5" exitCode=0 Jan 22 07:18:25 crc kubenswrapper[4982]: I0122 07:18:25.918685 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-ab6e-account-create-update-df4b7" event={"ID":"3d25ac88-1252-411d-97c7-a3516d6cf5a1","Type":"ContainerDied","Data":"a3d01621d72192575e6823c1fb042d035d1e8a45e4efa60a65e9baaf5a78f9d5"} Jan 22 07:18:27 crc kubenswrapper[4982]: I0122 07:18:27.353077 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-ab6e-account-create-update-df4b7" Jan 22 07:18:27 crc kubenswrapper[4982]: I0122 07:18:27.362449 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-ft4hj" Jan 22 07:18:27 crc kubenswrapper[4982]: I0122 07:18:27.458125 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3d25ac88-1252-411d-97c7-a3516d6cf5a1-operator-scripts\") pod \"3d25ac88-1252-411d-97c7-a3516d6cf5a1\" (UID: \"3d25ac88-1252-411d-97c7-a3516d6cf5a1\") " Jan 22 07:18:27 crc kubenswrapper[4982]: I0122 07:18:27.458235 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4863b84e-5bd6-49d3-9b48-33d8beaeccab-operator-scripts\") pod \"4863b84e-5bd6-49d3-9b48-33d8beaeccab\" (UID: \"4863b84e-5bd6-49d3-9b48-33d8beaeccab\") " Jan 22 07:18:27 crc kubenswrapper[4982]: I0122 07:18:27.458402 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dg5bf\" (UniqueName: \"kubernetes.io/projected/4863b84e-5bd6-49d3-9b48-33d8beaeccab-kube-api-access-dg5bf\") pod \"4863b84e-5bd6-49d3-9b48-33d8beaeccab\" (UID: \"4863b84e-5bd6-49d3-9b48-33d8beaeccab\") " Jan 22 07:18:27 crc kubenswrapper[4982]: I0122 07:18:27.458430 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9bxn9\" (UniqueName: \"kubernetes.io/projected/3d25ac88-1252-411d-97c7-a3516d6cf5a1-kube-api-access-9bxn9\") pod \"3d25ac88-1252-411d-97c7-a3516d6cf5a1\" (UID: \"3d25ac88-1252-411d-97c7-a3516d6cf5a1\") " Jan 22 07:18:27 crc kubenswrapper[4982]: I0122 07:18:27.459494 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d25ac88-1252-411d-97c7-a3516d6cf5a1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3d25ac88-1252-411d-97c7-a3516d6cf5a1" (UID: "3d25ac88-1252-411d-97c7-a3516d6cf5a1"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:18:27 crc kubenswrapper[4982]: I0122 07:18:27.459602 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4863b84e-5bd6-49d3-9b48-33d8beaeccab-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4863b84e-5bd6-49d3-9b48-33d8beaeccab" (UID: "4863b84e-5bd6-49d3-9b48-33d8beaeccab"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:18:27 crc kubenswrapper[4982]: I0122 07:18:27.464907 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d25ac88-1252-411d-97c7-a3516d6cf5a1-kube-api-access-9bxn9" (OuterVolumeSpecName: "kube-api-access-9bxn9") pod "3d25ac88-1252-411d-97c7-a3516d6cf5a1" (UID: "3d25ac88-1252-411d-97c7-a3516d6cf5a1"). InnerVolumeSpecName "kube-api-access-9bxn9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:18:27 crc kubenswrapper[4982]: I0122 07:18:27.470830 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4863b84e-5bd6-49d3-9b48-33d8beaeccab-kube-api-access-dg5bf" (OuterVolumeSpecName: "kube-api-access-dg5bf") pod "4863b84e-5bd6-49d3-9b48-33d8beaeccab" (UID: "4863b84e-5bd6-49d3-9b48-33d8beaeccab"). InnerVolumeSpecName "kube-api-access-dg5bf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:18:27 crc kubenswrapper[4982]: I0122 07:18:27.559905 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dg5bf\" (UniqueName: \"kubernetes.io/projected/4863b84e-5bd6-49d3-9b48-33d8beaeccab-kube-api-access-dg5bf\") on node \"crc\" DevicePath \"\"" Jan 22 07:18:27 crc kubenswrapper[4982]: I0122 07:18:27.559938 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9bxn9\" (UniqueName: \"kubernetes.io/projected/3d25ac88-1252-411d-97c7-a3516d6cf5a1-kube-api-access-9bxn9\") on node \"crc\" DevicePath \"\"" Jan 22 07:18:27 crc kubenswrapper[4982]: I0122 07:18:27.559950 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3d25ac88-1252-411d-97c7-a3516d6cf5a1-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:18:27 crc kubenswrapper[4982]: I0122 07:18:27.559961 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4863b84e-5bd6-49d3-9b48-33d8beaeccab-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:18:27 crc kubenswrapper[4982]: I0122 07:18:27.935724 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-ft4hj" event={"ID":"4863b84e-5bd6-49d3-9b48-33d8beaeccab","Type":"ContainerDied","Data":"202812c84cafe712b51ffa3d6e17f224d2ca8ac46578556e63e66d5118a8263a"} Jan 22 07:18:27 crc kubenswrapper[4982]: I0122 07:18:27.936223 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="202812c84cafe712b51ffa3d6e17f224d2ca8ac46578556e63e66d5118a8263a" Jan 22 07:18:27 crc kubenswrapper[4982]: I0122 07:18:27.936296 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-ft4hj" Jan 22 07:18:27 crc kubenswrapper[4982]: I0122 07:18:27.939651 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-ab6e-account-create-update-df4b7" event={"ID":"3d25ac88-1252-411d-97c7-a3516d6cf5a1","Type":"ContainerDied","Data":"86e2a6d5edce3105d57935825cba1d208318f2e8df1ae50a0a09fa3328c27e3d"} Jan 22 07:18:27 crc kubenswrapper[4982]: I0122 07:18:27.939701 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="86e2a6d5edce3105d57935825cba1d208318f2e8df1ae50a0a09fa3328c27e3d" Jan 22 07:18:27 crc kubenswrapper[4982]: I0122 07:18:27.939727 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-ab6e-account-create-update-df4b7" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.027400 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-f66ff87f9-twgk7"] Jan 22 07:18:29 crc kubenswrapper[4982]: E0122 07:18:29.027743 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4863b84e-5bd6-49d3-9b48-33d8beaeccab" containerName="mariadb-database-create" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.027761 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4863b84e-5bd6-49d3-9b48-33d8beaeccab" containerName="mariadb-database-create" Jan 22 07:18:29 crc kubenswrapper[4982]: E0122 07:18:29.027778 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d25ac88-1252-411d-97c7-a3516d6cf5a1" containerName="mariadb-account-create-update" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.027785 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d25ac88-1252-411d-97c7-a3516d6cf5a1" containerName="mariadb-account-create-update" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.027957 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="4863b84e-5bd6-49d3-9b48-33d8beaeccab" containerName="mariadb-database-create" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.027970 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d25ac88-1252-411d-97c7-a3516d6cf5a1" containerName="mariadb-account-create-update" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.028870 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f66ff87f9-twgk7" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.041340 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f66ff87f9-twgk7"] Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.097081 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-znmfv"] Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.098577 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-znmfv" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.100684 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.100807 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-btjc9" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.101044 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.108836 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-znmfv"] Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.191024 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e6ecea22-10f0-439a-94d1-eb095d24fa73-dns-svc\") pod \"dnsmasq-dns-f66ff87f9-twgk7\" (UID: \"e6ecea22-10f0-439a-94d1-eb095d24fa73\") " pod="openstack/dnsmasq-dns-f66ff87f9-twgk7" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.191073 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e6ecea22-10f0-439a-94d1-eb095d24fa73-ovsdbserver-nb\") pod \"dnsmasq-dns-f66ff87f9-twgk7\" (UID: \"e6ecea22-10f0-439a-94d1-eb095d24fa73\") " pod="openstack/dnsmasq-dns-f66ff87f9-twgk7" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.191103 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6ecea22-10f0-439a-94d1-eb095d24fa73-config\") pod \"dnsmasq-dns-f66ff87f9-twgk7\" (UID: \"e6ecea22-10f0-439a-94d1-eb095d24fa73\") " pod="openstack/dnsmasq-dns-f66ff87f9-twgk7" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.191175 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qscqb\" (UniqueName: \"kubernetes.io/projected/e6ecea22-10f0-439a-94d1-eb095d24fa73-kube-api-access-qscqb\") pod \"dnsmasq-dns-f66ff87f9-twgk7\" (UID: \"e6ecea22-10f0-439a-94d1-eb095d24fa73\") " pod="openstack/dnsmasq-dns-f66ff87f9-twgk7" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.191278 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e6ecea22-10f0-439a-94d1-eb095d24fa73-ovsdbserver-sb\") pod \"dnsmasq-dns-f66ff87f9-twgk7\" (UID: \"e6ecea22-10f0-439a-94d1-eb095d24fa73\") " pod="openstack/dnsmasq-dns-f66ff87f9-twgk7" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.292410 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6053280f-39c8-494c-8e47-b0aad9d7f58e-logs\") pod \"placement-db-sync-znmfv\" (UID: \"6053280f-39c8-494c-8e47-b0aad9d7f58e\") " pod="openstack/placement-db-sync-znmfv" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.292462 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hbrvj\" (UniqueName: \"kubernetes.io/projected/6053280f-39c8-494c-8e47-b0aad9d7f58e-kube-api-access-hbrvj\") pod \"placement-db-sync-znmfv\" (UID: \"6053280f-39c8-494c-8e47-b0aad9d7f58e\") " pod="openstack/placement-db-sync-znmfv" Jan 22 07:18:29 
crc kubenswrapper[4982]: I0122 07:18:29.292502 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e6ecea22-10f0-439a-94d1-eb095d24fa73-ovsdbserver-sb\") pod \"dnsmasq-dns-f66ff87f9-twgk7\" (UID: \"e6ecea22-10f0-439a-94d1-eb095d24fa73\") " pod="openstack/dnsmasq-dns-f66ff87f9-twgk7" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.292531 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6053280f-39c8-494c-8e47-b0aad9d7f58e-combined-ca-bundle\") pod \"placement-db-sync-znmfv\" (UID: \"6053280f-39c8-494c-8e47-b0aad9d7f58e\") " pod="openstack/placement-db-sync-znmfv" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.292569 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e6ecea22-10f0-439a-94d1-eb095d24fa73-dns-svc\") pod \"dnsmasq-dns-f66ff87f9-twgk7\" (UID: \"e6ecea22-10f0-439a-94d1-eb095d24fa73\") " pod="openstack/dnsmasq-dns-f66ff87f9-twgk7" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.292588 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6053280f-39c8-494c-8e47-b0aad9d7f58e-config-data\") pod \"placement-db-sync-znmfv\" (UID: \"6053280f-39c8-494c-8e47-b0aad9d7f58e\") " pod="openstack/placement-db-sync-znmfv" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.292607 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e6ecea22-10f0-439a-94d1-eb095d24fa73-ovsdbserver-nb\") pod \"dnsmasq-dns-f66ff87f9-twgk7\" (UID: \"e6ecea22-10f0-439a-94d1-eb095d24fa73\") " pod="openstack/dnsmasq-dns-f66ff87f9-twgk7" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.292643 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6ecea22-10f0-439a-94d1-eb095d24fa73-config\") pod \"dnsmasq-dns-f66ff87f9-twgk7\" (UID: \"e6ecea22-10f0-439a-94d1-eb095d24fa73\") " pod="openstack/dnsmasq-dns-f66ff87f9-twgk7" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.292668 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qscqb\" (UniqueName: \"kubernetes.io/projected/e6ecea22-10f0-439a-94d1-eb095d24fa73-kube-api-access-qscqb\") pod \"dnsmasq-dns-f66ff87f9-twgk7\" (UID: \"e6ecea22-10f0-439a-94d1-eb095d24fa73\") " pod="openstack/dnsmasq-dns-f66ff87f9-twgk7" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.292707 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6053280f-39c8-494c-8e47-b0aad9d7f58e-scripts\") pod \"placement-db-sync-znmfv\" (UID: \"6053280f-39c8-494c-8e47-b0aad9d7f58e\") " pod="openstack/placement-db-sync-znmfv" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.293736 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e6ecea22-10f0-439a-94d1-eb095d24fa73-dns-svc\") pod \"dnsmasq-dns-f66ff87f9-twgk7\" (UID: \"e6ecea22-10f0-439a-94d1-eb095d24fa73\") " pod="openstack/dnsmasq-dns-f66ff87f9-twgk7" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.293742 4982 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e6ecea22-10f0-439a-94d1-eb095d24fa73-ovsdbserver-nb\") pod \"dnsmasq-dns-f66ff87f9-twgk7\" (UID: \"e6ecea22-10f0-439a-94d1-eb095d24fa73\") " pod="openstack/dnsmasq-dns-f66ff87f9-twgk7" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.293817 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e6ecea22-10f0-439a-94d1-eb095d24fa73-ovsdbserver-sb\") pod \"dnsmasq-dns-f66ff87f9-twgk7\" (UID: \"e6ecea22-10f0-439a-94d1-eb095d24fa73\") " pod="openstack/dnsmasq-dns-f66ff87f9-twgk7" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.294535 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6ecea22-10f0-439a-94d1-eb095d24fa73-config\") pod \"dnsmasq-dns-f66ff87f9-twgk7\" (UID: \"e6ecea22-10f0-439a-94d1-eb095d24fa73\") " pod="openstack/dnsmasq-dns-f66ff87f9-twgk7" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.313900 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qscqb\" (UniqueName: \"kubernetes.io/projected/e6ecea22-10f0-439a-94d1-eb095d24fa73-kube-api-access-qscqb\") pod \"dnsmasq-dns-f66ff87f9-twgk7\" (UID: \"e6ecea22-10f0-439a-94d1-eb095d24fa73\") " pod="openstack/dnsmasq-dns-f66ff87f9-twgk7" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.360050 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f66ff87f9-twgk7" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.395121 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6053280f-39c8-494c-8e47-b0aad9d7f58e-scripts\") pod \"placement-db-sync-znmfv\" (UID: \"6053280f-39c8-494c-8e47-b0aad9d7f58e\") " pod="openstack/placement-db-sync-znmfv" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.395223 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6053280f-39c8-494c-8e47-b0aad9d7f58e-logs\") pod \"placement-db-sync-znmfv\" (UID: \"6053280f-39c8-494c-8e47-b0aad9d7f58e\") " pod="openstack/placement-db-sync-znmfv" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.395261 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hbrvj\" (UniqueName: \"kubernetes.io/projected/6053280f-39c8-494c-8e47-b0aad9d7f58e-kube-api-access-hbrvj\") pod \"placement-db-sync-znmfv\" (UID: \"6053280f-39c8-494c-8e47-b0aad9d7f58e\") " pod="openstack/placement-db-sync-znmfv" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.395319 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6053280f-39c8-494c-8e47-b0aad9d7f58e-combined-ca-bundle\") pod \"placement-db-sync-znmfv\" (UID: \"6053280f-39c8-494c-8e47-b0aad9d7f58e\") " pod="openstack/placement-db-sync-znmfv" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.395373 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6053280f-39c8-494c-8e47-b0aad9d7f58e-config-data\") pod \"placement-db-sync-znmfv\" (UID: \"6053280f-39c8-494c-8e47-b0aad9d7f58e\") " pod="openstack/placement-db-sync-znmfv" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.396342 4982 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6053280f-39c8-494c-8e47-b0aad9d7f58e-logs\") pod \"placement-db-sync-znmfv\" (UID: \"6053280f-39c8-494c-8e47-b0aad9d7f58e\") " pod="openstack/placement-db-sync-znmfv" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.399459 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6053280f-39c8-494c-8e47-b0aad9d7f58e-scripts\") pod \"placement-db-sync-znmfv\" (UID: \"6053280f-39c8-494c-8e47-b0aad9d7f58e\") " pod="openstack/placement-db-sync-znmfv" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.402535 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6053280f-39c8-494c-8e47-b0aad9d7f58e-config-data\") pod \"placement-db-sync-znmfv\" (UID: \"6053280f-39c8-494c-8e47-b0aad9d7f58e\") " pod="openstack/placement-db-sync-znmfv" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.403816 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6053280f-39c8-494c-8e47-b0aad9d7f58e-combined-ca-bundle\") pod \"placement-db-sync-znmfv\" (UID: \"6053280f-39c8-494c-8e47-b0aad9d7f58e\") " pod="openstack/placement-db-sync-znmfv" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.417478 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hbrvj\" (UniqueName: \"kubernetes.io/projected/6053280f-39c8-494c-8e47-b0aad9d7f58e-kube-api-access-hbrvj\") pod \"placement-db-sync-znmfv\" (UID: \"6053280f-39c8-494c-8e47-b0aad9d7f58e\") " pod="openstack/placement-db-sync-znmfv" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.717236 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-znmfv" Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.887142 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f66ff87f9-twgk7"] Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.948690 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-znmfv"] Jan 22 07:18:29 crc kubenswrapper[4982]: W0122 07:18:29.955459 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6053280f_39c8_494c_8e47_b0aad9d7f58e.slice/crio-7ebd680a164e1594e3bfe2e6039a91450cf9d842f0724af4352f46853552f691 WatchSource:0}: Error finding container 7ebd680a164e1594e3bfe2e6039a91450cf9d842f0724af4352f46853552f691: Status 404 returned error can't find the container with id 7ebd680a164e1594e3bfe2e6039a91450cf9d842f0724af4352f46853552f691 Jan 22 07:18:29 crc kubenswrapper[4982]: I0122 07:18:29.961105 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f66ff87f9-twgk7" event={"ID":"e6ecea22-10f0-439a-94d1-eb095d24fa73","Type":"ContainerStarted","Data":"c115cc1a11aef0b46a98ca9581318e3e7111cd52eef3a9b1c7ca2ec75781c400"} Jan 22 07:18:30 crc kubenswrapper[4982]: I0122 07:18:30.719506 4982 scope.go:117] "RemoveContainer" containerID="dd1b1f68267d9d0fc8f31fc896f788451cccffbbd47d3265a31a9c82c393becc" Jan 22 07:18:30 crc kubenswrapper[4982]: E0122 07:18:30.720069 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:18:30 crc kubenswrapper[4982]: I0122 07:18:30.970660 4982 generic.go:334] "Generic (PLEG): container finished" podID="e6ecea22-10f0-439a-94d1-eb095d24fa73" containerID="58a80f26265211208fba9d90809a57da99fcfe5475e37544cbf9d669c244d96a" exitCode=0 Jan 22 07:18:30 crc kubenswrapper[4982]: I0122 07:18:30.971013 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f66ff87f9-twgk7" event={"ID":"e6ecea22-10f0-439a-94d1-eb095d24fa73","Type":"ContainerDied","Data":"58a80f26265211208fba9d90809a57da99fcfe5475e37544cbf9d669c244d96a"} Jan 22 07:18:30 crc kubenswrapper[4982]: I0122 07:18:30.972145 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-znmfv" event={"ID":"6053280f-39c8-494c-8e47-b0aad9d7f58e","Type":"ContainerStarted","Data":"99a0f56c5828f9782f18c1f6c2faacc0b58155eb29ea76f833485d0469a33480"} Jan 22 07:18:30 crc kubenswrapper[4982]: I0122 07:18:30.972178 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-znmfv" event={"ID":"6053280f-39c8-494c-8e47-b0aad9d7f58e","Type":"ContainerStarted","Data":"7ebd680a164e1594e3bfe2e6039a91450cf9d842f0724af4352f46853552f691"} Jan 22 07:18:31 crc kubenswrapper[4982]: I0122 07:18:31.018454 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-znmfv" podStartSLOduration=2.01843239 podStartE2EDuration="2.01843239s" podCreationTimestamp="2026-01-22 07:18:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:18:31.013223049 
+0000 UTC m=+5571.851861072" watchObservedRunningTime="2026-01-22 07:18:31.01843239 +0000 UTC m=+5571.857070393" Jan 22 07:18:31 crc kubenswrapper[4982]: I0122 07:18:31.987085 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f66ff87f9-twgk7" event={"ID":"e6ecea22-10f0-439a-94d1-eb095d24fa73","Type":"ContainerStarted","Data":"db35e60ede15ea834b698e6d9c87973496cf0f9256898dbf4c9214e03ff2dab4"} Jan 22 07:18:32 crc kubenswrapper[4982]: I0122 07:18:32.025418 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-f66ff87f9-twgk7" podStartSLOduration=3.025395203 podStartE2EDuration="3.025395203s" podCreationTimestamp="2026-01-22 07:18:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:18:32.016138762 +0000 UTC m=+5572.854776825" watchObservedRunningTime="2026-01-22 07:18:32.025395203 +0000 UTC m=+5572.864033216" Jan 22 07:18:32 crc kubenswrapper[4982]: I0122 07:18:32.995091 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-f66ff87f9-twgk7" Jan 22 07:18:34 crc kubenswrapper[4982]: I0122 07:18:34.007900 4982 generic.go:334] "Generic (PLEG): container finished" podID="6053280f-39c8-494c-8e47-b0aad9d7f58e" containerID="99a0f56c5828f9782f18c1f6c2faacc0b58155eb29ea76f833485d0469a33480" exitCode=0 Jan 22 07:18:34 crc kubenswrapper[4982]: I0122 07:18:34.007965 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-znmfv" event={"ID":"6053280f-39c8-494c-8e47-b0aad9d7f58e","Type":"ContainerDied","Data":"99a0f56c5828f9782f18c1f6c2faacc0b58155eb29ea76f833485d0469a33480"} Jan 22 07:18:35 crc kubenswrapper[4982]: I0122 07:18:35.363722 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-znmfv" Jan 22 07:18:35 crc kubenswrapper[4982]: I0122 07:18:35.511213 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6053280f-39c8-494c-8e47-b0aad9d7f58e-config-data\") pod \"6053280f-39c8-494c-8e47-b0aad9d7f58e\" (UID: \"6053280f-39c8-494c-8e47-b0aad9d7f58e\") " Jan 22 07:18:35 crc kubenswrapper[4982]: I0122 07:18:35.511549 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6053280f-39c8-494c-8e47-b0aad9d7f58e-scripts\") pod \"6053280f-39c8-494c-8e47-b0aad9d7f58e\" (UID: \"6053280f-39c8-494c-8e47-b0aad9d7f58e\") " Jan 22 07:18:35 crc kubenswrapper[4982]: I0122 07:18:35.511604 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6053280f-39c8-494c-8e47-b0aad9d7f58e-logs\") pod \"6053280f-39c8-494c-8e47-b0aad9d7f58e\" (UID: \"6053280f-39c8-494c-8e47-b0aad9d7f58e\") " Jan 22 07:18:35 crc kubenswrapper[4982]: I0122 07:18:35.511672 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hbrvj\" (UniqueName: \"kubernetes.io/projected/6053280f-39c8-494c-8e47-b0aad9d7f58e-kube-api-access-hbrvj\") pod \"6053280f-39c8-494c-8e47-b0aad9d7f58e\" (UID: \"6053280f-39c8-494c-8e47-b0aad9d7f58e\") " Jan 22 07:18:35 crc kubenswrapper[4982]: I0122 07:18:35.511724 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6053280f-39c8-494c-8e47-b0aad9d7f58e-combined-ca-bundle\") pod \"6053280f-39c8-494c-8e47-b0aad9d7f58e\" (UID: \"6053280f-39c8-494c-8e47-b0aad9d7f58e\") " Jan 22 07:18:35 crc kubenswrapper[4982]: I0122 07:18:35.512444 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6053280f-39c8-494c-8e47-b0aad9d7f58e-logs" (OuterVolumeSpecName: "logs") pod "6053280f-39c8-494c-8e47-b0aad9d7f58e" (UID: "6053280f-39c8-494c-8e47-b0aad9d7f58e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:18:35 crc kubenswrapper[4982]: I0122 07:18:35.519214 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6053280f-39c8-494c-8e47-b0aad9d7f58e-kube-api-access-hbrvj" (OuterVolumeSpecName: "kube-api-access-hbrvj") pod "6053280f-39c8-494c-8e47-b0aad9d7f58e" (UID: "6053280f-39c8-494c-8e47-b0aad9d7f58e"). InnerVolumeSpecName "kube-api-access-hbrvj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:18:35 crc kubenswrapper[4982]: I0122 07:18:35.520192 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6053280f-39c8-494c-8e47-b0aad9d7f58e-scripts" (OuterVolumeSpecName: "scripts") pod "6053280f-39c8-494c-8e47-b0aad9d7f58e" (UID: "6053280f-39c8-494c-8e47-b0aad9d7f58e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:18:35 crc kubenswrapper[4982]: I0122 07:18:35.558774 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6053280f-39c8-494c-8e47-b0aad9d7f58e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6053280f-39c8-494c-8e47-b0aad9d7f58e" (UID: "6053280f-39c8-494c-8e47-b0aad9d7f58e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:18:35 crc kubenswrapper[4982]: I0122 07:18:35.560284 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6053280f-39c8-494c-8e47-b0aad9d7f58e-config-data" (OuterVolumeSpecName: "config-data") pod "6053280f-39c8-494c-8e47-b0aad9d7f58e" (UID: "6053280f-39c8-494c-8e47-b0aad9d7f58e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:18:35 crc kubenswrapper[4982]: I0122 07:18:35.614671 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6053280f-39c8-494c-8e47-b0aad9d7f58e-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:18:35 crc kubenswrapper[4982]: I0122 07:18:35.615065 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6053280f-39c8-494c-8e47-b0aad9d7f58e-logs\") on node \"crc\" DevicePath \"\"" Jan 22 07:18:35 crc kubenswrapper[4982]: I0122 07:18:35.615274 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hbrvj\" (UniqueName: \"kubernetes.io/projected/6053280f-39c8-494c-8e47-b0aad9d7f58e-kube-api-access-hbrvj\") on node \"crc\" DevicePath \"\"" Jan 22 07:18:35 crc kubenswrapper[4982]: I0122 07:18:35.615457 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6053280f-39c8-494c-8e47-b0aad9d7f58e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:18:35 crc kubenswrapper[4982]: I0122 07:18:35.615615 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6053280f-39c8-494c-8e47-b0aad9d7f58e-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:18:36 crc kubenswrapper[4982]: I0122 07:18:36.028541 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-znmfv" event={"ID":"6053280f-39c8-494c-8e47-b0aad9d7f58e","Type":"ContainerDied","Data":"7ebd680a164e1594e3bfe2e6039a91450cf9d842f0724af4352f46853552f691"} Jan 22 07:18:36 crc kubenswrapper[4982]: I0122 07:18:36.028599 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7ebd680a164e1594e3bfe2e6039a91450cf9d842f0724af4352f46853552f691" Jan 22 07:18:36 crc kubenswrapper[4982]: I0122 07:18:36.028676 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-znmfv" Jan 22 07:18:36 crc kubenswrapper[4982]: I0122 07:18:36.123842 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-85d8bfd68d-h9vnb"] Jan 22 07:18:36 crc kubenswrapper[4982]: E0122 07:18:36.124211 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6053280f-39c8-494c-8e47-b0aad9d7f58e" containerName="placement-db-sync" Jan 22 07:18:36 crc kubenswrapper[4982]: I0122 07:18:36.124231 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="6053280f-39c8-494c-8e47-b0aad9d7f58e" containerName="placement-db-sync" Jan 22 07:18:36 crc kubenswrapper[4982]: I0122 07:18:36.124381 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="6053280f-39c8-494c-8e47-b0aad9d7f58e" containerName="placement-db-sync" Jan 22 07:18:36 crc kubenswrapper[4982]: I0122 07:18:36.125238 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-85d8bfd68d-h9vnb" Jan 22 07:18:36 crc kubenswrapper[4982]: I0122 07:18:36.132395 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Jan 22 07:18:36 crc kubenswrapper[4982]: I0122 07:18:36.132545 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Jan 22 07:18:36 crc kubenswrapper[4982]: I0122 07:18:36.138143 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-btjc9" Jan 22 07:18:36 crc kubenswrapper[4982]: I0122 07:18:36.141468 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-85d8bfd68d-h9vnb"] Jan 22 07:18:36 crc kubenswrapper[4982]: I0122 07:18:36.227042 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/733b14c4-1832-4215-8748-228dd7e5ceab-config-data\") pod \"placement-85d8bfd68d-h9vnb\" (UID: \"733b14c4-1832-4215-8748-228dd7e5ceab\") " pod="openstack/placement-85d8bfd68d-h9vnb" Jan 22 07:18:36 crc kubenswrapper[4982]: I0122 07:18:36.227104 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x97zk\" (UniqueName: \"kubernetes.io/projected/733b14c4-1832-4215-8748-228dd7e5ceab-kube-api-access-x97zk\") pod \"placement-85d8bfd68d-h9vnb\" (UID: \"733b14c4-1832-4215-8748-228dd7e5ceab\") " pod="openstack/placement-85d8bfd68d-h9vnb" Jan 22 07:18:36 crc kubenswrapper[4982]: I0122 07:18:36.227425 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/733b14c4-1832-4215-8748-228dd7e5ceab-scripts\") pod \"placement-85d8bfd68d-h9vnb\" (UID: \"733b14c4-1832-4215-8748-228dd7e5ceab\") " pod="openstack/placement-85d8bfd68d-h9vnb" Jan 22 07:18:36 crc kubenswrapper[4982]: I0122 07:18:36.227658 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/733b14c4-1832-4215-8748-228dd7e5ceab-combined-ca-bundle\") pod \"placement-85d8bfd68d-h9vnb\" (UID: \"733b14c4-1832-4215-8748-228dd7e5ceab\") " pod="openstack/placement-85d8bfd68d-h9vnb" Jan 22 07:18:36 crc kubenswrapper[4982]: I0122 07:18:36.227790 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/733b14c4-1832-4215-8748-228dd7e5ceab-logs\") pod \"placement-85d8bfd68d-h9vnb\" (UID: \"733b14c4-1832-4215-8748-228dd7e5ceab\") " pod="openstack/placement-85d8bfd68d-h9vnb" Jan 22 07:18:36 crc kubenswrapper[4982]: I0122 07:18:36.330017 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/733b14c4-1832-4215-8748-228dd7e5ceab-scripts\") pod \"placement-85d8bfd68d-h9vnb\" (UID: \"733b14c4-1832-4215-8748-228dd7e5ceab\") " pod="openstack/placement-85d8bfd68d-h9vnb" Jan 22 07:18:36 crc kubenswrapper[4982]: I0122 07:18:36.330156 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/733b14c4-1832-4215-8748-228dd7e5ceab-combined-ca-bundle\") pod \"placement-85d8bfd68d-h9vnb\" (UID: \"733b14c4-1832-4215-8748-228dd7e5ceab\") " pod="openstack/placement-85d8bfd68d-h9vnb" Jan 22 07:18:36 crc kubenswrapper[4982]: I0122 07:18:36.330203 
4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/733b14c4-1832-4215-8748-228dd7e5ceab-logs\") pod \"placement-85d8bfd68d-h9vnb\" (UID: \"733b14c4-1832-4215-8748-228dd7e5ceab\") " pod="openstack/placement-85d8bfd68d-h9vnb" Jan 22 07:18:36 crc kubenswrapper[4982]: I0122 07:18:36.330289 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/733b14c4-1832-4215-8748-228dd7e5ceab-config-data\") pod \"placement-85d8bfd68d-h9vnb\" (UID: \"733b14c4-1832-4215-8748-228dd7e5ceab\") " pod="openstack/placement-85d8bfd68d-h9vnb" Jan 22 07:18:36 crc kubenswrapper[4982]: I0122 07:18:36.330354 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x97zk\" (UniqueName: \"kubernetes.io/projected/733b14c4-1832-4215-8748-228dd7e5ceab-kube-api-access-x97zk\") pod \"placement-85d8bfd68d-h9vnb\" (UID: \"733b14c4-1832-4215-8748-228dd7e5ceab\") " pod="openstack/placement-85d8bfd68d-h9vnb" Jan 22 07:18:36 crc kubenswrapper[4982]: I0122 07:18:36.330788 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/733b14c4-1832-4215-8748-228dd7e5ceab-logs\") pod \"placement-85d8bfd68d-h9vnb\" (UID: \"733b14c4-1832-4215-8748-228dd7e5ceab\") " pod="openstack/placement-85d8bfd68d-h9vnb" Jan 22 07:18:36 crc kubenswrapper[4982]: I0122 07:18:36.333655 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/733b14c4-1832-4215-8748-228dd7e5ceab-scripts\") pod \"placement-85d8bfd68d-h9vnb\" (UID: \"733b14c4-1832-4215-8748-228dd7e5ceab\") " pod="openstack/placement-85d8bfd68d-h9vnb" Jan 22 07:18:36 crc kubenswrapper[4982]: I0122 07:18:36.334174 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/733b14c4-1832-4215-8748-228dd7e5ceab-combined-ca-bundle\") pod \"placement-85d8bfd68d-h9vnb\" (UID: \"733b14c4-1832-4215-8748-228dd7e5ceab\") " pod="openstack/placement-85d8bfd68d-h9vnb" Jan 22 07:18:36 crc kubenswrapper[4982]: I0122 07:18:36.334529 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/733b14c4-1832-4215-8748-228dd7e5ceab-config-data\") pod \"placement-85d8bfd68d-h9vnb\" (UID: \"733b14c4-1832-4215-8748-228dd7e5ceab\") " pod="openstack/placement-85d8bfd68d-h9vnb" Jan 22 07:18:36 crc kubenswrapper[4982]: I0122 07:18:36.349474 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x97zk\" (UniqueName: \"kubernetes.io/projected/733b14c4-1832-4215-8748-228dd7e5ceab-kube-api-access-x97zk\") pod \"placement-85d8bfd68d-h9vnb\" (UID: \"733b14c4-1832-4215-8748-228dd7e5ceab\") " pod="openstack/placement-85d8bfd68d-h9vnb" Jan 22 07:18:36 crc kubenswrapper[4982]: I0122 07:18:36.444522 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-85d8bfd68d-h9vnb" Jan 22 07:18:36 crc kubenswrapper[4982]: I0122 07:18:36.928096 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-85d8bfd68d-h9vnb"] Jan 22 07:18:36 crc kubenswrapper[4982]: W0122 07:18:36.932210 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod733b14c4_1832_4215_8748_228dd7e5ceab.slice/crio-195acc73bca5f35a1cbbc7f3fab25c3d93087477ee1affe2899f3fc721e8192e WatchSource:0}: Error finding container 195acc73bca5f35a1cbbc7f3fab25c3d93087477ee1affe2899f3fc721e8192e: Status 404 returned error can't find the container with id 195acc73bca5f35a1cbbc7f3fab25c3d93087477ee1affe2899f3fc721e8192e Jan 22 07:18:37 crc kubenswrapper[4982]: I0122 07:18:37.035767 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-85d8bfd68d-h9vnb" event={"ID":"733b14c4-1832-4215-8748-228dd7e5ceab","Type":"ContainerStarted","Data":"195acc73bca5f35a1cbbc7f3fab25c3d93087477ee1affe2899f3fc721e8192e"} Jan 22 07:18:38 crc kubenswrapper[4982]: I0122 07:18:38.044954 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-85d8bfd68d-h9vnb" event={"ID":"733b14c4-1832-4215-8748-228dd7e5ceab","Type":"ContainerStarted","Data":"7958b3f53813710b4a9362bbfcef627da3578ba093feb75af931335ee3cee8ca"} Jan 22 07:18:38 crc kubenswrapper[4982]: I0122 07:18:38.045292 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-85d8bfd68d-h9vnb" event={"ID":"733b14c4-1832-4215-8748-228dd7e5ceab","Type":"ContainerStarted","Data":"6bcbf532a14aeb0062d6d54435f674b91fd707f940277529cf6811e156aa881e"} Jan 22 07:18:38 crc kubenswrapper[4982]: I0122 07:18:38.045310 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-85d8bfd68d-h9vnb" Jan 22 07:18:38 crc kubenswrapper[4982]: I0122 07:18:38.045321 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-85d8bfd68d-h9vnb" Jan 22 07:18:38 crc kubenswrapper[4982]: I0122 07:18:38.073722 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-85d8bfd68d-h9vnb" podStartSLOduration=2.073696921 podStartE2EDuration="2.073696921s" podCreationTimestamp="2026-01-22 07:18:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:18:38.07142999 +0000 UTC m=+5578.910068003" watchObservedRunningTime="2026-01-22 07:18:38.073696921 +0000 UTC m=+5578.912334924" Jan 22 07:18:39 crc kubenswrapper[4982]: I0122 07:18:39.362498 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-f66ff87f9-twgk7" Jan 22 07:18:39 crc kubenswrapper[4982]: I0122 07:18:39.426551 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-c4944f4c9-czwn4"] Jan 22 07:18:39 crc kubenswrapper[4982]: I0122 07:18:39.427769 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-c4944f4c9-czwn4" podUID="7b33a718-1da3-4ab4-a5bd-dcc7886a4944" containerName="dnsmasq-dns" containerID="cri-o://873376eef954133691a67bf10e722bf4ca08692abdb9a335d998553e60673c56" gracePeriod=10 Jan 22 07:18:40 crc kubenswrapper[4982]: I0122 07:18:40.064602 4982 generic.go:334] "Generic (PLEG): container finished" podID="7b33a718-1da3-4ab4-a5bd-dcc7886a4944" 
containerID="873376eef954133691a67bf10e722bf4ca08692abdb9a335d998553e60673c56" exitCode=0 Jan 22 07:18:40 crc kubenswrapper[4982]: I0122 07:18:40.064654 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c4944f4c9-czwn4" event={"ID":"7b33a718-1da3-4ab4-a5bd-dcc7886a4944","Type":"ContainerDied","Data":"873376eef954133691a67bf10e722bf4ca08692abdb9a335d998553e60673c56"} Jan 22 07:18:40 crc kubenswrapper[4982]: I0122 07:18:40.477683 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c4944f4c9-czwn4" Jan 22 07:18:40 crc kubenswrapper[4982]: I0122 07:18:40.629197 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b33a718-1da3-4ab4-a5bd-dcc7886a4944-ovsdbserver-sb\") pod \"7b33a718-1da3-4ab4-a5bd-dcc7886a4944\" (UID: \"7b33a718-1da3-4ab4-a5bd-dcc7886a4944\") " Jan 22 07:18:40 crc kubenswrapper[4982]: I0122 07:18:40.629315 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b33a718-1da3-4ab4-a5bd-dcc7886a4944-config\") pod \"7b33a718-1da3-4ab4-a5bd-dcc7886a4944\" (UID: \"7b33a718-1da3-4ab4-a5bd-dcc7886a4944\") " Jan 22 07:18:40 crc kubenswrapper[4982]: I0122 07:18:40.629512 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b33a718-1da3-4ab4-a5bd-dcc7886a4944-dns-svc\") pod \"7b33a718-1da3-4ab4-a5bd-dcc7886a4944\" (UID: \"7b33a718-1da3-4ab4-a5bd-dcc7886a4944\") " Jan 22 07:18:40 crc kubenswrapper[4982]: I0122 07:18:40.629570 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xg8f5\" (UniqueName: \"kubernetes.io/projected/7b33a718-1da3-4ab4-a5bd-dcc7886a4944-kube-api-access-xg8f5\") pod \"7b33a718-1da3-4ab4-a5bd-dcc7886a4944\" (UID: \"7b33a718-1da3-4ab4-a5bd-dcc7886a4944\") " Jan 22 07:18:40 crc kubenswrapper[4982]: I0122 07:18:40.629605 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b33a718-1da3-4ab4-a5bd-dcc7886a4944-ovsdbserver-nb\") pod \"7b33a718-1da3-4ab4-a5bd-dcc7886a4944\" (UID: \"7b33a718-1da3-4ab4-a5bd-dcc7886a4944\") " Jan 22 07:18:40 crc kubenswrapper[4982]: I0122 07:18:40.638621 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b33a718-1da3-4ab4-a5bd-dcc7886a4944-kube-api-access-xg8f5" (OuterVolumeSpecName: "kube-api-access-xg8f5") pod "7b33a718-1da3-4ab4-a5bd-dcc7886a4944" (UID: "7b33a718-1da3-4ab4-a5bd-dcc7886a4944"). InnerVolumeSpecName "kube-api-access-xg8f5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:18:40 crc kubenswrapper[4982]: I0122 07:18:40.692873 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b33a718-1da3-4ab4-a5bd-dcc7886a4944-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7b33a718-1da3-4ab4-a5bd-dcc7886a4944" (UID: "7b33a718-1da3-4ab4-a5bd-dcc7886a4944"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:18:40 crc kubenswrapper[4982]: I0122 07:18:40.693995 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b33a718-1da3-4ab4-a5bd-dcc7886a4944-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7b33a718-1da3-4ab4-a5bd-dcc7886a4944" (UID: "7b33a718-1da3-4ab4-a5bd-dcc7886a4944"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:18:40 crc kubenswrapper[4982]: I0122 07:18:40.707403 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b33a718-1da3-4ab4-a5bd-dcc7886a4944-config" (OuterVolumeSpecName: "config") pod "7b33a718-1da3-4ab4-a5bd-dcc7886a4944" (UID: "7b33a718-1da3-4ab4-a5bd-dcc7886a4944"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:18:40 crc kubenswrapper[4982]: I0122 07:18:40.713665 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b33a718-1da3-4ab4-a5bd-dcc7886a4944-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7b33a718-1da3-4ab4-a5bd-dcc7886a4944" (UID: "7b33a718-1da3-4ab4-a5bd-dcc7886a4944"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:18:40 crc kubenswrapper[4982]: I0122 07:18:40.731675 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b33a718-1da3-4ab4-a5bd-dcc7886a4944-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 07:18:40 crc kubenswrapper[4982]: I0122 07:18:40.731714 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xg8f5\" (UniqueName: \"kubernetes.io/projected/7b33a718-1da3-4ab4-a5bd-dcc7886a4944-kube-api-access-xg8f5\") on node \"crc\" DevicePath \"\"" Jan 22 07:18:40 crc kubenswrapper[4982]: I0122 07:18:40.731726 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b33a718-1da3-4ab4-a5bd-dcc7886a4944-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 07:18:40 crc kubenswrapper[4982]: I0122 07:18:40.731734 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b33a718-1da3-4ab4-a5bd-dcc7886a4944-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 07:18:40 crc kubenswrapper[4982]: I0122 07:18:40.731743 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b33a718-1da3-4ab4-a5bd-dcc7886a4944-config\") on node \"crc\" DevicePath \"\"" Jan 22 07:18:41 crc kubenswrapper[4982]: I0122 07:18:41.078453 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c4944f4c9-czwn4" event={"ID":"7b33a718-1da3-4ab4-a5bd-dcc7886a4944","Type":"ContainerDied","Data":"ab709b124b8551649656ff704a30dff73d065eb9355eb8957ee290a7f297caae"} Jan 22 07:18:41 crc kubenswrapper[4982]: I0122 07:18:41.078479 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-c4944f4c9-czwn4" Jan 22 07:18:41 crc kubenswrapper[4982]: I0122 07:18:41.078870 4982 scope.go:117] "RemoveContainer" containerID="873376eef954133691a67bf10e722bf4ca08692abdb9a335d998553e60673c56" Jan 22 07:18:41 crc kubenswrapper[4982]: I0122 07:18:41.112324 4982 scope.go:117] "RemoveContainer" containerID="f97d3993d601001ab575d379b4562718280b50003880533c1bd0658d92a1b909" Jan 22 07:18:41 crc kubenswrapper[4982]: I0122 07:18:41.120694 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-c4944f4c9-czwn4"] Jan 22 07:18:41 crc kubenswrapper[4982]: I0122 07:18:41.142240 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-c4944f4c9-czwn4"] Jan 22 07:18:41 crc kubenswrapper[4982]: I0122 07:18:41.737306 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b33a718-1da3-4ab4-a5bd-dcc7886a4944" path="/var/lib/kubelet/pods/7b33a718-1da3-4ab4-a5bd-dcc7886a4944/volumes" Jan 22 07:18:44 crc kubenswrapper[4982]: I0122 07:18:44.719274 4982 scope.go:117] "RemoveContainer" containerID="dd1b1f68267d9d0fc8f31fc896f788451cccffbbd47d3265a31a9c82c393becc" Jan 22 07:18:44 crc kubenswrapper[4982]: E0122 07:18:44.719783 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:18:56 crc kubenswrapper[4982]: I0122 07:18:56.719492 4982 scope.go:117] "RemoveContainer" containerID="dd1b1f68267d9d0fc8f31fc896f788451cccffbbd47d3265a31a9c82c393becc" Jan 22 07:18:57 crc kubenswrapper[4982]: I0122 07:18:57.251148 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"3a502d842903abfcf3ed22217fd513f90f995fa6712f8a8d56a2c3f5ed5f5a08"} Jan 22 07:19:07 crc kubenswrapper[4982]: I0122 07:19:07.751298 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-85d8bfd68d-h9vnb" Jan 22 07:19:07 crc kubenswrapper[4982]: I0122 07:19:07.755785 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-85d8bfd68d-h9vnb" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.167159 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-w4dxj"] Jan 22 07:19:32 crc kubenswrapper[4982]: E0122 07:19:32.168134 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b33a718-1da3-4ab4-a5bd-dcc7886a4944" containerName="init" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.168146 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b33a718-1da3-4ab4-a5bd-dcc7886a4944" containerName="init" Jan 22 07:19:32 crc kubenswrapper[4982]: E0122 07:19:32.168179 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b33a718-1da3-4ab4-a5bd-dcc7886a4944" containerName="dnsmasq-dns" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.168186 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b33a718-1da3-4ab4-a5bd-dcc7886a4944" containerName="dnsmasq-dns" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.168345 4982 
memory_manager.go:354] "RemoveStaleState removing state" podUID="7b33a718-1da3-4ab4-a5bd-dcc7886a4944" containerName="dnsmasq-dns" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.168930 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-w4dxj" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.177076 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-w4dxj"] Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.186912 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfj2j\" (UniqueName: \"kubernetes.io/projected/c4366c97-1431-4d9e-9531-3ebf64880826-kube-api-access-tfj2j\") pod \"nova-api-db-create-w4dxj\" (UID: \"c4366c97-1431-4d9e-9531-3ebf64880826\") " pod="openstack/nova-api-db-create-w4dxj" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.186962 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4366c97-1431-4d9e-9531-3ebf64880826-operator-scripts\") pod \"nova-api-db-create-w4dxj\" (UID: \"c4366c97-1431-4d9e-9531-3ebf64880826\") " pod="openstack/nova-api-db-create-w4dxj" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.273728 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-dcvw4"] Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.274763 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-dcvw4" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.288686 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfj2j\" (UniqueName: \"kubernetes.io/projected/c4366c97-1431-4d9e-9531-3ebf64880826-kube-api-access-tfj2j\") pod \"nova-api-db-create-w4dxj\" (UID: \"c4366c97-1431-4d9e-9531-3ebf64880826\") " pod="openstack/nova-api-db-create-w4dxj" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.288734 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4366c97-1431-4d9e-9531-3ebf64880826-operator-scripts\") pod \"nova-api-db-create-w4dxj\" (UID: \"c4366c97-1431-4d9e-9531-3ebf64880826\") " pod="openstack/nova-api-db-create-w4dxj" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.288823 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a220695a-2676-40c9-9488-78b2e093e1cb-operator-scripts\") pod \"nova-cell0-db-create-dcvw4\" (UID: \"a220695a-2676-40c9-9488-78b2e093e1cb\") " pod="openstack/nova-cell0-db-create-dcvw4" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.288898 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7h47\" (UniqueName: \"kubernetes.io/projected/a220695a-2676-40c9-9488-78b2e093e1cb-kube-api-access-k7h47\") pod \"nova-cell0-db-create-dcvw4\" (UID: \"a220695a-2676-40c9-9488-78b2e093e1cb\") " pod="openstack/nova-cell0-db-create-dcvw4" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.289917 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4366c97-1431-4d9e-9531-3ebf64880826-operator-scripts\") pod \"nova-api-db-create-w4dxj\" (UID: 
\"c4366c97-1431-4d9e-9531-3ebf64880826\") " pod="openstack/nova-api-db-create-w4dxj" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.299364 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-dcvw4"] Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.312501 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfj2j\" (UniqueName: \"kubernetes.io/projected/c4366c97-1431-4d9e-9531-3ebf64880826-kube-api-access-tfj2j\") pod \"nova-api-db-create-w4dxj\" (UID: \"c4366c97-1431-4d9e-9531-3ebf64880826\") " pod="openstack/nova-api-db-create-w4dxj" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.389939 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a220695a-2676-40c9-9488-78b2e093e1cb-operator-scripts\") pod \"nova-cell0-db-create-dcvw4\" (UID: \"a220695a-2676-40c9-9488-78b2e093e1cb\") " pod="openstack/nova-cell0-db-create-dcvw4" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.390006 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7h47\" (UniqueName: \"kubernetes.io/projected/a220695a-2676-40c9-9488-78b2e093e1cb-kube-api-access-k7h47\") pod \"nova-cell0-db-create-dcvw4\" (UID: \"a220695a-2676-40c9-9488-78b2e093e1cb\") " pod="openstack/nova-cell0-db-create-dcvw4" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.391373 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a220695a-2676-40c9-9488-78b2e093e1cb-operator-scripts\") pod \"nova-cell0-db-create-dcvw4\" (UID: \"a220695a-2676-40c9-9488-78b2e093e1cb\") " pod="openstack/nova-cell0-db-create-dcvw4" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.391417 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-2b13-account-create-update-8xmrq"] Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.392340 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-2b13-account-create-update-8xmrq" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.401217 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.410519 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-2b13-account-create-update-8xmrq"] Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.431542 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7h47\" (UniqueName: \"kubernetes.io/projected/a220695a-2676-40c9-9488-78b2e093e1cb-kube-api-access-k7h47\") pod \"nova-cell0-db-create-dcvw4\" (UID: \"a220695a-2676-40c9-9488-78b2e093e1cb\") " pod="openstack/nova-cell0-db-create-dcvw4" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.486661 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-w4dxj" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.497042 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-5t2rq"] Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.498322 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-5t2rq" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.504252 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-5t2rq"] Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.585413 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-77bf-account-create-update-vqzlx"] Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.588360 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-77bf-account-create-update-vqzlx" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.588653 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-dcvw4" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.591725 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.592809 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9b89r\" (UniqueName: \"kubernetes.io/projected/b6a44b3b-7668-4335-bf14-926e8b32adba-kube-api-access-9b89r\") pod \"nova-api-2b13-account-create-update-8xmrq\" (UID: \"b6a44b3b-7668-4335-bf14-926e8b32adba\") " pod="openstack/nova-api-2b13-account-create-update-8xmrq" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.592903 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6a44b3b-7668-4335-bf14-926e8b32adba-operator-scripts\") pod \"nova-api-2b13-account-create-update-8xmrq\" (UID: \"b6a44b3b-7668-4335-bf14-926e8b32adba\") " pod="openstack/nova-api-2b13-account-create-update-8xmrq" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.614913 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-77bf-account-create-update-vqzlx"] Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.694226 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7pkd2\" (UniqueName: \"kubernetes.io/projected/d2e7c369-7f5b-48b3-9303-7ec29f9fd757-kube-api-access-7pkd2\") pod \"nova-cell0-77bf-account-create-update-vqzlx\" (UID: \"d2e7c369-7f5b-48b3-9303-7ec29f9fd757\") " pod="openstack/nova-cell0-77bf-account-create-update-vqzlx" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.694628 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9b89r\" (UniqueName: \"kubernetes.io/projected/b6a44b3b-7668-4335-bf14-926e8b32adba-kube-api-access-9b89r\") pod \"nova-api-2b13-account-create-update-8xmrq\" (UID: \"b6a44b3b-7668-4335-bf14-926e8b32adba\") " pod="openstack/nova-api-2b13-account-create-update-8xmrq" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.694671 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxfhs\" (UniqueName: \"kubernetes.io/projected/e2d11bc2-7757-402a-b4b6-fe5578b86213-kube-api-access-dxfhs\") pod \"nova-cell1-db-create-5t2rq\" (UID: \"e2d11bc2-7757-402a-b4b6-fe5578b86213\") " pod="openstack/nova-cell1-db-create-5t2rq" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.694700 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/d2e7c369-7f5b-48b3-9303-7ec29f9fd757-operator-scripts\") pod \"nova-cell0-77bf-account-create-update-vqzlx\" (UID: \"d2e7c369-7f5b-48b3-9303-7ec29f9fd757\") " pod="openstack/nova-cell0-77bf-account-create-update-vqzlx" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.694774 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6a44b3b-7668-4335-bf14-926e8b32adba-operator-scripts\") pod \"nova-api-2b13-account-create-update-8xmrq\" (UID: \"b6a44b3b-7668-4335-bf14-926e8b32adba\") " pod="openstack/nova-api-2b13-account-create-update-8xmrq" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.694926 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e2d11bc2-7757-402a-b4b6-fe5578b86213-operator-scripts\") pod \"nova-cell1-db-create-5t2rq\" (UID: \"e2d11bc2-7757-402a-b4b6-fe5578b86213\") " pod="openstack/nova-cell1-db-create-5t2rq" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.697558 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6a44b3b-7668-4335-bf14-926e8b32adba-operator-scripts\") pod \"nova-api-2b13-account-create-update-8xmrq\" (UID: \"b6a44b3b-7668-4335-bf14-926e8b32adba\") " pod="openstack/nova-api-2b13-account-create-update-8xmrq" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.713678 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9b89r\" (UniqueName: \"kubernetes.io/projected/b6a44b3b-7668-4335-bf14-926e8b32adba-kube-api-access-9b89r\") pod \"nova-api-2b13-account-create-update-8xmrq\" (UID: \"b6a44b3b-7668-4335-bf14-926e8b32adba\") " pod="openstack/nova-api-2b13-account-create-update-8xmrq" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.763051 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-2b13-account-create-update-8xmrq" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.799911 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7pkd2\" (UniqueName: \"kubernetes.io/projected/d2e7c369-7f5b-48b3-9303-7ec29f9fd757-kube-api-access-7pkd2\") pod \"nova-cell0-77bf-account-create-update-vqzlx\" (UID: \"d2e7c369-7f5b-48b3-9303-7ec29f9fd757\") " pod="openstack/nova-cell0-77bf-account-create-update-vqzlx" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.799980 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxfhs\" (UniqueName: \"kubernetes.io/projected/e2d11bc2-7757-402a-b4b6-fe5578b86213-kube-api-access-dxfhs\") pod \"nova-cell1-db-create-5t2rq\" (UID: \"e2d11bc2-7757-402a-b4b6-fe5578b86213\") " pod="openstack/nova-cell1-db-create-5t2rq" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.800006 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d2e7c369-7f5b-48b3-9303-7ec29f9fd757-operator-scripts\") pod \"nova-cell0-77bf-account-create-update-vqzlx\" (UID: \"d2e7c369-7f5b-48b3-9303-7ec29f9fd757\") " pod="openstack/nova-cell0-77bf-account-create-update-vqzlx" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.800167 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e2d11bc2-7757-402a-b4b6-fe5578b86213-operator-scripts\") pod \"nova-cell1-db-create-5t2rq\" (UID: \"e2d11bc2-7757-402a-b4b6-fe5578b86213\") " pod="openstack/nova-cell1-db-create-5t2rq" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.799972 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-2ffa-account-create-update-g8pwp"] Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.802104 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e2d11bc2-7757-402a-b4b6-fe5578b86213-operator-scripts\") pod \"nova-cell1-db-create-5t2rq\" (UID: \"e2d11bc2-7757-402a-b4b6-fe5578b86213\") " pod="openstack/nova-cell1-db-create-5t2rq" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.802113 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d2e7c369-7f5b-48b3-9303-7ec29f9fd757-operator-scripts\") pod \"nova-cell0-77bf-account-create-update-vqzlx\" (UID: \"d2e7c369-7f5b-48b3-9303-7ec29f9fd757\") " pod="openstack/nova-cell0-77bf-account-create-update-vqzlx" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.804010 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-2ffa-account-create-update-g8pwp" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.807565 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.819070 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7pkd2\" (UniqueName: \"kubernetes.io/projected/d2e7c369-7f5b-48b3-9303-7ec29f9fd757-kube-api-access-7pkd2\") pod \"nova-cell0-77bf-account-create-update-vqzlx\" (UID: \"d2e7c369-7f5b-48b3-9303-7ec29f9fd757\") " pod="openstack/nova-cell0-77bf-account-create-update-vqzlx" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.823158 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxfhs\" (UniqueName: \"kubernetes.io/projected/e2d11bc2-7757-402a-b4b6-fe5578b86213-kube-api-access-dxfhs\") pod \"nova-cell1-db-create-5t2rq\" (UID: \"e2d11bc2-7757-402a-b4b6-fe5578b86213\") " pod="openstack/nova-cell1-db-create-5t2rq" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.836079 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-2ffa-account-create-update-g8pwp"] Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.884657 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-5t2rq" Jan 22 07:19:32 crc kubenswrapper[4982]: I0122 07:19:32.912253 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-77bf-account-create-update-vqzlx" Jan 22 07:19:33 crc kubenswrapper[4982]: I0122 07:19:33.004098 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-47znz\" (UniqueName: \"kubernetes.io/projected/5b6dda5b-717b-4f03-a049-2b848c0c2c27-kube-api-access-47znz\") pod \"nova-cell1-2ffa-account-create-update-g8pwp\" (UID: \"5b6dda5b-717b-4f03-a049-2b848c0c2c27\") " pod="openstack/nova-cell1-2ffa-account-create-update-g8pwp" Jan 22 07:19:33 crc kubenswrapper[4982]: I0122 07:19:33.004201 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5b6dda5b-717b-4f03-a049-2b848c0c2c27-operator-scripts\") pod \"nova-cell1-2ffa-account-create-update-g8pwp\" (UID: \"5b6dda5b-717b-4f03-a049-2b848c0c2c27\") " pod="openstack/nova-cell1-2ffa-account-create-update-g8pwp" Jan 22 07:19:33 crc kubenswrapper[4982]: I0122 07:19:33.082102 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-w4dxj"] Jan 22 07:19:33 crc kubenswrapper[4982]: I0122 07:19:33.106551 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5b6dda5b-717b-4f03-a049-2b848c0c2c27-operator-scripts\") pod \"nova-cell1-2ffa-account-create-update-g8pwp\" (UID: \"5b6dda5b-717b-4f03-a049-2b848c0c2c27\") " pod="openstack/nova-cell1-2ffa-account-create-update-g8pwp" Jan 22 07:19:33 crc kubenswrapper[4982]: I0122 07:19:33.106681 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-47znz\" (UniqueName: \"kubernetes.io/projected/5b6dda5b-717b-4f03-a049-2b848c0c2c27-kube-api-access-47znz\") pod \"nova-cell1-2ffa-account-create-update-g8pwp\" (UID: \"5b6dda5b-717b-4f03-a049-2b848c0c2c27\") " pod="openstack/nova-cell1-2ffa-account-create-update-g8pwp" Jan 22 07:19:33 
crc kubenswrapper[4982]: I0122 07:19:33.107751 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5b6dda5b-717b-4f03-a049-2b848c0c2c27-operator-scripts\") pod \"nova-cell1-2ffa-account-create-update-g8pwp\" (UID: \"5b6dda5b-717b-4f03-a049-2b848c0c2c27\") " pod="openstack/nova-cell1-2ffa-account-create-update-g8pwp" Jan 22 07:19:33 crc kubenswrapper[4982]: I0122 07:19:33.129986 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-47znz\" (UniqueName: \"kubernetes.io/projected/5b6dda5b-717b-4f03-a049-2b848c0c2c27-kube-api-access-47znz\") pod \"nova-cell1-2ffa-account-create-update-g8pwp\" (UID: \"5b6dda5b-717b-4f03-a049-2b848c0c2c27\") " pod="openstack/nova-cell1-2ffa-account-create-update-g8pwp" Jan 22 07:19:33 crc kubenswrapper[4982]: I0122 07:19:33.133790 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-2ffa-account-create-update-g8pwp" Jan 22 07:19:33 crc kubenswrapper[4982]: I0122 07:19:33.237014 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-dcvw4"] Jan 22 07:19:33 crc kubenswrapper[4982]: W0122 07:19:33.241779 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda220695a_2676_40c9_9488_78b2e093e1cb.slice/crio-b829557af821b84e0979330e8c45960c59b59e23b3fa32a24573bed454ac3b1e WatchSource:0}: Error finding container b829557af821b84e0979330e8c45960c59b59e23b3fa32a24573bed454ac3b1e: Status 404 returned error can't find the container with id b829557af821b84e0979330e8c45960c59b59e23b3fa32a24573bed454ac3b1e Jan 22 07:19:33 crc kubenswrapper[4982]: I0122 07:19:33.391340 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-2b13-account-create-update-8xmrq"] Jan 22 07:19:33 crc kubenswrapper[4982]: I0122 07:19:33.492589 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-77bf-account-create-update-vqzlx"] Jan 22 07:19:33 crc kubenswrapper[4982]: I0122 07:19:33.502228 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-5t2rq"] Jan 22 07:19:33 crc kubenswrapper[4982]: W0122 07:19:33.514185 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode2d11bc2_7757_402a_b4b6_fe5578b86213.slice/crio-62b168989d40dddaaa00b76beced8f3ce40ce9cd1edb5dc5e49ef6d41f235e2b WatchSource:0}: Error finding container 62b168989d40dddaaa00b76beced8f3ce40ce9cd1edb5dc5e49ef6d41f235e2b: Status 404 returned error can't find the container with id 62b168989d40dddaaa00b76beced8f3ce40ce9cd1edb5dc5e49ef6d41f235e2b Jan 22 07:19:33 crc kubenswrapper[4982]: I0122 07:19:33.583086 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-77bf-account-create-update-vqzlx" event={"ID":"d2e7c369-7f5b-48b3-9303-7ec29f9fd757","Type":"ContainerStarted","Data":"6c5c2cfc4e42efe82797378958bd09c1e139a460da652a277ae61e80b306e4ec"} Jan 22 07:19:33 crc kubenswrapper[4982]: I0122 07:19:33.584750 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-2b13-account-create-update-8xmrq" event={"ID":"b6a44b3b-7668-4335-bf14-926e8b32adba","Type":"ContainerStarted","Data":"5ee9058add69c664e035df1b2adab72d62d5ebc8a5ef65c8467ea801ddf36543"} Jan 22 07:19:33 crc kubenswrapper[4982]: I0122 07:19:33.587293 4982 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/nova-api-db-create-w4dxj" event={"ID":"c4366c97-1431-4d9e-9531-3ebf64880826","Type":"ContainerStarted","Data":"fe5235f9b57df932353fb0dc56092f5b393a9d3584d5b397e5c4b59133f774c0"} Jan 22 07:19:33 crc kubenswrapper[4982]: I0122 07:19:33.587355 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-w4dxj" event={"ID":"c4366c97-1431-4d9e-9531-3ebf64880826","Type":"ContainerStarted","Data":"e98c0364a8765ee38cb91c9fc1938264892498276fd1e633c73555ddce02f15c"} Jan 22 07:19:33 crc kubenswrapper[4982]: I0122 07:19:33.588345 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-dcvw4" event={"ID":"a220695a-2676-40c9-9488-78b2e093e1cb","Type":"ContainerStarted","Data":"b829557af821b84e0979330e8c45960c59b59e23b3fa32a24573bed454ac3b1e"} Jan 22 07:19:33 crc kubenswrapper[4982]: I0122 07:19:33.589364 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-5t2rq" event={"ID":"e2d11bc2-7757-402a-b4b6-fe5578b86213","Type":"ContainerStarted","Data":"62b168989d40dddaaa00b76beced8f3ce40ce9cd1edb5dc5e49ef6d41f235e2b"} Jan 22 07:19:33 crc kubenswrapper[4982]: I0122 07:19:33.612586 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-w4dxj" podStartSLOduration=1.612566641 podStartE2EDuration="1.612566641s" podCreationTimestamp="2026-01-22 07:19:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:19:33.606095896 +0000 UTC m=+5634.444733909" watchObservedRunningTime="2026-01-22 07:19:33.612566641 +0000 UTC m=+5634.451204634" Jan 22 07:19:33 crc kubenswrapper[4982]: I0122 07:19:33.779668 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-2ffa-account-create-update-g8pwp"] Jan 22 07:19:33 crc kubenswrapper[4982]: W0122 07:19:33.786167 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5b6dda5b_717b_4f03_a049_2b848c0c2c27.slice/crio-7427434d8c5689806049e763ed05ac43d1b8f6944ee26fe451989b9ad93294a3 WatchSource:0}: Error finding container 7427434d8c5689806049e763ed05ac43d1b8f6944ee26fe451989b9ad93294a3: Status 404 returned error can't find the container with id 7427434d8c5689806049e763ed05ac43d1b8f6944ee26fe451989b9ad93294a3 Jan 22 07:19:34 crc kubenswrapper[4982]: I0122 07:19:34.613589 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-2ffa-account-create-update-g8pwp" event={"ID":"5b6dda5b-717b-4f03-a049-2b848c0c2c27","Type":"ContainerStarted","Data":"7f867cdebd63b42ebf9d53880264a14570c17898cb5dc6866ca8ccf70d50aa98"} Jan 22 07:19:34 crc kubenswrapper[4982]: I0122 07:19:34.613951 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-2ffa-account-create-update-g8pwp" event={"ID":"5b6dda5b-717b-4f03-a049-2b848c0c2c27","Type":"ContainerStarted","Data":"7427434d8c5689806049e763ed05ac43d1b8f6944ee26fe451989b9ad93294a3"} Jan 22 07:19:34 crc kubenswrapper[4982]: I0122 07:19:34.617245 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-5t2rq" event={"ID":"e2d11bc2-7757-402a-b4b6-fe5578b86213","Type":"ContainerStarted","Data":"a3a01d13c170a6034340b48ac2dbb794eb0908be81e280a57a677eb463959afa"} Jan 22 07:19:34 crc kubenswrapper[4982]: I0122 07:19:34.621930 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell0-77bf-account-create-update-vqzlx" event={"ID":"d2e7c369-7f5b-48b3-9303-7ec29f9fd757","Type":"ContainerStarted","Data":"50fce7693c69c8d79de602b70d1b2919800743a223649ffca9552985d97c472e"} Jan 22 07:19:34 crc kubenswrapper[4982]: I0122 07:19:34.625786 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-2b13-account-create-update-8xmrq" event={"ID":"b6a44b3b-7668-4335-bf14-926e8b32adba","Type":"ContainerStarted","Data":"4c5c84709f67855d277c1a243c9c35f9cf59362bfaeee6ddf99ee353419a5fc6"} Jan 22 07:19:34 crc kubenswrapper[4982]: I0122 07:19:34.630958 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-dcvw4" event={"ID":"a220695a-2676-40c9-9488-78b2e093e1cb","Type":"ContainerStarted","Data":"b8bf00fe84e1522399135c08d3c7c5dae6b6b0128140a0866ca394a1f7660949"} Jan 22 07:19:34 crc kubenswrapper[4982]: I0122 07:19:34.631785 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-2ffa-account-create-update-g8pwp" podStartSLOduration=2.631762923 podStartE2EDuration="2.631762923s" podCreationTimestamp="2026-01-22 07:19:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:19:34.626731597 +0000 UTC m=+5635.465369600" watchObservedRunningTime="2026-01-22 07:19:34.631762923 +0000 UTC m=+5635.470400926" Jan 22 07:19:34 crc kubenswrapper[4982]: I0122 07:19:34.658999 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-db-create-5t2rq" podStartSLOduration=2.658979677 podStartE2EDuration="2.658979677s" podCreationTimestamp="2026-01-22 07:19:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:19:34.652840452 +0000 UTC m=+5635.491478465" watchObservedRunningTime="2026-01-22 07:19:34.658979677 +0000 UTC m=+5635.497617690" Jan 22 07:19:34 crc kubenswrapper[4982]: I0122 07:19:34.662542 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-2b13-account-create-update-8xmrq" podStartSLOduration=2.662531613 podStartE2EDuration="2.662531613s" podCreationTimestamp="2026-01-22 07:19:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:19:34.641138586 +0000 UTC m=+5635.479776589" watchObservedRunningTime="2026-01-22 07:19:34.662531613 +0000 UTC m=+5635.501169636" Jan 22 07:19:34 crc kubenswrapper[4982]: I0122 07:19:34.676071 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-77bf-account-create-update-vqzlx" podStartSLOduration=2.6760508979999997 podStartE2EDuration="2.676050898s" podCreationTimestamp="2026-01-22 07:19:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:19:34.669656925 +0000 UTC m=+5635.508294928" watchObservedRunningTime="2026-01-22 07:19:34.676050898 +0000 UTC m=+5635.514688901" Jan 22 07:19:34 crc kubenswrapper[4982]: I0122 07:19:34.687541 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-dcvw4" podStartSLOduration=2.687522337 podStartE2EDuration="2.687522337s" podCreationTimestamp="2026-01-22 07:19:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:19:34.681927746 +0000 UTC m=+5635.520565739" watchObservedRunningTime="2026-01-22 07:19:34.687522337 +0000 UTC m=+5635.526160340" Jan 22 07:19:35 crc kubenswrapper[4982]: I0122 07:19:35.641659 4982 generic.go:334] "Generic (PLEG): container finished" podID="5b6dda5b-717b-4f03-a049-2b848c0c2c27" containerID="7f867cdebd63b42ebf9d53880264a14570c17898cb5dc6866ca8ccf70d50aa98" exitCode=0 Jan 22 07:19:35 crc kubenswrapper[4982]: I0122 07:19:35.641912 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-2ffa-account-create-update-g8pwp" event={"ID":"5b6dda5b-717b-4f03-a049-2b848c0c2c27","Type":"ContainerDied","Data":"7f867cdebd63b42ebf9d53880264a14570c17898cb5dc6866ca8ccf70d50aa98"} Jan 22 07:19:35 crc kubenswrapper[4982]: I0122 07:19:35.645496 4982 generic.go:334] "Generic (PLEG): container finished" podID="e2d11bc2-7757-402a-b4b6-fe5578b86213" containerID="a3a01d13c170a6034340b48ac2dbb794eb0908be81e280a57a677eb463959afa" exitCode=0 Jan 22 07:19:35 crc kubenswrapper[4982]: I0122 07:19:35.645610 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-5t2rq" event={"ID":"e2d11bc2-7757-402a-b4b6-fe5578b86213","Type":"ContainerDied","Data":"a3a01d13c170a6034340b48ac2dbb794eb0908be81e280a57a677eb463959afa"} Jan 22 07:19:35 crc kubenswrapper[4982]: I0122 07:19:35.649964 4982 generic.go:334] "Generic (PLEG): container finished" podID="d2e7c369-7f5b-48b3-9303-7ec29f9fd757" containerID="50fce7693c69c8d79de602b70d1b2919800743a223649ffca9552985d97c472e" exitCode=0 Jan 22 07:19:35 crc kubenswrapper[4982]: I0122 07:19:35.650068 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-77bf-account-create-update-vqzlx" event={"ID":"d2e7c369-7f5b-48b3-9303-7ec29f9fd757","Type":"ContainerDied","Data":"50fce7693c69c8d79de602b70d1b2919800743a223649ffca9552985d97c472e"} Jan 22 07:19:35 crc kubenswrapper[4982]: I0122 07:19:35.651931 4982 generic.go:334] "Generic (PLEG): container finished" podID="b6a44b3b-7668-4335-bf14-926e8b32adba" containerID="4c5c84709f67855d277c1a243c9c35f9cf59362bfaeee6ddf99ee353419a5fc6" exitCode=0 Jan 22 07:19:35 crc kubenswrapper[4982]: I0122 07:19:35.652060 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-2b13-account-create-update-8xmrq" event={"ID":"b6a44b3b-7668-4335-bf14-926e8b32adba","Type":"ContainerDied","Data":"4c5c84709f67855d277c1a243c9c35f9cf59362bfaeee6ddf99ee353419a5fc6"} Jan 22 07:19:35 crc kubenswrapper[4982]: I0122 07:19:35.653447 4982 generic.go:334] "Generic (PLEG): container finished" podID="c4366c97-1431-4d9e-9531-3ebf64880826" containerID="fe5235f9b57df932353fb0dc56092f5b393a9d3584d5b397e5c4b59133f774c0" exitCode=0 Jan 22 07:19:35 crc kubenswrapper[4982]: I0122 07:19:35.653561 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-w4dxj" event={"ID":"c4366c97-1431-4d9e-9531-3ebf64880826","Type":"ContainerDied","Data":"fe5235f9b57df932353fb0dc56092f5b393a9d3584d5b397e5c4b59133f774c0"} Jan 22 07:19:35 crc kubenswrapper[4982]: I0122 07:19:35.655598 4982 generic.go:334] "Generic (PLEG): container finished" podID="a220695a-2676-40c9-9488-78b2e093e1cb" containerID="b8bf00fe84e1522399135c08d3c7c5dae6b6b0128140a0866ca394a1f7660949" exitCode=0 Jan 22 07:19:35 crc kubenswrapper[4982]: I0122 07:19:35.655692 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-dcvw4" 
event={"ID":"a220695a-2676-40c9-9488-78b2e093e1cb","Type":"ContainerDied","Data":"b8bf00fe84e1522399135c08d3c7c5dae6b6b0128140a0866ca394a1f7660949"} Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.096609 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-2b13-account-create-update-8xmrq" Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.198715 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6a44b3b-7668-4335-bf14-926e8b32adba-operator-scripts\") pod \"b6a44b3b-7668-4335-bf14-926e8b32adba\" (UID: \"b6a44b3b-7668-4335-bf14-926e8b32adba\") " Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.198921 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9b89r\" (UniqueName: \"kubernetes.io/projected/b6a44b3b-7668-4335-bf14-926e8b32adba-kube-api-access-9b89r\") pod \"b6a44b3b-7668-4335-bf14-926e8b32adba\" (UID: \"b6a44b3b-7668-4335-bf14-926e8b32adba\") " Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.200626 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6a44b3b-7668-4335-bf14-926e8b32adba-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b6a44b3b-7668-4335-bf14-926e8b32adba" (UID: "b6a44b3b-7668-4335-bf14-926e8b32adba"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.204364 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6a44b3b-7668-4335-bf14-926e8b32adba-kube-api-access-9b89r" (OuterVolumeSpecName: "kube-api-access-9b89r") pod "b6a44b3b-7668-4335-bf14-926e8b32adba" (UID: "b6a44b3b-7668-4335-bf14-926e8b32adba"). InnerVolumeSpecName "kube-api-access-9b89r". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.274986 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-dcvw4" Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.281070 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-w4dxj" Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.290346 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-77bf-account-create-update-vqzlx" Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.299830 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfj2j\" (UniqueName: \"kubernetes.io/projected/c4366c97-1431-4d9e-9531-3ebf64880826-kube-api-access-tfj2j\") pod \"c4366c97-1431-4d9e-9531-3ebf64880826\" (UID: \"c4366c97-1431-4d9e-9531-3ebf64880826\") " Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.300005 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k7h47\" (UniqueName: \"kubernetes.io/projected/a220695a-2676-40c9-9488-78b2e093e1cb-kube-api-access-k7h47\") pod \"a220695a-2676-40c9-9488-78b2e093e1cb\" (UID: \"a220695a-2676-40c9-9488-78b2e093e1cb\") " Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.300056 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a220695a-2676-40c9-9488-78b2e093e1cb-operator-scripts\") pod \"a220695a-2676-40c9-9488-78b2e093e1cb\" (UID: \"a220695a-2676-40c9-9488-78b2e093e1cb\") " Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.300163 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4366c97-1431-4d9e-9531-3ebf64880826-operator-scripts\") pod \"c4366c97-1431-4d9e-9531-3ebf64880826\" (UID: \"c4366c97-1431-4d9e-9531-3ebf64880826\") " Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.300526 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9b89r\" (UniqueName: \"kubernetes.io/projected/b6a44b3b-7668-4335-bf14-926e8b32adba-kube-api-access-9b89r\") on node \"crc\" DevicePath \"\"" Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.300539 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6a44b3b-7668-4335-bf14-926e8b32adba-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.301658 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4366c97-1431-4d9e-9531-3ebf64880826-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c4366c97-1431-4d9e-9531-3ebf64880826" (UID: "c4366c97-1431-4d9e-9531-3ebf64880826"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.301670 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a220695a-2676-40c9-9488-78b2e093e1cb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a220695a-2676-40c9-9488-78b2e093e1cb" (UID: "a220695a-2676-40c9-9488-78b2e093e1cb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.309517 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a220695a-2676-40c9-9488-78b2e093e1cb-kube-api-access-k7h47" (OuterVolumeSpecName: "kube-api-access-k7h47") pod "a220695a-2676-40c9-9488-78b2e093e1cb" (UID: "a220695a-2676-40c9-9488-78b2e093e1cb"). InnerVolumeSpecName "kube-api-access-k7h47". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.310216 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4366c97-1431-4d9e-9531-3ebf64880826-kube-api-access-tfj2j" (OuterVolumeSpecName: "kube-api-access-tfj2j") pod "c4366c97-1431-4d9e-9531-3ebf64880826" (UID: "c4366c97-1431-4d9e-9531-3ebf64880826"). InnerVolumeSpecName "kube-api-access-tfj2j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.312281 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-2ffa-account-create-update-g8pwp" Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.334292 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-5t2rq" Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.400805 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d2e7c369-7f5b-48b3-9303-7ec29f9fd757-operator-scripts\") pod \"d2e7c369-7f5b-48b3-9303-7ec29f9fd757\" (UID: \"d2e7c369-7f5b-48b3-9303-7ec29f9fd757\") " Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.401176 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dxfhs\" (UniqueName: \"kubernetes.io/projected/e2d11bc2-7757-402a-b4b6-fe5578b86213-kube-api-access-dxfhs\") pod \"e2d11bc2-7757-402a-b4b6-fe5578b86213\" (UID: \"e2d11bc2-7757-402a-b4b6-fe5578b86213\") " Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.401265 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-47znz\" (UniqueName: \"kubernetes.io/projected/5b6dda5b-717b-4f03-a049-2b848c0c2c27-kube-api-access-47znz\") pod \"5b6dda5b-717b-4f03-a049-2b848c0c2c27\" (UID: \"5b6dda5b-717b-4f03-a049-2b848c0c2c27\") " Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.401388 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e2d11bc2-7757-402a-b4b6-fe5578b86213-operator-scripts\") pod \"e2d11bc2-7757-402a-b4b6-fe5578b86213\" (UID: \"e2d11bc2-7757-402a-b4b6-fe5578b86213\") " Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.401458 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7pkd2\" (UniqueName: \"kubernetes.io/projected/d2e7c369-7f5b-48b3-9303-7ec29f9fd757-kube-api-access-7pkd2\") pod \"d2e7c369-7f5b-48b3-9303-7ec29f9fd757\" (UID: \"d2e7c369-7f5b-48b3-9303-7ec29f9fd757\") " Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.401541 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5b6dda5b-717b-4f03-a049-2b848c0c2c27-operator-scripts\") pod \"5b6dda5b-717b-4f03-a049-2b848c0c2c27\" (UID: \"5b6dda5b-717b-4f03-a049-2b848c0c2c27\") " Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.401824 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k7h47\" (UniqueName: \"kubernetes.io/projected/a220695a-2676-40c9-9488-78b2e093e1cb-kube-api-access-k7h47\") on node \"crc\" DevicePath \"\"" Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.401904 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/a220695a-2676-40c9-9488-78b2e093e1cb-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.401968 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4366c97-1431-4d9e-9531-3ebf64880826-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.402038 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfj2j\" (UniqueName: \"kubernetes.io/projected/c4366c97-1431-4d9e-9531-3ebf64880826-kube-api-access-tfj2j\") on node \"crc\" DevicePath \"\"" Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.402003 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2d11bc2-7757-402a-b4b6-fe5578b86213-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e2d11bc2-7757-402a-b4b6-fe5578b86213" (UID: "e2d11bc2-7757-402a-b4b6-fe5578b86213"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.402242 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d2e7c369-7f5b-48b3-9303-7ec29f9fd757-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d2e7c369-7f5b-48b3-9303-7ec29f9fd757" (UID: "d2e7c369-7f5b-48b3-9303-7ec29f9fd757"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.402286 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b6dda5b-717b-4f03-a049-2b848c0c2c27-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5b6dda5b-717b-4f03-a049-2b848c0c2c27" (UID: "5b6dda5b-717b-4f03-a049-2b848c0c2c27"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.404841 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2d11bc2-7757-402a-b4b6-fe5578b86213-kube-api-access-dxfhs" (OuterVolumeSpecName: "kube-api-access-dxfhs") pod "e2d11bc2-7757-402a-b4b6-fe5578b86213" (UID: "e2d11bc2-7757-402a-b4b6-fe5578b86213"). InnerVolumeSpecName "kube-api-access-dxfhs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.404986 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2e7c369-7f5b-48b3-9303-7ec29f9fd757-kube-api-access-7pkd2" (OuterVolumeSpecName: "kube-api-access-7pkd2") pod "d2e7c369-7f5b-48b3-9303-7ec29f9fd757" (UID: "d2e7c369-7f5b-48b3-9303-7ec29f9fd757"). InnerVolumeSpecName "kube-api-access-7pkd2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.405234 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b6dda5b-717b-4f03-a049-2b848c0c2c27-kube-api-access-47znz" (OuterVolumeSpecName: "kube-api-access-47znz") pod "5b6dda5b-717b-4f03-a049-2b848c0c2c27" (UID: "5b6dda5b-717b-4f03-a049-2b848c0c2c27"). InnerVolumeSpecName "kube-api-access-47znz". 
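The reconciler entries above walk each volume of the finished job pods through the same three-step teardown: "operationExecutor.UnmountVolume started", then "UnmountVolume.TearDown succeeded", then "Volume detached ... DevicePath \"\"". A minimal Python sketch of recovering that pairing from raw lines like these, assuming only the klog text format shown here (the two sample entries are abridged copies from above):

import re

# Abridged copies of two entries from the log above: a TearDown success and
# the matching "Volume detached" confirmation for the same volume.
lines = [
    'I0122 07:19:37.200626 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6a44b3b-7668-4335-bf14-926e8b32adba-operator-scripts" ...',
    'I0122 07:19:37.300539 4982 reconciler_common.go:293] "Volume detached for volume ... kubernetes.io/configmap/b6a44b3b-7668-4335-bf14-926e8b32adba-operator-scripts ... DevicePath ..."',
]

# A volume's unique name looks like kubernetes.io/<plugin>/<pod-UID>-<volume-name>.
VOL = re.compile(r'kubernetes\.io/[a-z-]+/[0-9a-f-]+-[A-Za-z0-9-]+')

torn_down = {VOL.search(l).group(0) for l in lines if 'TearDown succeeded' in l}
detached  = {VOL.search(l).group(0) for l in lines if 'Volume detached' in l}
print(sorted(torn_down & detached))  # unmounted and confirmed detached
print(sorted(torn_down - detached))  # torn down, detach not yet reported

Applied to the full block above, every volume torn down for the six job pods is eventually reported detached.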
Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.503843 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5b6dda5b-717b-4f03-a049-2b848c0c2c27-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.503915 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d2e7c369-7f5b-48b3-9303-7ec29f9fd757-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.503929 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dxfhs\" (UniqueName: \"kubernetes.io/projected/e2d11bc2-7757-402a-b4b6-fe5578b86213-kube-api-access-dxfhs\") on node \"crc\" DevicePath \"\""
Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.503941 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-47znz\" (UniqueName: \"kubernetes.io/projected/5b6dda5b-717b-4f03-a049-2b848c0c2c27-kube-api-access-47znz\") on node \"crc\" DevicePath \"\""
Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.503951 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e2d11bc2-7757-402a-b4b6-fe5578b86213-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.503960 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7pkd2\" (UniqueName: \"kubernetes.io/projected/d2e7c369-7f5b-48b3-9303-7ec29f9fd757-kube-api-access-7pkd2\") on node \"crc\" DevicePath \"\""
Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.679504 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-5t2rq" event={"ID":"e2d11bc2-7757-402a-b4b6-fe5578b86213","Type":"ContainerDied","Data":"62b168989d40dddaaa00b76beced8f3ce40ce9cd1edb5dc5e49ef6d41f235e2b"}
Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.679781 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="62b168989d40dddaaa00b76beced8f3ce40ce9cd1edb5dc5e49ef6d41f235e2b"
Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.679966 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-5t2rq"
Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.683466 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-77bf-account-create-update-vqzlx" event={"ID":"d2e7c369-7f5b-48b3-9303-7ec29f9fd757","Type":"ContainerDied","Data":"6c5c2cfc4e42efe82797378958bd09c1e139a460da652a277ae61e80b306e4ec"}
Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.683523 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6c5c2cfc4e42efe82797378958bd09c1e139a460da652a277ae61e80b306e4ec"
Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.683639 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-77bf-account-create-update-vqzlx"
Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.694482 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-2b13-account-create-update-8xmrq" event={"ID":"b6a44b3b-7668-4335-bf14-926e8b32adba","Type":"ContainerDied","Data":"5ee9058add69c664e035df1b2adab72d62d5ebc8a5ef65c8467ea801ddf36543"}
Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.694526 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5ee9058add69c664e035df1b2adab72d62d5ebc8a5ef65c8467ea801ddf36543"
Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.694584 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-2b13-account-create-update-8xmrq"
Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.702387 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-w4dxj" event={"ID":"c4366c97-1431-4d9e-9531-3ebf64880826","Type":"ContainerDied","Data":"e98c0364a8765ee38cb91c9fc1938264892498276fd1e633c73555ddce02f15c"}
Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.702446 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e98c0364a8765ee38cb91c9fc1938264892498276fd1e633c73555ddce02f15c"
Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.702534 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-w4dxj"
Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.712685 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-dcvw4"
Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.712733 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-dcvw4" event={"ID":"a220695a-2676-40c9-9488-78b2e093e1cb","Type":"ContainerDied","Data":"b829557af821b84e0979330e8c45960c59b59e23b3fa32a24573bed454ac3b1e"}
Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.712785 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b829557af821b84e0979330e8c45960c59b59e23b3fa32a24573bed454ac3b1e"
Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.717203 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-2ffa-account-create-update-g8pwp" event={"ID":"5b6dda5b-717b-4f03-a049-2b848c0c2c27","Type":"ContainerDied","Data":"7427434d8c5689806049e763ed05ac43d1b8f6944ee26fe451989b9ad93294a3"}
Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.717249 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7427434d8c5689806049e763ed05ac43d1b8f6944ee26fe451989b9ad93294a3"
Jan 22 07:19:37 crc kubenswrapper[4982]: I0122 07:19:37.717357 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-2ffa-account-create-update-g8pwp"
Jan 22 07:19:42 crc kubenswrapper[4982]: I0122 07:19:42.799933 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-tlplj"]
Jan 22 07:19:42 crc kubenswrapper[4982]: E0122 07:19:42.800658 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4366c97-1431-4d9e-9531-3ebf64880826" containerName="mariadb-database-create"
Jan 22 07:19:42 crc kubenswrapper[4982]: I0122 07:19:42.800677 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4366c97-1431-4d9e-9531-3ebf64880826" containerName="mariadb-database-create"
Jan 22 07:19:42 crc kubenswrapper[4982]: E0122 07:19:42.800692 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2e7c369-7f5b-48b3-9303-7ec29f9fd757" containerName="mariadb-account-create-update"
Jan 22 07:19:42 crc kubenswrapper[4982]: I0122 07:19:42.800702 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2e7c369-7f5b-48b3-9303-7ec29f9fd757" containerName="mariadb-account-create-update"
Jan 22 07:19:42 crc kubenswrapper[4982]: E0122 07:19:42.800724 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a220695a-2676-40c9-9488-78b2e093e1cb" containerName="mariadb-database-create"
Jan 22 07:19:42 crc kubenswrapper[4982]: I0122 07:19:42.800733 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="a220695a-2676-40c9-9488-78b2e093e1cb" containerName="mariadb-database-create"
Jan 22 07:19:42 crc kubenswrapper[4982]: E0122 07:19:42.800747 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2d11bc2-7757-402a-b4b6-fe5578b86213" containerName="mariadb-database-create"
Jan 22 07:19:42 crc kubenswrapper[4982]: I0122 07:19:42.800755 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2d11bc2-7757-402a-b4b6-fe5578b86213" containerName="mariadb-database-create"
Jan 22 07:19:42 crc kubenswrapper[4982]: E0122 07:19:42.800778 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6a44b3b-7668-4335-bf14-926e8b32adba" containerName="mariadb-account-create-update"
Jan 22 07:19:42 crc kubenswrapper[4982]: I0122 07:19:42.800786 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6a44b3b-7668-4335-bf14-926e8b32adba" containerName="mariadb-account-create-update"
Jan 22 07:19:42 crc kubenswrapper[4982]: E0122 07:19:42.800799 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b6dda5b-717b-4f03-a049-2b848c0c2c27" containerName="mariadb-account-create-update"
Jan 22 07:19:42 crc kubenswrapper[4982]: I0122 07:19:42.800807 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b6dda5b-717b-4f03-a049-2b848c0c2c27" containerName="mariadb-account-create-update"
Jan 22 07:19:42 crc kubenswrapper[4982]: I0122 07:19:42.801344 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="a220695a-2676-40c9-9488-78b2e093e1cb" containerName="mariadb-database-create"
Jan 22 07:19:42 crc kubenswrapper[4982]: I0122 07:19:42.801372 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2d11bc2-7757-402a-b4b6-fe5578b86213" containerName="mariadb-database-create"
Jan 22 07:19:42 crc kubenswrapper[4982]: I0122 07:19:42.801386 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4366c97-1431-4d9e-9531-3ebf64880826" containerName="mariadb-database-create"
Jan 22 07:19:42 crc kubenswrapper[4982]: I0122 07:19:42.801400 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2e7c369-7f5b-48b3-9303-7ec29f9fd757" containerName="mariadb-account-create-update"
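Before the new nova-cell0-conductor-db-sync-tlplj pod is admitted, the cpu_manager, state_mem, and memory_manager entries above clear the stale per-container resource assignments left by the six finished create/update jobs. Each such entry carries a podUID/containerName pair; a small sketch that collects the distinct pairs, assuming the field layout shown (sample entries abridged from above):

import re

# Abridged copies of two RemoveStaleState entries from the block above.
lines = [
    'E0122 07:19:42.800658 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4366c97-1431-4d9e-9531-3ebf64880826" containerName="mariadb-database-create"',
    'I0122 07:19:42.801344 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="a220695a-2676-40c9-9488-78b2e093e1cb" containerName="mariadb-database-create"',
]

PAIR = re.compile(r'podUID="([^"]+)" containerName="([^"]+)"')
stale = {PAIR.search(l).groups() for l in lines if 'RemoveStaleState' in l}
for uid, name in sorted(stale):
    print(uid, name)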
Jan 22 07:19:42 crc kubenswrapper[4982]: I0122 07:19:42.801415 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b6dda5b-717b-4f03-a049-2b848c0c2c27" containerName="mariadb-account-create-update"
Jan 22 07:19:42 crc kubenswrapper[4982]: I0122 07:19:42.801425 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6a44b3b-7668-4335-bf14-926e8b32adba" containerName="mariadb-account-create-update"
Jan 22 07:19:42 crc kubenswrapper[4982]: I0122 07:19:42.802171 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-tlplj"
Jan 22 07:19:42 crc kubenswrapper[4982]: I0122 07:19:42.803980 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-hkqz6"
Jan 22 07:19:42 crc kubenswrapper[4982]: I0122 07:19:42.804195 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Jan 22 07:19:42 crc kubenswrapper[4982]: I0122 07:19:42.805277 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts"
Jan 22 07:19:42 crc kubenswrapper[4982]: I0122 07:19:42.821140 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-tlplj"]
Jan 22 07:19:42 crc kubenswrapper[4982]: I0122 07:19:42.893757 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54b1df53-5b7e-450b-bb04-f93da76ad0a6-scripts\") pod \"nova-cell0-conductor-db-sync-tlplj\" (UID: \"54b1df53-5b7e-450b-bb04-f93da76ad0a6\") " pod="openstack/nova-cell0-conductor-db-sync-tlplj"
Jan 22 07:19:42 crc kubenswrapper[4982]: I0122 07:19:42.893844 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54b1df53-5b7e-450b-bb04-f93da76ad0a6-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-tlplj\" (UID: \"54b1df53-5b7e-450b-bb04-f93da76ad0a6\") " pod="openstack/nova-cell0-conductor-db-sync-tlplj"
Jan 22 07:19:42 crc kubenswrapper[4982]: I0122 07:19:42.893955 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-26vrd\" (UniqueName: \"kubernetes.io/projected/54b1df53-5b7e-450b-bb04-f93da76ad0a6-kube-api-access-26vrd\") pod \"nova-cell0-conductor-db-sync-tlplj\" (UID: \"54b1df53-5b7e-450b-bb04-f93da76ad0a6\") " pod="openstack/nova-cell0-conductor-db-sync-tlplj"
Jan 22 07:19:42 crc kubenswrapper[4982]: I0122 07:19:42.893980 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54b1df53-5b7e-450b-bb04-f93da76ad0a6-config-data\") pod \"nova-cell0-conductor-db-sync-tlplj\" (UID: \"54b1df53-5b7e-450b-bb04-f93da76ad0a6\") " pod="openstack/nova-cell0-conductor-db-sync-tlplj"
Jan 22 07:19:42 crc kubenswrapper[4982]: I0122 07:19:42.995362 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-26vrd\" (UniqueName: \"kubernetes.io/projected/54b1df53-5b7e-450b-bb04-f93da76ad0a6-kube-api-access-26vrd\") pod \"nova-cell0-conductor-db-sync-tlplj\" (UID: \"54b1df53-5b7e-450b-bb04-f93da76ad0a6\") " pod="openstack/nova-cell0-conductor-db-sync-tlplj"
Jan 22 07:19:42 crc kubenswrapper[4982]: I0122 07:19:42.995423 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54b1df53-5b7e-450b-bb04-f93da76ad0a6-config-data\") pod \"nova-cell0-conductor-db-sync-tlplj\" (UID: \"54b1df53-5b7e-450b-bb04-f93da76ad0a6\") " pod="openstack/nova-cell0-conductor-db-sync-tlplj"
Jan 22 07:19:42 crc kubenswrapper[4982]: I0122 07:19:42.995525 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54b1df53-5b7e-450b-bb04-f93da76ad0a6-scripts\") pod \"nova-cell0-conductor-db-sync-tlplj\" (UID: \"54b1df53-5b7e-450b-bb04-f93da76ad0a6\") " pod="openstack/nova-cell0-conductor-db-sync-tlplj"
Jan 22 07:19:42 crc kubenswrapper[4982]: I0122 07:19:42.995574 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54b1df53-5b7e-450b-bb04-f93da76ad0a6-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-tlplj\" (UID: \"54b1df53-5b7e-450b-bb04-f93da76ad0a6\") " pod="openstack/nova-cell0-conductor-db-sync-tlplj"
Jan 22 07:19:43 crc kubenswrapper[4982]: I0122 07:19:43.003730 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54b1df53-5b7e-450b-bb04-f93da76ad0a6-config-data\") pod \"nova-cell0-conductor-db-sync-tlplj\" (UID: \"54b1df53-5b7e-450b-bb04-f93da76ad0a6\") " pod="openstack/nova-cell0-conductor-db-sync-tlplj"
Jan 22 07:19:43 crc kubenswrapper[4982]: I0122 07:19:43.004341 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54b1df53-5b7e-450b-bb04-f93da76ad0a6-scripts\") pod \"nova-cell0-conductor-db-sync-tlplj\" (UID: \"54b1df53-5b7e-450b-bb04-f93da76ad0a6\") " pod="openstack/nova-cell0-conductor-db-sync-tlplj"
Jan 22 07:19:43 crc kubenswrapper[4982]: I0122 07:19:43.004441 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54b1df53-5b7e-450b-bb04-f93da76ad0a6-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-tlplj\" (UID: \"54b1df53-5b7e-450b-bb04-f93da76ad0a6\") " pod="openstack/nova-cell0-conductor-db-sync-tlplj"
Jan 22 07:19:43 crc kubenswrapper[4982]: I0122 07:19:43.017556 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-26vrd\" (UniqueName: \"kubernetes.io/projected/54b1df53-5b7e-450b-bb04-f93da76ad0a6-kube-api-access-26vrd\") pod \"nova-cell0-conductor-db-sync-tlplj\" (UID: \"54b1df53-5b7e-450b-bb04-f93da76ad0a6\") " pod="openstack/nova-cell0-conductor-db-sync-tlplj"
Jan 22 07:19:43 crc kubenswrapper[4982]: I0122 07:19:43.119651 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-tlplj"
Jan 22 07:19:43 crc kubenswrapper[4982]: I0122 07:19:43.599834 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-tlplj"]
Jan 22 07:19:43 crc kubenswrapper[4982]: I0122 07:19:43.776143 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-tlplj" event={"ID":"54b1df53-5b7e-450b-bb04-f93da76ad0a6","Type":"ContainerStarted","Data":"fc50c4d27223c3e35f306dd2bc3724a53b2d9b7f18ee829f1e1bf510620abe6b"}
Jan 22 07:19:44 crc kubenswrapper[4982]: I0122 07:19:44.784835 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-tlplj" event={"ID":"54b1df53-5b7e-450b-bb04-f93da76ad0a6","Type":"ContainerStarted","Data":"c8200c57fc403fcffa3f29e4a00cd1fecb42fbb2954e1a00d40db5ec9b05c92a"}
Jan 22 07:19:44 crc kubenswrapper[4982]: I0122 07:19:44.817065 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-tlplj" podStartSLOduration=2.817047784 podStartE2EDuration="2.817047784s" podCreationTimestamp="2026-01-22 07:19:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:19:44.810764565 +0000 UTC m=+5645.649402588" watchObservedRunningTime="2026-01-22 07:19:44.817047784 +0000 UTC m=+5645.655685787"
Jan 22 07:19:54 crc kubenswrapper[4982]: I0122 07:19:54.887568 4982 generic.go:334] "Generic (PLEG): container finished" podID="54b1df53-5b7e-450b-bb04-f93da76ad0a6" containerID="c8200c57fc403fcffa3f29e4a00cd1fecb42fbb2954e1a00d40db5ec9b05c92a" exitCode=0
Jan 22 07:19:54 crc kubenswrapper[4982]: I0122 07:19:54.888263 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-tlplj" event={"ID":"54b1df53-5b7e-450b-bb04-f93da76ad0a6","Type":"ContainerDied","Data":"c8200c57fc403fcffa3f29e4a00cd1fecb42fbb2954e1a00d40db5ec9b05c92a"}
Jan 22 07:19:56 crc kubenswrapper[4982]: I0122 07:19:56.225799 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-tlplj"
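The pod_startup_latency_tracker entry above reports podStartSLOduration=2.817047784s for nova-cell0-conductor-db-sync-tlplj, and the logged fields are self-consistent: watchObservedRunningTime (07:19:44.817047784) minus podCreationTimestamp (07:19:42 exactly) is 2.817047784 s, with no image-pull window to subtract since firstStartedPulling/lastFinishedPulling are the zero time. A quick check of that arithmetic, truncated to microseconds since Python's datetime carries no nanoseconds:

from datetime import datetime, timezone

created  = datetime(2026, 1, 22, 7, 19, 42, tzinfo=timezone.utc)
# watchObservedRunningTime 07:19:44.817047784, truncated to microseconds:
observed = datetime(2026, 1, 22, 7, 19, 44, 817047, tzinfo=timezone.utc)

print((observed - created).total_seconds())  # 2.817047 ~= podStartSLOduration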
Jan 22 07:19:56 crc kubenswrapper[4982]: I0122 07:19:56.333156 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54b1df53-5b7e-450b-bb04-f93da76ad0a6-scripts\") pod \"54b1df53-5b7e-450b-bb04-f93da76ad0a6\" (UID: \"54b1df53-5b7e-450b-bb04-f93da76ad0a6\") "
Jan 22 07:19:56 crc kubenswrapper[4982]: I0122 07:19:56.333252 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54b1df53-5b7e-450b-bb04-f93da76ad0a6-combined-ca-bundle\") pod \"54b1df53-5b7e-450b-bb04-f93da76ad0a6\" (UID: \"54b1df53-5b7e-450b-bb04-f93da76ad0a6\") "
Jan 22 07:19:56 crc kubenswrapper[4982]: I0122 07:19:56.333352 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54b1df53-5b7e-450b-bb04-f93da76ad0a6-config-data\") pod \"54b1df53-5b7e-450b-bb04-f93da76ad0a6\" (UID: \"54b1df53-5b7e-450b-bb04-f93da76ad0a6\") "
Jan 22 07:19:56 crc kubenswrapper[4982]: I0122 07:19:56.333436 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-26vrd\" (UniqueName: \"kubernetes.io/projected/54b1df53-5b7e-450b-bb04-f93da76ad0a6-kube-api-access-26vrd\") pod \"54b1df53-5b7e-450b-bb04-f93da76ad0a6\" (UID: \"54b1df53-5b7e-450b-bb04-f93da76ad0a6\") "
Jan 22 07:19:56 crc kubenswrapper[4982]: I0122 07:19:56.339153 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54b1df53-5b7e-450b-bb04-f93da76ad0a6-scripts" (OuterVolumeSpecName: "scripts") pod "54b1df53-5b7e-450b-bb04-f93da76ad0a6" (UID: "54b1df53-5b7e-450b-bb04-f93da76ad0a6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 07:19:56 crc kubenswrapper[4982]: I0122 07:19:56.339254 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54b1df53-5b7e-450b-bb04-f93da76ad0a6-kube-api-access-26vrd" (OuterVolumeSpecName: "kube-api-access-26vrd") pod "54b1df53-5b7e-450b-bb04-f93da76ad0a6" (UID: "54b1df53-5b7e-450b-bb04-f93da76ad0a6"). InnerVolumeSpecName "kube-api-access-26vrd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 07:19:56 crc kubenswrapper[4982]: I0122 07:19:56.356917 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54b1df53-5b7e-450b-bb04-f93da76ad0a6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "54b1df53-5b7e-450b-bb04-f93da76ad0a6" (UID: "54b1df53-5b7e-450b-bb04-f93da76ad0a6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 07:19:56 crc kubenswrapper[4982]: I0122 07:19:56.363953 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54b1df53-5b7e-450b-bb04-f93da76ad0a6-config-data" (OuterVolumeSpecName: "config-data") pod "54b1df53-5b7e-450b-bb04-f93da76ad0a6" (UID: "54b1df53-5b7e-450b-bb04-f93da76ad0a6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 07:19:56 crc kubenswrapper[4982]: I0122 07:19:56.435144 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54b1df53-5b7e-450b-bb04-f93da76ad0a6-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 22 07:19:56 crc kubenswrapper[4982]: I0122 07:19:56.435186 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54b1df53-5b7e-450b-bb04-f93da76ad0a6-config-data\") on node \"crc\" DevicePath \"\""
Jan 22 07:19:56 crc kubenswrapper[4982]: I0122 07:19:56.435195 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-26vrd\" (UniqueName: \"kubernetes.io/projected/54b1df53-5b7e-450b-bb04-f93da76ad0a6-kube-api-access-26vrd\") on node \"crc\" DevicePath \"\""
Jan 22 07:19:56 crc kubenswrapper[4982]: I0122 07:19:56.435208 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54b1df53-5b7e-450b-bb04-f93da76ad0a6-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 07:19:56 crc kubenswrapper[4982]: I0122 07:19:56.906917 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-tlplj"
Jan 22 07:19:56 crc kubenswrapper[4982]: I0122 07:19:56.906814 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-tlplj" event={"ID":"54b1df53-5b7e-450b-bb04-f93da76ad0a6","Type":"ContainerDied","Data":"fc50c4d27223c3e35f306dd2bc3724a53b2d9b7f18ee829f1e1bf510620abe6b"}
Jan 22 07:19:56 crc kubenswrapper[4982]: I0122 07:19:56.912639 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fc50c4d27223c3e35f306dd2bc3724a53b2d9b7f18ee829f1e1bf510620abe6b"
Jan 22 07:19:56 crc kubenswrapper[4982]: I0122 07:19:56.993926 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"]
Jan 22 07:19:56 crc kubenswrapper[4982]: E0122 07:19:56.994389 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54b1df53-5b7e-450b-bb04-f93da76ad0a6" containerName="nova-cell0-conductor-db-sync"
Jan 22 07:19:56 crc kubenswrapper[4982]: I0122 07:19:56.994411 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="54b1df53-5b7e-450b-bb04-f93da76ad0a6" containerName="nova-cell0-conductor-db-sync"
Jan 22 07:19:56 crc kubenswrapper[4982]: I0122 07:19:56.994627 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="54b1df53-5b7e-450b-bb04-f93da76ad0a6" containerName="nova-cell0-conductor-db-sync"
Jan 22 07:19:56 crc kubenswrapper[4982]: I0122 07:19:56.995417 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
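Each "SyncLoop (PLEG): event for pod" entry above embeds a small JSON payload, event={"ID":...,"Type":...,"Data":...}, where ID is the pod UID and Data the container or sandbox ID. A sketch of pulling a lifecycle transition out of such a line, assuming the format shown (sample line abridged from the ContainerDied entry above):

import json
import re

# Abridged copy of a PLEG entry from the log above.
line = ('I0122 07:19:56.906814 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" '
        'pod="openstack/nova-cell0-conductor-db-sync-tlplj" '
        'event={"ID":"54b1df53-5b7e-450b-bb04-f93da76ad0a6","Type":"ContainerDied",'
        '"Data":"fc50c4d27223c3e35f306dd2bc3724a53b2d9b7f18ee829f1e1bf510620abe6b"}')

pod = re.search(r'pod="([^"]+)"', line).group(1)
event = json.loads(line.split("event=", 1)[1])
print(pod, event["Type"], event["Data"][:12])
# -> openstack/nova-cell0-conductor-db-sync-tlplj ContainerDied fc50c4d27223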
Jan 22 07:19:56 crc kubenswrapper[4982]: I0122 07:19:56.997406 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Jan 22 07:19:56 crc kubenswrapper[4982]: I0122 07:19:56.997630 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-hkqz6"
Jan 22 07:19:57 crc kubenswrapper[4982]: I0122 07:19:57.020841 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Jan 22 07:19:57 crc kubenswrapper[4982]: I0122 07:19:57.046311 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d238384e-a7ec-4417-8c61-362fb936cd7b-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"d238384e-a7ec-4417-8c61-362fb936cd7b\") " pod="openstack/nova-cell0-conductor-0"
Jan 22 07:19:57 crc kubenswrapper[4982]: I0122 07:19:57.046402 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5sr4m\" (UniqueName: \"kubernetes.io/projected/d238384e-a7ec-4417-8c61-362fb936cd7b-kube-api-access-5sr4m\") pod \"nova-cell0-conductor-0\" (UID: \"d238384e-a7ec-4417-8c61-362fb936cd7b\") " pod="openstack/nova-cell0-conductor-0"
Jan 22 07:19:57 crc kubenswrapper[4982]: I0122 07:19:57.046476 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d238384e-a7ec-4417-8c61-362fb936cd7b-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"d238384e-a7ec-4417-8c61-362fb936cd7b\") " pod="openstack/nova-cell0-conductor-0"
Jan 22 07:19:57 crc kubenswrapper[4982]: I0122 07:19:57.148097 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5sr4m\" (UniqueName: \"kubernetes.io/projected/d238384e-a7ec-4417-8c61-362fb936cd7b-kube-api-access-5sr4m\") pod \"nova-cell0-conductor-0\" (UID: \"d238384e-a7ec-4417-8c61-362fb936cd7b\") " pod="openstack/nova-cell0-conductor-0"
Jan 22 07:19:57 crc kubenswrapper[4982]: I0122 07:19:57.148214 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d238384e-a7ec-4417-8c61-362fb936cd7b-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"d238384e-a7ec-4417-8c61-362fb936cd7b\") " pod="openstack/nova-cell0-conductor-0"
Jan 22 07:19:57 crc kubenswrapper[4982]: I0122 07:19:57.148319 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d238384e-a7ec-4417-8c61-362fb936cd7b-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"d238384e-a7ec-4417-8c61-362fb936cd7b\") " pod="openstack/nova-cell0-conductor-0"
Jan 22 07:19:57 crc kubenswrapper[4982]: I0122 07:19:57.152536 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d238384e-a7ec-4417-8c61-362fb936cd7b-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"d238384e-a7ec-4417-8c61-362fb936cd7b\") " pod="openstack/nova-cell0-conductor-0"
Jan 22 07:19:57 crc kubenswrapper[4982]: I0122 07:19:57.152677 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d238384e-a7ec-4417-8c61-362fb936cd7b-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"d238384e-a7ec-4417-8c61-362fb936cd7b\") " pod="openstack/nova-cell0-conductor-0"
Jan 22 07:19:57 crc kubenswrapper[4982]: I0122 07:19:57.164903 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5sr4m\" (UniqueName: \"kubernetes.io/projected/d238384e-a7ec-4417-8c61-362fb936cd7b-kube-api-access-5sr4m\") pod \"nova-cell0-conductor-0\" (UID: \"d238384e-a7ec-4417-8c61-362fb936cd7b\") " pod="openstack/nova-cell0-conductor-0"
Jan 22 07:19:57 crc kubenswrapper[4982]: I0122 07:19:57.321113 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Jan 22 07:19:57 crc kubenswrapper[4982]: I0122 07:19:57.863748 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Jan 22 07:19:57 crc kubenswrapper[4982]: I0122 07:19:57.917039 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"d238384e-a7ec-4417-8c61-362fb936cd7b","Type":"ContainerStarted","Data":"7e92886b5dcea560f69c416ff1912c8e9aca9cdde07ac9ceee6f6a30bfcc6eb0"}
Jan 22 07:19:58 crc kubenswrapper[4982]: I0122 07:19:58.925508 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"d238384e-a7ec-4417-8c61-362fb936cd7b","Type":"ContainerStarted","Data":"c9c89fb9a34e322ea4bbc68047806ac64669c3e10098526ae5a542c46e969688"}
Jan 22 07:19:58 crc kubenswrapper[4982]: I0122 07:19:58.925819 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0"
Jan 22 07:19:58 crc kubenswrapper[4982]: I0122 07:19:58.945836 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.945816946 podStartE2EDuration="2.945816946s" podCreationTimestamp="2026-01-22 07:19:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:19:58.939038723 +0000 UTC m=+5659.777676746" watchObservedRunningTime="2026-01-22 07:19:58.945816946 +0000 UTC m=+5659.784454949"
Jan 22 07:20:03 crc kubenswrapper[4982]: I0122 07:20:03.655778 4982 scope.go:117] "RemoveContainer" containerID="7e4ba074c1295db7c25f45c60a840ae2253044da50ae6a74db5e51c3bfc0b984"
Jan 22 07:20:07 crc kubenswrapper[4982]: I0122 07:20:07.347818 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0"
Jan 22 07:20:07 crc kubenswrapper[4982]: I0122 07:20:07.794173 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-w2b2h"]
Jan 22 07:20:07 crc kubenswrapper[4982]: I0122 07:20:07.795724 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-w2b2h"
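nova-cell0-conductor-0 follows the usual readiness progression above: a probe entry with status="" at 07:19:58.925819, then status="ready" at 07:20:07.347818, roughly 8.4 s later. A sketch measuring that gap from the two probe entries, assuming the klog header layout shown (Lmmdd hh:mm:ss.uuuuuu; the year is not part of the header, so it is ignored here; sample lines abridged from above):

from datetime import datetime

# Abridged copies of the two readiness-probe entries above.
lines = [
    'I0122 07:19:58.925819 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0"',
    'I0122 07:20:07.347818 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0"',
]

def stamp(line):
    # Parse the klog header timestamp that follows the severity letter.
    return datetime.strptime(line[1:21], "%m%d %H:%M:%S.%f")

first, ready = (stamp(l) for l in lines)
print((ready - first).total_seconds())  # ~8.42 s until status="ready"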
Jan 22 07:20:07 crc kubenswrapper[4982]: I0122 07:20:07.798217 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data"
Jan 22 07:20:07 crc kubenswrapper[4982]: I0122 07:20:07.798423 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts"
Jan 22 07:20:07 crc kubenswrapper[4982]: I0122 07:20:07.805064 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-w2b2h"]
Jan 22 07:20:07 crc kubenswrapper[4982]: I0122 07:20:07.907932 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61a89c15-5367-4977-8be9-80f16a2ef3a5-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-w2b2h\" (UID: \"61a89c15-5367-4977-8be9-80f16a2ef3a5\") " pod="openstack/nova-cell0-cell-mapping-w2b2h"
Jan 22 07:20:07 crc kubenswrapper[4982]: I0122 07:20:07.908001 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5s9q\" (UniqueName: \"kubernetes.io/projected/61a89c15-5367-4977-8be9-80f16a2ef3a5-kube-api-access-z5s9q\") pod \"nova-cell0-cell-mapping-w2b2h\" (UID: \"61a89c15-5367-4977-8be9-80f16a2ef3a5\") " pod="openstack/nova-cell0-cell-mapping-w2b2h"
Jan 22 07:20:07 crc kubenswrapper[4982]: I0122 07:20:07.908035 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61a89c15-5367-4977-8be9-80f16a2ef3a5-scripts\") pod \"nova-cell0-cell-mapping-w2b2h\" (UID: \"61a89c15-5367-4977-8be9-80f16a2ef3a5\") " pod="openstack/nova-cell0-cell-mapping-w2b2h"
Jan 22 07:20:07 crc kubenswrapper[4982]: I0122 07:20:07.908073 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61a89c15-5367-4977-8be9-80f16a2ef3a5-config-data\") pod \"nova-cell0-cell-mapping-w2b2h\" (UID: \"61a89c15-5367-4977-8be9-80f16a2ef3a5\") " pod="openstack/nova-cell0-cell-mapping-w2b2h"
Jan 22 07:20:07 crc kubenswrapper[4982]: I0122 07:20:07.925754 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Jan 22 07:20:07 crc kubenswrapper[4982]: I0122 07:20:07.927182 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 22 07:20:07 crc kubenswrapper[4982]: I0122 07:20:07.930073 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Jan 22 07:20:07 crc kubenswrapper[4982]: I0122 07:20:07.937089 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.009059 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61a89c15-5367-4977-8be9-80f16a2ef3a5-config-data\") pod \"nova-cell0-cell-mapping-w2b2h\" (UID: \"61a89c15-5367-4977-8be9-80f16a2ef3a5\") " pod="openstack/nova-cell0-cell-mapping-w2b2h"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.009123 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6f979a4f-d850-4fca-9abf-34073ce42424-logs\") pod \"nova-api-0\" (UID: \"6f979a4f-d850-4fca-9abf-34073ce42424\") " pod="openstack/nova-api-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.009168 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f979a4f-d850-4fca-9abf-34073ce42424-config-data\") pod \"nova-api-0\" (UID: \"6f979a4f-d850-4fca-9abf-34073ce42424\") " pod="openstack/nova-api-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.009206 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61a89c15-5367-4977-8be9-80f16a2ef3a5-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-w2b2h\" (UID: \"61a89c15-5367-4977-8be9-80f16a2ef3a5\") " pod="openstack/nova-cell0-cell-mapping-w2b2h"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.009230 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f979a4f-d850-4fca-9abf-34073ce42424-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6f979a4f-d850-4fca-9abf-34073ce42424\") " pod="openstack/nova-api-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.009247 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-st8jk\" (UniqueName: \"kubernetes.io/projected/6f979a4f-d850-4fca-9abf-34073ce42424-kube-api-access-st8jk\") pod \"nova-api-0\" (UID: \"6f979a4f-d850-4fca-9abf-34073ce42424\") " pod="openstack/nova-api-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.009277 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z5s9q\" (UniqueName: \"kubernetes.io/projected/61a89c15-5367-4977-8be9-80f16a2ef3a5-kube-api-access-z5s9q\") pod \"nova-cell0-cell-mapping-w2b2h\" (UID: \"61a89c15-5367-4977-8be9-80f16a2ef3a5\") " pod="openstack/nova-cell0-cell-mapping-w2b2h"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.009305 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61a89c15-5367-4977-8be9-80f16a2ef3a5-scripts\") pod \"nova-cell0-cell-mapping-w2b2h\" (UID: \"61a89c15-5367-4977-8be9-80f16a2ef3a5\") " pod="openstack/nova-cell0-cell-mapping-w2b2h"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.017050 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61a89c15-5367-4977-8be9-80f16a2ef3a5-scripts\") pod \"nova-cell0-cell-mapping-w2b2h\" (UID: \"61a89c15-5367-4977-8be9-80f16a2ef3a5\") " pod="openstack/nova-cell0-cell-mapping-w2b2h"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.026621 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61a89c15-5367-4977-8be9-80f16a2ef3a5-config-data\") pod \"nova-cell0-cell-mapping-w2b2h\" (UID: \"61a89c15-5367-4977-8be9-80f16a2ef3a5\") " pod="openstack/nova-cell0-cell-mapping-w2b2h"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.027698 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61a89c15-5367-4977-8be9-80f16a2ef3a5-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-w2b2h\" (UID: \"61a89c15-5367-4977-8be9-80f16a2ef3a5\") " pod="openstack/nova-cell0-cell-mapping-w2b2h"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.034253 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.038005 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.050492 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.056455 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z5s9q\" (UniqueName: \"kubernetes.io/projected/61a89c15-5367-4977-8be9-80f16a2ef3a5-kube-api-access-z5s9q\") pod \"nova-cell0-cell-mapping-w2b2h\" (UID: \"61a89c15-5367-4977-8be9-80f16a2ef3a5\") " pod="openstack/nova-cell0-cell-mapping-w2b2h"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.066380 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.114533 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-w2b2h"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.132919 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6f979a4f-d850-4fca-9abf-34073ce42424-logs\") pod \"nova-api-0\" (UID: \"6f979a4f-d850-4fca-9abf-34073ce42424\") " pod="openstack/nova-api-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.132977 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b94b54e1-6f53-486e-aa50-2aafe9f78c26-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b94b54e1-6f53-486e-aa50-2aafe9f78c26\") " pod="openstack/nova-metadata-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.133078 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f979a4f-d850-4fca-9abf-34073ce42424-config-data\") pod \"nova-api-0\" (UID: \"6f979a4f-d850-4fca-9abf-34073ce42424\") " pod="openstack/nova-api-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.133192 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f979a4f-d850-4fca-9abf-34073ce42424-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6f979a4f-d850-4fca-9abf-34073ce42424\") " pod="openstack/nova-api-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.133212 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-st8jk\" (UniqueName: \"kubernetes.io/projected/6f979a4f-d850-4fca-9abf-34073ce42424-kube-api-access-st8jk\") pod \"nova-api-0\" (UID: \"6f979a4f-d850-4fca-9abf-34073ce42424\") " pod="openstack/nova-api-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.133252 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jcc4\" (UniqueName: \"kubernetes.io/projected/b94b54e1-6f53-486e-aa50-2aafe9f78c26-kube-api-access-5jcc4\") pod \"nova-metadata-0\" (UID: \"b94b54e1-6f53-486e-aa50-2aafe9f78c26\") " pod="openstack/nova-metadata-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.133383 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b94b54e1-6f53-486e-aa50-2aafe9f78c26-config-data\") pod \"nova-metadata-0\" (UID: \"b94b54e1-6f53-486e-aa50-2aafe9f78c26\") " pod="openstack/nova-metadata-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.133446 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b94b54e1-6f53-486e-aa50-2aafe9f78c26-logs\") pod \"nova-metadata-0\" (UID: \"b94b54e1-6f53-486e-aa50-2aafe9f78c26\") " pod="openstack/nova-metadata-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.133983 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6f979a4f-d850-4fca-9abf-34073ce42424-logs\") pod \"nova-api-0\" (UID: \"6f979a4f-d850-4fca-9abf-34073ce42424\") " pod="openstack/nova-api-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.139743 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.142875 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.150587 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.153442 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f979a4f-d850-4fca-9abf-34073ce42424-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6f979a4f-d850-4fca-9abf-34073ce42424\") " pod="openstack/nova-api-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.154570 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.172817 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-st8jk\" (UniqueName: \"kubernetes.io/projected/6f979a4f-d850-4fca-9abf-34073ce42424-kube-api-access-st8jk\") pod \"nova-api-0\" (UID: \"6f979a4f-d850-4fca-9abf-34073ce42424\") " pod="openstack/nova-api-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.176832 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f979a4f-d850-4fca-9abf-34073ce42424-config-data\") pod \"nova-api-0\" (UID: \"6f979a4f-d850-4fca-9abf-34073ce42424\") " pod="openstack/nova-api-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.221959 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59954b654c-lllgq"]
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.225990 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59954b654c-lllgq"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.237803 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59954b654c-lllgq"]
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.239052 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2455cd98-0827-4b25-8a4f-21514968b2ea-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2455cd98-0827-4b25-8a4f-21514968b2ea\") " pod="openstack/nova-scheduler-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.239085 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5llg\" (UniqueName: \"kubernetes.io/projected/2455cd98-0827-4b25-8a4f-21514968b2ea-kube-api-access-d5llg\") pod \"nova-scheduler-0\" (UID: \"2455cd98-0827-4b25-8a4f-21514968b2ea\") " pod="openstack/nova-scheduler-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.239113 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b94b54e1-6f53-486e-aa50-2aafe9f78c26-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b94b54e1-6f53-486e-aa50-2aafe9f78c26\") " pod="openstack/nova-metadata-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.239186 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2455cd98-0827-4b25-8a4f-21514968b2ea-config-data\") pod \"nova-scheduler-0\" (UID: \"2455cd98-0827-4b25-8a4f-21514968b2ea\") " pod="openstack/nova-scheduler-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.239204 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jcc4\" (UniqueName: \"kubernetes.io/projected/b94b54e1-6f53-486e-aa50-2aafe9f78c26-kube-api-access-5jcc4\") pod \"nova-metadata-0\" (UID: \"b94b54e1-6f53-486e-aa50-2aafe9f78c26\") " pod="openstack/nova-metadata-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.239253 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b94b54e1-6f53-486e-aa50-2aafe9f78c26-config-data\") pod \"nova-metadata-0\" (UID: \"b94b54e1-6f53-486e-aa50-2aafe9f78c26\") " pod="openstack/nova-metadata-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.239280 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b94b54e1-6f53-486e-aa50-2aafe9f78c26-logs\") pod \"nova-metadata-0\" (UID: \"b94b54e1-6f53-486e-aa50-2aafe9f78c26\") " pod="openstack/nova-metadata-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.239745 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b94b54e1-6f53-486e-aa50-2aafe9f78c26-logs\") pod \"nova-metadata-0\" (UID: \"b94b54e1-6f53-486e-aa50-2aafe9f78c26\") " pod="openstack/nova-metadata-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.242248 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.246440 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b94b54e1-6f53-486e-aa50-2aafe9f78c26-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b94b54e1-6f53-486e-aa50-2aafe9f78c26\") " pod="openstack/nova-metadata-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.256416 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.257614 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.260535 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b94b54e1-6f53-486e-aa50-2aafe9f78c26-config-data\") pod \"nova-metadata-0\" (UID: \"b94b54e1-6f53-486e-aa50-2aafe9f78c26\") " pod="openstack/nova-metadata-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.275040 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.277804 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.278845 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jcc4\" (UniqueName: \"kubernetes.io/projected/b94b54e1-6f53-486e-aa50-2aafe9f78c26-kube-api-access-5jcc4\") pod \"nova-metadata-0\" (UID: \"b94b54e1-6f53-486e-aa50-2aafe9f78c26\") " pod="openstack/nova-metadata-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.340912 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/500eb623-9eb7-41c6-938b-029cb9807cce-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"500eb623-9eb7-41c6-938b-029cb9807cce\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.340972 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqsr7\" (UniqueName: \"kubernetes.io/projected/268f96af-7a77-4afb-85c5-f39480ba4968-kube-api-access-bqsr7\") pod \"dnsmasq-dns-59954b654c-lllgq\" (UID: \"268f96af-7a77-4afb-85c5-f39480ba4968\") " pod="openstack/dnsmasq-dns-59954b654c-lllgq"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.341016 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2455cd98-0827-4b25-8a4f-21514968b2ea-config-data\") pod \"nova-scheduler-0\" (UID: \"2455cd98-0827-4b25-8a4f-21514968b2ea\") " pod="openstack/nova-scheduler-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.341108 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/500eb623-9eb7-41c6-938b-029cb9807cce-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"500eb623-9eb7-41c6-938b-029cb9807cce\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.341132 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/268f96af-7a77-4afb-85c5-f39480ba4968-config\") pod \"dnsmasq-dns-59954b654c-lllgq\" (UID: \"268f96af-7a77-4afb-85c5-f39480ba4968\") " pod="openstack/dnsmasq-dns-59954b654c-lllgq"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.341151 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/268f96af-7a77-4afb-85c5-f39480ba4968-dns-svc\") pod \"dnsmasq-dns-59954b654c-lllgq\" (UID: \"268f96af-7a77-4afb-85c5-f39480ba4968\") " pod="openstack/dnsmasq-dns-59954b654c-lllgq"
Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.341261 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/268f96af-7a77-4afb-85c5-f39480ba4968-ovsdbserver-sb\") pod \"dnsmasq-dns-59954b654c-lllgq\" (UID: \"268f96af-7a77-4afb-85c5-f39480ba4968\") " pod="openstack/dnsmasq-dns-59954b654c-lllgq"
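The mount side mirrors the teardown sequence seen earlier: for each volume of a newly admitted pod, "VerifyControllerAttachedVolume started" is followed by "MountVolume started" and "MountVolume.SetUp succeeded". Grouping the VerifyControllerAttachedVolume entries by pod shows, for example, dnsmasq-dns-59954b654c-lllgq carrying five volumes (kube-api-access-bqsr7, config, dns-svc, ovsdbserver-sb, ovsdbserver-nb). A sketch of that grouping, assuming the escaped-quote format shown (sample lines abridged from above):

import re
from collections import defaultdict

# Abridged copies of two VerifyControllerAttachedVolume entries from above;
# the log escapes inner quotes as \" and the samples reproduce that literally.
lines = [
    'I0122 07:20:08.341132 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \\"config\\" ... " pod="openstack/dnsmasq-dns-59954b654c-lllgq"',
    'I0122 07:20:08.341151 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \\"dns-svc\\" ... " pod="openstack/dnsmasq-dns-59954b654c-lllgq"',
]

volumes = defaultdict(set)
for l in lines:
    if "VerifyControllerAttachedVolume started" not in l:
        continue
    vol = re.search(r'for volume \\"([^"]+)\\"', l).group(1)
    pod = re.search(r'pod="([^"]+)"', l).group(1)
    volumes[pod].add(vol)

for pod, vols in sorted(volumes.items()):
    print(pod, sorted(vols))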
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/268f96af-7a77-4afb-85c5-f39480ba4968-ovsdbserver-sb\") pod \"dnsmasq-dns-59954b654c-lllgq\" (UID: \"268f96af-7a77-4afb-85c5-f39480ba4968\") " pod="openstack/dnsmasq-dns-59954b654c-lllgq" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.341294 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/268f96af-7a77-4afb-85c5-f39480ba4968-ovsdbserver-nb\") pod \"dnsmasq-dns-59954b654c-lllgq\" (UID: \"268f96af-7a77-4afb-85c5-f39480ba4968\") " pod="openstack/dnsmasq-dns-59954b654c-lllgq" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.341313 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5llg\" (UniqueName: \"kubernetes.io/projected/2455cd98-0827-4b25-8a4f-21514968b2ea-kube-api-access-d5llg\") pod \"nova-scheduler-0\" (UID: \"2455cd98-0827-4b25-8a4f-21514968b2ea\") " pod="openstack/nova-scheduler-0" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.341354 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2455cd98-0827-4b25-8a4f-21514968b2ea-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2455cd98-0827-4b25-8a4f-21514968b2ea\") " pod="openstack/nova-scheduler-0" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.341387 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbtgq\" (UniqueName: \"kubernetes.io/projected/500eb623-9eb7-41c6-938b-029cb9807cce-kube-api-access-jbtgq\") pod \"nova-cell1-novncproxy-0\" (UID: \"500eb623-9eb7-41c6-938b-029cb9807cce\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.347206 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2455cd98-0827-4b25-8a4f-21514968b2ea-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2455cd98-0827-4b25-8a4f-21514968b2ea\") " pod="openstack/nova-scheduler-0" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.348303 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2455cd98-0827-4b25-8a4f-21514968b2ea-config-data\") pod \"nova-scheduler-0\" (UID: \"2455cd98-0827-4b25-8a4f-21514968b2ea\") " pod="openstack/nova-scheduler-0" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.356755 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5llg\" (UniqueName: \"kubernetes.io/projected/2455cd98-0827-4b25-8a4f-21514968b2ea-kube-api-access-d5llg\") pod \"nova-scheduler-0\" (UID: \"2455cd98-0827-4b25-8a4f-21514968b2ea\") " pod="openstack/nova-scheduler-0" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.443746 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/500eb623-9eb7-41c6-938b-029cb9807cce-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"500eb623-9eb7-41c6-938b-029cb9807cce\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.443789 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqsr7\" 
(UniqueName: \"kubernetes.io/projected/268f96af-7a77-4afb-85c5-f39480ba4968-kube-api-access-bqsr7\") pod \"dnsmasq-dns-59954b654c-lllgq\" (UID: \"268f96af-7a77-4afb-85c5-f39480ba4968\") " pod="openstack/dnsmasq-dns-59954b654c-lllgq" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.443841 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/500eb623-9eb7-41c6-938b-029cb9807cce-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"500eb623-9eb7-41c6-938b-029cb9807cce\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.443877 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/268f96af-7a77-4afb-85c5-f39480ba4968-config\") pod \"dnsmasq-dns-59954b654c-lllgq\" (UID: \"268f96af-7a77-4afb-85c5-f39480ba4968\") " pod="openstack/dnsmasq-dns-59954b654c-lllgq" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.443898 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/268f96af-7a77-4afb-85c5-f39480ba4968-dns-svc\") pod \"dnsmasq-dns-59954b654c-lllgq\" (UID: \"268f96af-7a77-4afb-85c5-f39480ba4968\") " pod="openstack/dnsmasq-dns-59954b654c-lllgq" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.443947 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/268f96af-7a77-4afb-85c5-f39480ba4968-ovsdbserver-sb\") pod \"dnsmasq-dns-59954b654c-lllgq\" (UID: \"268f96af-7a77-4afb-85c5-f39480ba4968\") " pod="openstack/dnsmasq-dns-59954b654c-lllgq" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.443973 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/268f96af-7a77-4afb-85c5-f39480ba4968-ovsdbserver-nb\") pod \"dnsmasq-dns-59954b654c-lllgq\" (UID: \"268f96af-7a77-4afb-85c5-f39480ba4968\") " pod="openstack/dnsmasq-dns-59954b654c-lllgq" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.444000 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbtgq\" (UniqueName: \"kubernetes.io/projected/500eb623-9eb7-41c6-938b-029cb9807cce-kube-api-access-jbtgq\") pod \"nova-cell1-novncproxy-0\" (UID: \"500eb623-9eb7-41c6-938b-029cb9807cce\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.445068 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/268f96af-7a77-4afb-85c5-f39480ba4968-dns-svc\") pod \"dnsmasq-dns-59954b654c-lllgq\" (UID: \"268f96af-7a77-4afb-85c5-f39480ba4968\") " pod="openstack/dnsmasq-dns-59954b654c-lllgq" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.445145 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/268f96af-7a77-4afb-85c5-f39480ba4968-ovsdbserver-sb\") pod \"dnsmasq-dns-59954b654c-lllgq\" (UID: \"268f96af-7a77-4afb-85c5-f39480ba4968\") " pod="openstack/dnsmasq-dns-59954b654c-lllgq" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.445277 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/268f96af-7a77-4afb-85c5-f39480ba4968-config\") pod \"dnsmasq-dns-59954b654c-lllgq\" (UID: 
\"268f96af-7a77-4afb-85c5-f39480ba4968\") " pod="openstack/dnsmasq-dns-59954b654c-lllgq" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.445622 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/268f96af-7a77-4afb-85c5-f39480ba4968-ovsdbserver-nb\") pod \"dnsmasq-dns-59954b654c-lllgq\" (UID: \"268f96af-7a77-4afb-85c5-f39480ba4968\") " pod="openstack/dnsmasq-dns-59954b654c-lllgq" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.448192 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/500eb623-9eb7-41c6-938b-029cb9807cce-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"500eb623-9eb7-41c6-938b-029cb9807cce\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.448499 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/500eb623-9eb7-41c6-938b-029cb9807cce-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"500eb623-9eb7-41c6-938b-029cb9807cce\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.459538 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbtgq\" (UniqueName: \"kubernetes.io/projected/500eb623-9eb7-41c6-938b-029cb9807cce-kube-api-access-jbtgq\") pod \"nova-cell1-novncproxy-0\" (UID: \"500eb623-9eb7-41c6-938b-029cb9807cce\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.460396 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqsr7\" (UniqueName: \"kubernetes.io/projected/268f96af-7a77-4afb-85c5-f39480ba4968-kube-api-access-bqsr7\") pod \"dnsmasq-dns-59954b654c-lllgq\" (UID: \"268f96af-7a77-4afb-85c5-f39480ba4968\") " pod="openstack/dnsmasq-dns-59954b654c-lllgq" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.519571 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.548386 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.604329 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59954b654c-lllgq" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.619285 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.645488 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-w2b2h"] Jan 22 07:20:08 crc kubenswrapper[4982]: W0122 07:20:08.685477 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod61a89c15_5367_4977_8be9_80f16a2ef3a5.slice/crio-d2432bea05e543dc778368a76339b8aaf7abd4d1e63d48a6955196821c82cff8 WatchSource:0}: Error finding container d2432bea05e543dc778368a76339b8aaf7abd4d1e63d48a6955196821c82cff8: Status 404 returned error can't find the container with id d2432bea05e543dc778368a76339b8aaf7abd4d1e63d48a6955196821c82cff8 Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.790737 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.849928 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-kszzd"] Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.851173 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-kszzd" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.854264 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.855250 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.883707 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-kszzd"] Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.961206 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea8cecab-7e99-40fe-93da-eb0a9427a422-config-data\") pod \"nova-cell1-conductor-db-sync-kszzd\" (UID: \"ea8cecab-7e99-40fe-93da-eb0a9427a422\") " pod="openstack/nova-cell1-conductor-db-sync-kszzd" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.961285 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea8cecab-7e99-40fe-93da-eb0a9427a422-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-kszzd\" (UID: \"ea8cecab-7e99-40fe-93da-eb0a9427a422\") " pod="openstack/nova-cell1-conductor-db-sync-kszzd" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.961371 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea8cecab-7e99-40fe-93da-eb0a9427a422-scripts\") pod \"nova-cell1-conductor-db-sync-kszzd\" (UID: \"ea8cecab-7e99-40fe-93da-eb0a9427a422\") " pod="openstack/nova-cell1-conductor-db-sync-kszzd" Jan 22 07:20:08 crc kubenswrapper[4982]: I0122 07:20:08.961403 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wkzp\" (UniqueName: \"kubernetes.io/projected/ea8cecab-7e99-40fe-93da-eb0a9427a422-kube-api-access-2wkzp\") pod \"nova-cell1-conductor-db-sync-kszzd\" (UID: \"ea8cecab-7e99-40fe-93da-eb0a9427a422\") " pod="openstack/nova-cell1-conductor-db-sync-kszzd" Jan 22 07:20:09 crc kubenswrapper[4982]: I0122 07:20:09.029344 4982 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6f979a4f-d850-4fca-9abf-34073ce42424","Type":"ContainerStarted","Data":"013072656db1f260e5ed3ced2b5bbdc1b142d05022cdc59210cad12d85411bad"} Jan 22 07:20:09 crc kubenswrapper[4982]: I0122 07:20:09.032818 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-w2b2h" event={"ID":"61a89c15-5367-4977-8be9-80f16a2ef3a5","Type":"ContainerStarted","Data":"d2432bea05e543dc778368a76339b8aaf7abd4d1e63d48a6955196821c82cff8"} Jan 22 07:20:09 crc kubenswrapper[4982]: W0122 07:20:09.060170 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb94b54e1_6f53_486e_aa50_2aafe9f78c26.slice/crio-18f97fdb07365a92be064a5143fbd82d907eab2db0b6fb3ff9985b2c47841a1f WatchSource:0}: Error finding container 18f97fdb07365a92be064a5143fbd82d907eab2db0b6fb3ff9985b2c47841a1f: Status 404 returned error can't find the container with id 18f97fdb07365a92be064a5143fbd82d907eab2db0b6fb3ff9985b2c47841a1f Jan 22 07:20:09 crc kubenswrapper[4982]: I0122 07:20:09.061291 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 07:20:09 crc kubenswrapper[4982]: I0122 07:20:09.062702 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea8cecab-7e99-40fe-93da-eb0a9427a422-config-data\") pod \"nova-cell1-conductor-db-sync-kszzd\" (UID: \"ea8cecab-7e99-40fe-93da-eb0a9427a422\") " pod="openstack/nova-cell1-conductor-db-sync-kszzd" Jan 22 07:20:09 crc kubenswrapper[4982]: I0122 07:20:09.063555 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea8cecab-7e99-40fe-93da-eb0a9427a422-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-kszzd\" (UID: \"ea8cecab-7e99-40fe-93da-eb0a9427a422\") " pod="openstack/nova-cell1-conductor-db-sync-kszzd" Jan 22 07:20:09 crc kubenswrapper[4982]: I0122 07:20:09.063693 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea8cecab-7e99-40fe-93da-eb0a9427a422-scripts\") pod \"nova-cell1-conductor-db-sync-kszzd\" (UID: \"ea8cecab-7e99-40fe-93da-eb0a9427a422\") " pod="openstack/nova-cell1-conductor-db-sync-kszzd" Jan 22 07:20:09 crc kubenswrapper[4982]: I0122 07:20:09.063744 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wkzp\" (UniqueName: \"kubernetes.io/projected/ea8cecab-7e99-40fe-93da-eb0a9427a422-kube-api-access-2wkzp\") pod \"nova-cell1-conductor-db-sync-kszzd\" (UID: \"ea8cecab-7e99-40fe-93da-eb0a9427a422\") " pod="openstack/nova-cell1-conductor-db-sync-kszzd" Jan 22 07:20:09 crc kubenswrapper[4982]: I0122 07:20:09.071461 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea8cecab-7e99-40fe-93da-eb0a9427a422-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-kszzd\" (UID: \"ea8cecab-7e99-40fe-93da-eb0a9427a422\") " pod="openstack/nova-cell1-conductor-db-sync-kszzd" Jan 22 07:20:09 crc kubenswrapper[4982]: I0122 07:20:09.075195 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea8cecab-7e99-40fe-93da-eb0a9427a422-scripts\") pod \"nova-cell1-conductor-db-sync-kszzd\" (UID: \"ea8cecab-7e99-40fe-93da-eb0a9427a422\") " 
pod="openstack/nova-cell1-conductor-db-sync-kszzd" Jan 22 07:20:09 crc kubenswrapper[4982]: I0122 07:20:09.076127 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea8cecab-7e99-40fe-93da-eb0a9427a422-config-data\") pod \"nova-cell1-conductor-db-sync-kszzd\" (UID: \"ea8cecab-7e99-40fe-93da-eb0a9427a422\") " pod="openstack/nova-cell1-conductor-db-sync-kszzd" Jan 22 07:20:09 crc kubenswrapper[4982]: I0122 07:20:09.081396 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wkzp\" (UniqueName: \"kubernetes.io/projected/ea8cecab-7e99-40fe-93da-eb0a9427a422-kube-api-access-2wkzp\") pod \"nova-cell1-conductor-db-sync-kszzd\" (UID: \"ea8cecab-7e99-40fe-93da-eb0a9427a422\") " pod="openstack/nova-cell1-conductor-db-sync-kszzd" Jan 22 07:20:09 crc kubenswrapper[4982]: I0122 07:20:09.177965 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 07:20:09 crc kubenswrapper[4982]: I0122 07:20:09.181952 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-kszzd" Jan 22 07:20:09 crc kubenswrapper[4982]: I0122 07:20:09.194975 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 07:20:09 crc kubenswrapper[4982]: I0122 07:20:09.280015 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59954b654c-lllgq"] Jan 22 07:20:09 crc kubenswrapper[4982]: W0122 07:20:09.286017 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod268f96af_7a77_4afb_85c5_f39480ba4968.slice/crio-6073782d2b900315742335d44a9b76442d79d785639579c1ff19d04615c46f30 WatchSource:0}: Error finding container 6073782d2b900315742335d44a9b76442d79d785639579c1ff19d04615c46f30: Status 404 returned error can't find the container with id 6073782d2b900315742335d44a9b76442d79d785639579c1ff19d04615c46f30 Jan 22 07:20:09 crc kubenswrapper[4982]: I0122 07:20:09.684134 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-kszzd"] Jan 22 07:20:09 crc kubenswrapper[4982]: W0122 07:20:09.685447 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podea8cecab_7e99_40fe_93da_eb0a9427a422.slice/crio-38807e07d3f81443086721325f97d42543770565f6fb2cb67e5776418f588979 WatchSource:0}: Error finding container 38807e07d3f81443086721325f97d42543770565f6fb2cb67e5776418f588979: Status 404 returned error can't find the container with id 38807e07d3f81443086721325f97d42543770565f6fb2cb67e5776418f588979 Jan 22 07:20:10 crc kubenswrapper[4982]: I0122 07:20:10.079142 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"500eb623-9eb7-41c6-938b-029cb9807cce","Type":"ContainerStarted","Data":"f8190e9200eaa63dfc1cda958d79266c85e240f81e0345bb8c7db3bdd52da720"} Jan 22 07:20:10 crc kubenswrapper[4982]: I0122 07:20:10.079212 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"500eb623-9eb7-41c6-938b-029cb9807cce","Type":"ContainerStarted","Data":"890ba15d77be1fad6520a7ed0c92132be0ae80e2adc5f43dc082a6485bf8c6ff"} Jan 22 07:20:10 crc kubenswrapper[4982]: I0122 07:20:10.100477 4982 generic.go:334] "Generic (PLEG): container finished" podID="268f96af-7a77-4afb-85c5-f39480ba4968" 
containerID="e841a55ecef172e405741273a9f3c10b2d646fe228b371fd81c93f9918617296" exitCode=0 Jan 22 07:20:10 crc kubenswrapper[4982]: I0122 07:20:10.100941 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59954b654c-lllgq" event={"ID":"268f96af-7a77-4afb-85c5-f39480ba4968","Type":"ContainerDied","Data":"e841a55ecef172e405741273a9f3c10b2d646fe228b371fd81c93f9918617296"} Jan 22 07:20:10 crc kubenswrapper[4982]: I0122 07:20:10.101357 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59954b654c-lllgq" event={"ID":"268f96af-7a77-4afb-85c5-f39480ba4968","Type":"ContainerStarted","Data":"6073782d2b900315742335d44a9b76442d79d785639579c1ff19d04615c46f30"} Jan 22 07:20:10 crc kubenswrapper[4982]: I0122 07:20:10.128470 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2455cd98-0827-4b25-8a4f-21514968b2ea","Type":"ContainerStarted","Data":"500ce2176da325fb5e94f133f09a7de2be123fa6c5c5916965f3755057860ef0"} Jan 22 07:20:10 crc kubenswrapper[4982]: I0122 07:20:10.128523 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2455cd98-0827-4b25-8a4f-21514968b2ea","Type":"ContainerStarted","Data":"bd9c890456ee904d4f3b03a7b512924b193882d250f715b3a1a026664077acf7"} Jan 22 07:20:10 crc kubenswrapper[4982]: I0122 07:20:10.146351 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.146333811 podStartE2EDuration="2.146333811s" podCreationTimestamp="2026-01-22 07:20:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:20:10.145332344 +0000 UTC m=+5670.983970347" watchObservedRunningTime="2026-01-22 07:20:10.146333811 +0000 UTC m=+5670.984971814" Jan 22 07:20:10 crc kubenswrapper[4982]: I0122 07:20:10.151582 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b94b54e1-6f53-486e-aa50-2aafe9f78c26","Type":"ContainerStarted","Data":"f252716ca30b9f777ded287ac1f332c844618a02aeddc71fd21218a10b331e90"} Jan 22 07:20:10 crc kubenswrapper[4982]: I0122 07:20:10.151636 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b94b54e1-6f53-486e-aa50-2aafe9f78c26","Type":"ContainerStarted","Data":"18f97fdb07365a92be064a5143fbd82d907eab2db0b6fb3ff9985b2c47841a1f"} Jan 22 07:20:10 crc kubenswrapper[4982]: I0122 07:20:10.161922 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-kszzd" event={"ID":"ea8cecab-7e99-40fe-93da-eb0a9427a422","Type":"ContainerStarted","Data":"38807e07d3f81443086721325f97d42543770565f6fb2cb67e5776418f588979"} Jan 22 07:20:10 crc kubenswrapper[4982]: I0122 07:20:10.163580 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-w2b2h" event={"ID":"61a89c15-5367-4977-8be9-80f16a2ef3a5","Type":"ContainerStarted","Data":"276bc140d3653f77c7c4033e854c9e0b888327a783659c51968322d8e2fd52ed"} Jan 22 07:20:10 crc kubenswrapper[4982]: I0122 07:20:10.172241 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6f979a4f-d850-4fca-9abf-34073ce42424","Type":"ContainerStarted","Data":"2782313b336cdc7a7befeae93234349361058999655c9e01fec69c3ebd539c83"} Jan 22 07:20:10 crc kubenswrapper[4982]: I0122 07:20:10.188913 4982 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.188893539 podStartE2EDuration="2.188893539s" podCreationTimestamp="2026-01-22 07:20:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:20:10.18853643 +0000 UTC m=+5671.027174433" watchObservedRunningTime="2026-01-22 07:20:10.188893539 +0000 UTC m=+5671.027531542" Jan 22 07:20:10 crc kubenswrapper[4982]: I0122 07:20:10.274901 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-w2b2h" podStartSLOduration=3.2748401879999998 podStartE2EDuration="3.274840188s" podCreationTimestamp="2026-01-22 07:20:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:20:10.266118782 +0000 UTC m=+5671.104756795" watchObservedRunningTime="2026-01-22 07:20:10.274840188 +0000 UTC m=+5671.113478191" Jan 22 07:20:11 crc kubenswrapper[4982]: I0122 07:20:11.180644 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6f979a4f-d850-4fca-9abf-34073ce42424","Type":"ContainerStarted","Data":"3f9f582dc5ad0af958975ce1ddf7c437b6bf1e174b4bd806ae44bb1308e61b70"} Jan 22 07:20:11 crc kubenswrapper[4982]: I0122 07:20:11.638204 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-ht6kq"] Jan 22 07:20:11 crc kubenswrapper[4982]: I0122 07:20:11.640112 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ht6kq" Jan 22 07:20:11 crc kubenswrapper[4982]: I0122 07:20:11.654597 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ht6kq"] Jan 22 07:20:11 crc kubenswrapper[4982]: I0122 07:20:11.736980 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a531b202-6417-42a1-8036-0a3a7b68caa4-utilities\") pod \"certified-operators-ht6kq\" (UID: \"a531b202-6417-42a1-8036-0a3a7b68caa4\") " pod="openshift-marketplace/certified-operators-ht6kq" Jan 22 07:20:11 crc kubenswrapper[4982]: I0122 07:20:11.737049 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjf5s\" (UniqueName: \"kubernetes.io/projected/a531b202-6417-42a1-8036-0a3a7b68caa4-kube-api-access-pjf5s\") pod \"certified-operators-ht6kq\" (UID: \"a531b202-6417-42a1-8036-0a3a7b68caa4\") " pod="openshift-marketplace/certified-operators-ht6kq" Jan 22 07:20:11 crc kubenswrapper[4982]: I0122 07:20:11.737114 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a531b202-6417-42a1-8036-0a3a7b68caa4-catalog-content\") pod \"certified-operators-ht6kq\" (UID: \"a531b202-6417-42a1-8036-0a3a7b68caa4\") " pod="openshift-marketplace/certified-operators-ht6kq" Jan 22 07:20:11 crc kubenswrapper[4982]: I0122 07:20:11.838695 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjf5s\" (UniqueName: \"kubernetes.io/projected/a531b202-6417-42a1-8036-0a3a7b68caa4-kube-api-access-pjf5s\") pod \"certified-operators-ht6kq\" (UID: \"a531b202-6417-42a1-8036-0a3a7b68caa4\") " pod="openshift-marketplace/certified-operators-ht6kq" Jan 22 07:20:11 crc kubenswrapper[4982]: I0122 
07:20:11.838786 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a531b202-6417-42a1-8036-0a3a7b68caa4-catalog-content\") pod \"certified-operators-ht6kq\" (UID: \"a531b202-6417-42a1-8036-0a3a7b68caa4\") " pod="openshift-marketplace/certified-operators-ht6kq" Jan 22 07:20:11 crc kubenswrapper[4982]: I0122 07:20:11.839007 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a531b202-6417-42a1-8036-0a3a7b68caa4-utilities\") pod \"certified-operators-ht6kq\" (UID: \"a531b202-6417-42a1-8036-0a3a7b68caa4\") " pod="openshift-marketplace/certified-operators-ht6kq" Jan 22 07:20:11 crc kubenswrapper[4982]: I0122 07:20:11.839578 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a531b202-6417-42a1-8036-0a3a7b68caa4-utilities\") pod \"certified-operators-ht6kq\" (UID: \"a531b202-6417-42a1-8036-0a3a7b68caa4\") " pod="openshift-marketplace/certified-operators-ht6kq" Jan 22 07:20:11 crc kubenswrapper[4982]: I0122 07:20:11.840018 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a531b202-6417-42a1-8036-0a3a7b68caa4-catalog-content\") pod \"certified-operators-ht6kq\" (UID: \"a531b202-6417-42a1-8036-0a3a7b68caa4\") " pod="openshift-marketplace/certified-operators-ht6kq" Jan 22 07:20:11 crc kubenswrapper[4982]: I0122 07:20:11.870900 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjf5s\" (UniqueName: \"kubernetes.io/projected/a531b202-6417-42a1-8036-0a3a7b68caa4-kube-api-access-pjf5s\") pod \"certified-operators-ht6kq\" (UID: \"a531b202-6417-42a1-8036-0a3a7b68caa4\") " pod="openshift-marketplace/certified-operators-ht6kq" Jan 22 07:20:11 crc kubenswrapper[4982]: I0122 07:20:11.956262 4982 util.go:30] "No sandbox for pod can be found. 
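[The pod_startup_latency_tracker.go:104 "Observed pod startup duration" entries in this stretch carry both podStartSLOduration (which excludes image-pull time) and podStartE2EDuration; the two only differ when firstStartedPulling/lastFinishedPulling are non-zero, as for certified-operators-ht6kq further below. A sketch extracting and ranking them, assuming the same hypothetical kubelet.log copy:]

```python
import re

pat = re.compile(r'"Observed pod startup duration" pod="([^"]+)" '
                 r'podStartSLOduration=([0-9.]+) podStartE2EDuration="([^"]+)"')
rows = []
for line in open("kubelet.log", errors="replace"):  # hypothetical path
    m = pat.search(line)
    if m:
        rows.append((float(m.group(2)), m.group(3), m.group(1)))
for slo, e2e, pod in sorted(rows, reverse=True):
    print(f"SLO {slo:7.3f}s  e2e {e2e:>12}  {pod}")
```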
Need to start a new one" pod="openshift-marketplace/certified-operators-ht6kq" Jan 22 07:20:12 crc kubenswrapper[4982]: I0122 07:20:12.208362 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59954b654c-lllgq" event={"ID":"268f96af-7a77-4afb-85c5-f39480ba4968","Type":"ContainerStarted","Data":"120235f9d9478b2e75972df683a673a05304f0d5e4733caa7c187b972a87fdd7"} Jan 22 07:20:12 crc kubenswrapper[4982]: I0122 07:20:12.208931 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-59954b654c-lllgq" Jan 22 07:20:12 crc kubenswrapper[4982]: I0122 07:20:12.216397 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b94b54e1-6f53-486e-aa50-2aafe9f78c26","Type":"ContainerStarted","Data":"0437f8f24df9aa84c962c5397e033b7693f74cbb79bd5c1a41bdf4ce8fc1c98e"} Jan 22 07:20:12 crc kubenswrapper[4982]: I0122 07:20:12.235649 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-kszzd" event={"ID":"ea8cecab-7e99-40fe-93da-eb0a9427a422","Type":"ContainerStarted","Data":"37dd053aa395863a7ed087aef355f968037903e695d5b8024276bcf8476373a1"} Jan 22 07:20:12 crc kubenswrapper[4982]: I0122 07:20:12.245940 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-59954b654c-lllgq" podStartSLOduration=4.245924786 podStartE2EDuration="4.245924786s" podCreationTimestamp="2026-01-22 07:20:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:20:12.240796027 +0000 UTC m=+5673.079434030" watchObservedRunningTime="2026-01-22 07:20:12.245924786 +0000 UTC m=+5673.084562789" Jan 22 07:20:12 crc kubenswrapper[4982]: I0122 07:20:12.260477 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-kszzd" podStartSLOduration=4.260456027 podStartE2EDuration="4.260456027s" podCreationTimestamp="2026-01-22 07:20:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:20:12.254483756 +0000 UTC m=+5673.093121779" watchObservedRunningTime="2026-01-22 07:20:12.260456027 +0000 UTC m=+5673.099094030" Jan 22 07:20:12 crc kubenswrapper[4982]: I0122 07:20:12.285608 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=5.285589185 podStartE2EDuration="5.285589185s" podCreationTimestamp="2026-01-22 07:20:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:20:12.282682217 +0000 UTC m=+5673.121320220" watchObservedRunningTime="2026-01-22 07:20:12.285589185 +0000 UTC m=+5673.124227188" Jan 22 07:20:12 crc kubenswrapper[4982]: I0122 07:20:12.323029 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=5.322993835 podStartE2EDuration="5.322993835s" podCreationTimestamp="2026-01-22 07:20:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:20:12.300024125 +0000 UTC m=+5673.138662138" watchObservedRunningTime="2026-01-22 07:20:12.322993835 +0000 UTC m=+5673.161631838" Jan 22 07:20:12 crc kubenswrapper[4982]: I0122 07:20:12.469766 4982 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-marketplace/certified-operators-ht6kq"] Jan 22 07:20:13 crc kubenswrapper[4982]: I0122 07:20:13.243527 4982 generic.go:334] "Generic (PLEG): container finished" podID="a531b202-6417-42a1-8036-0a3a7b68caa4" containerID="20d3e258c153970ca3181298c669cfa7b0012cf5071a4568d73409fe0911a29b" exitCode=0 Jan 22 07:20:13 crc kubenswrapper[4982]: I0122 07:20:13.243705 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ht6kq" event={"ID":"a531b202-6417-42a1-8036-0a3a7b68caa4","Type":"ContainerDied","Data":"20d3e258c153970ca3181298c669cfa7b0012cf5071a4568d73409fe0911a29b"} Jan 22 07:20:13 crc kubenswrapper[4982]: I0122 07:20:13.243987 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ht6kq" event={"ID":"a531b202-6417-42a1-8036-0a3a7b68caa4","Type":"ContainerStarted","Data":"b9ba92e4dab4ff201a01128d534c231a7b8df4a25cd071479d41e5c33ddbc170"} Jan 22 07:20:13 crc kubenswrapper[4982]: I0122 07:20:13.245414 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 07:20:13 crc kubenswrapper[4982]: I0122 07:20:13.519981 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 22 07:20:13 crc kubenswrapper[4982]: I0122 07:20:13.520127 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 22 07:20:13 crc kubenswrapper[4982]: I0122 07:20:13.548540 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Jan 22 07:20:13 crc kubenswrapper[4982]: I0122 07:20:13.619563 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Jan 22 07:20:16 crc kubenswrapper[4982]: I0122 07:20:16.280219 4982 generic.go:334] "Generic (PLEG): container finished" podID="61a89c15-5367-4977-8be9-80f16a2ef3a5" containerID="276bc140d3653f77c7c4033e854c9e0b888327a783659c51968322d8e2fd52ed" exitCode=0 Jan 22 07:20:16 crc kubenswrapper[4982]: I0122 07:20:16.280293 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-w2b2h" event={"ID":"61a89c15-5367-4977-8be9-80f16a2ef3a5","Type":"ContainerDied","Data":"276bc140d3653f77c7c4033e854c9e0b888327a783659c51968322d8e2fd52ed"} Jan 22 07:20:16 crc kubenswrapper[4982]: I0122 07:20:16.284738 4982 generic.go:334] "Generic (PLEG): container finished" podID="a531b202-6417-42a1-8036-0a3a7b68caa4" containerID="cc9e3637809723a4daa641951eb2622d85f8cc093b387b4d4ea06d2b61695bd3" exitCode=0 Jan 22 07:20:16 crc kubenswrapper[4982]: I0122 07:20:16.284781 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ht6kq" event={"ID":"a531b202-6417-42a1-8036-0a3a7b68caa4","Type":"ContainerDied","Data":"cc9e3637809723a4daa641951eb2622d85f8cc093b387b4d4ea06d2b61695bd3"} Jan 22 07:20:17 crc kubenswrapper[4982]: I0122 07:20:17.297203 4982 generic.go:334] "Generic (PLEG): container finished" podID="ea8cecab-7e99-40fe-93da-eb0a9427a422" containerID="37dd053aa395863a7ed087aef355f968037903e695d5b8024276bcf8476373a1" exitCode=0 Jan 22 07:20:17 crc kubenswrapper[4982]: I0122 07:20:17.297323 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-kszzd" event={"ID":"ea8cecab-7e99-40fe-93da-eb0a9427a422","Type":"ContainerDied","Data":"37dd053aa395863a7ed087aef355f968037903e695d5b8024276bcf8476373a1"} Jan 22 07:20:17 crc 
kubenswrapper[4982]: I0122 07:20:17.687893 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-w2b2h" Jan 22 07:20:17 crc kubenswrapper[4982]: I0122 07:20:17.767371 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61a89c15-5367-4977-8be9-80f16a2ef3a5-combined-ca-bundle\") pod \"61a89c15-5367-4977-8be9-80f16a2ef3a5\" (UID: \"61a89c15-5367-4977-8be9-80f16a2ef3a5\") " Jan 22 07:20:17 crc kubenswrapper[4982]: I0122 07:20:17.767457 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61a89c15-5367-4977-8be9-80f16a2ef3a5-scripts\") pod \"61a89c15-5367-4977-8be9-80f16a2ef3a5\" (UID: \"61a89c15-5367-4977-8be9-80f16a2ef3a5\") " Jan 22 07:20:17 crc kubenswrapper[4982]: I0122 07:20:17.767620 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z5s9q\" (UniqueName: \"kubernetes.io/projected/61a89c15-5367-4977-8be9-80f16a2ef3a5-kube-api-access-z5s9q\") pod \"61a89c15-5367-4977-8be9-80f16a2ef3a5\" (UID: \"61a89c15-5367-4977-8be9-80f16a2ef3a5\") " Jan 22 07:20:17 crc kubenswrapper[4982]: I0122 07:20:17.767709 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61a89c15-5367-4977-8be9-80f16a2ef3a5-config-data\") pod \"61a89c15-5367-4977-8be9-80f16a2ef3a5\" (UID: \"61a89c15-5367-4977-8be9-80f16a2ef3a5\") " Jan 22 07:20:17 crc kubenswrapper[4982]: I0122 07:20:17.774784 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61a89c15-5367-4977-8be9-80f16a2ef3a5-kube-api-access-z5s9q" (OuterVolumeSpecName: "kube-api-access-z5s9q") pod "61a89c15-5367-4977-8be9-80f16a2ef3a5" (UID: "61a89c15-5367-4977-8be9-80f16a2ef3a5"). InnerVolumeSpecName "kube-api-access-z5s9q". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:20:17 crc kubenswrapper[4982]: I0122 07:20:17.779177 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61a89c15-5367-4977-8be9-80f16a2ef3a5-scripts" (OuterVolumeSpecName: "scripts") pod "61a89c15-5367-4977-8be9-80f16a2ef3a5" (UID: "61a89c15-5367-4977-8be9-80f16a2ef3a5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:20:17 crc kubenswrapper[4982]: I0122 07:20:17.797940 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61a89c15-5367-4977-8be9-80f16a2ef3a5-config-data" (OuterVolumeSpecName: "config-data") pod "61a89c15-5367-4977-8be9-80f16a2ef3a5" (UID: "61a89c15-5367-4977-8be9-80f16a2ef3a5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:20:17 crc kubenswrapper[4982]: I0122 07:20:17.803354 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61a89c15-5367-4977-8be9-80f16a2ef3a5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "61a89c15-5367-4977-8be9-80f16a2ef3a5" (UID: "61a89c15-5367-4977-8be9-80f16a2ef3a5"). InnerVolumeSpecName "combined-ca-bundle". 
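[Teardown mirrors the mount flow: reconciler_common.go:159 logs "operationExecutor.UnmountVolume started", operation_generator.go:803 logs "UnmountVolume.TearDown succeeded", and reconciler_common.go:293 finally reports "Volume detached" once the volume is gone from the node's actual state. A sketch that flags any unmount that never reaches the detached state, again assuming a local kubelet.log copy:]

```python
import re

uniq = re.compile(r'UniqueName: \\"([^\\]+)\\"')
pending = set()
for line in open("kubelet.log", errors="replace"):  # hypothetical path
    m = uniq.search(line)
    if not m:
        continue
    if "operationExecutor.UnmountVolume started" in line:
        pending.add(m.group(1))
    elif "Volume detached for volume" in line:
        pending.discard(m.group(1))
print("unmounts without a matching detach:", sorted(pending) or "none")
```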
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:20:17 crc kubenswrapper[4982]: I0122 07:20:17.873007 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61a89c15-5367-4977-8be9-80f16a2ef3a5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:17 crc kubenswrapper[4982]: I0122 07:20:17.873098 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61a89c15-5367-4977-8be9-80f16a2ef3a5-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:17 crc kubenswrapper[4982]: I0122 07:20:17.873127 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z5s9q\" (UniqueName: \"kubernetes.io/projected/61a89c15-5367-4977-8be9-80f16a2ef3a5-kube-api-access-z5s9q\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:17 crc kubenswrapper[4982]: I0122 07:20:17.873140 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61a89c15-5367-4977-8be9-80f16a2ef3a5-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:18 crc kubenswrapper[4982]: I0122 07:20:18.243006 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 22 07:20:18 crc kubenswrapper[4982]: I0122 07:20:18.243069 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 22 07:20:18 crc kubenswrapper[4982]: I0122 07:20:18.306458 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ht6kq" event={"ID":"a531b202-6417-42a1-8036-0a3a7b68caa4","Type":"ContainerStarted","Data":"d2b51e77c1399e19d3413898a8d5e4e22fbb92517eaa3213d2db3db35ec58906"} Jan 22 07:20:18 crc kubenswrapper[4982]: I0122 07:20:18.308328 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-w2b2h" Jan 22 07:20:18 crc kubenswrapper[4982]: I0122 07:20:18.308358 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-w2b2h" event={"ID":"61a89c15-5367-4977-8be9-80f16a2ef3a5","Type":"ContainerDied","Data":"d2432bea05e543dc778368a76339b8aaf7abd4d1e63d48a6955196821c82cff8"} Jan 22 07:20:18 crc kubenswrapper[4982]: I0122 07:20:18.308742 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d2432bea05e543dc778368a76339b8aaf7abd4d1e63d48a6955196821c82cff8" Jan 22 07:20:18 crc kubenswrapper[4982]: I0122 07:20:18.355255 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-ht6kq" podStartSLOduration=2.871801913 podStartE2EDuration="7.35523281s" podCreationTimestamp="2026-01-22 07:20:11 +0000 UTC" firstStartedPulling="2026-01-22 07:20:13.24518746 +0000 UTC m=+5674.083825463" lastFinishedPulling="2026-01-22 07:20:17.728618337 +0000 UTC m=+5678.567256360" observedRunningTime="2026-01-22 07:20:18.340199935 +0000 UTC m=+5679.178837938" watchObservedRunningTime="2026-01-22 07:20:18.35523281 +0000 UTC m=+5679.193870803" Jan 22 07:20:18 crc kubenswrapper[4982]: I0122 07:20:18.507363 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 07:20:18 crc kubenswrapper[4982]: I0122 07:20:18.507651 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="2455cd98-0827-4b25-8a4f-21514968b2ea" containerName="nova-scheduler-scheduler" containerID="cri-o://500ce2176da325fb5e94f133f09a7de2be123fa6c5c5916965f3755057860ef0" gracePeriod=30 Jan 22 07:20:18 crc kubenswrapper[4982]: I0122 07:20:18.519609 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 22 07:20:18 crc kubenswrapper[4982]: I0122 07:20:18.519823 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6f979a4f-d850-4fca-9abf-34073ce42424" containerName="nova-api-log" containerID="cri-o://2782313b336cdc7a7befeae93234349361058999655c9e01fec69c3ebd539c83" gracePeriod=30 Jan 22 07:20:18 crc kubenswrapper[4982]: I0122 07:20:18.519976 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6f979a4f-d850-4fca-9abf-34073ce42424" containerName="nova-api-api" containerID="cri-o://3f9f582dc5ad0af958975ce1ddf7c437b6bf1e174b4bd806ae44bb1308e61b70" gracePeriod=30 Jan 22 07:20:18 crc kubenswrapper[4982]: I0122 07:20:18.520429 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 22 07:20:18 crc kubenswrapper[4982]: I0122 07:20:18.520452 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 22 07:20:18 crc kubenswrapper[4982]: I0122 07:20:18.530237 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6f979a4f-d850-4fca-9abf-34073ce42424" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.57:8774/\": EOF" Jan 22 07:20:18 crc kubenswrapper[4982]: I0122 07:20:18.530286 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6f979a4f-d850-4fca-9abf-34073ce42424" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.57:8774/\": EOF" Jan 22 07:20:18 crc kubenswrapper[4982]: I0122 
07:20:18.610618 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 07:20:18 crc kubenswrapper[4982]: I0122 07:20:18.617012 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-59954b654c-lllgq" Jan 22 07:20:18 crc kubenswrapper[4982]: I0122 07:20:18.620833 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Jan 22 07:20:18 crc kubenswrapper[4982]: I0122 07:20:18.684964 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Jan 22 07:20:18 crc kubenswrapper[4982]: I0122 07:20:18.720067 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f66ff87f9-twgk7"] Jan 22 07:20:18 crc kubenswrapper[4982]: I0122 07:20:18.720281 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-f66ff87f9-twgk7" podUID="e6ecea22-10f0-439a-94d1-eb095d24fa73" containerName="dnsmasq-dns" containerID="cri-o://db35e60ede15ea834b698e6d9c87973496cf0f9256898dbf4c9214e03ff2dab4" gracePeriod=10 Jan 22 07:20:18 crc kubenswrapper[4982]: I0122 07:20:18.849815 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-kszzd" Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.005386 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea8cecab-7e99-40fe-93da-eb0a9427a422-scripts\") pod \"ea8cecab-7e99-40fe-93da-eb0a9427a422\" (UID: \"ea8cecab-7e99-40fe-93da-eb0a9427a422\") " Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.006092 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2wkzp\" (UniqueName: \"kubernetes.io/projected/ea8cecab-7e99-40fe-93da-eb0a9427a422-kube-api-access-2wkzp\") pod \"ea8cecab-7e99-40fe-93da-eb0a9427a422\" (UID: \"ea8cecab-7e99-40fe-93da-eb0a9427a422\") " Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.006142 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea8cecab-7e99-40fe-93da-eb0a9427a422-combined-ca-bundle\") pod \"ea8cecab-7e99-40fe-93da-eb0a9427a422\" (UID: \"ea8cecab-7e99-40fe-93da-eb0a9427a422\") " Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.006270 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea8cecab-7e99-40fe-93da-eb0a9427a422-config-data\") pod \"ea8cecab-7e99-40fe-93da-eb0a9427a422\" (UID: \"ea8cecab-7e99-40fe-93da-eb0a9427a422\") " Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.019643 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea8cecab-7e99-40fe-93da-eb0a9427a422-kube-api-access-2wkzp" (OuterVolumeSpecName: "kube-api-access-2wkzp") pod "ea8cecab-7e99-40fe-93da-eb0a9427a422" (UID: "ea8cecab-7e99-40fe-93da-eb0a9427a422"). InnerVolumeSpecName "kube-api-access-2wkzp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.023094 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea8cecab-7e99-40fe-93da-eb0a9427a422-scripts" (OuterVolumeSpecName: "scripts") pod "ea8cecab-7e99-40fe-93da-eb0a9427a422" (UID: "ea8cecab-7e99-40fe-93da-eb0a9427a422"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.061247 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea8cecab-7e99-40fe-93da-eb0a9427a422-config-data" (OuterVolumeSpecName: "config-data") pod "ea8cecab-7e99-40fe-93da-eb0a9427a422" (UID: "ea8cecab-7e99-40fe-93da-eb0a9427a422"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.101985 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea8cecab-7e99-40fe-93da-eb0a9427a422-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ea8cecab-7e99-40fe-93da-eb0a9427a422" (UID: "ea8cecab-7e99-40fe-93da-eb0a9427a422"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.108146 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2wkzp\" (UniqueName: \"kubernetes.io/projected/ea8cecab-7e99-40fe-93da-eb0a9427a422-kube-api-access-2wkzp\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.108206 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea8cecab-7e99-40fe-93da-eb0a9427a422-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.108217 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea8cecab-7e99-40fe-93da-eb0a9427a422-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.108226 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea8cecab-7e99-40fe-93da-eb0a9427a422-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.325481 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-kszzd" event={"ID":"ea8cecab-7e99-40fe-93da-eb0a9427a422","Type":"ContainerDied","Data":"38807e07d3f81443086721325f97d42543770565f6fb2cb67e5776418f588979"} Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.325526 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="38807e07d3f81443086721325f97d42543770565f6fb2cb67e5776418f588979" Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.325616 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-kszzd" Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.328648 4982 generic.go:334] "Generic (PLEG): container finished" podID="6f979a4f-d850-4fca-9abf-34073ce42424" containerID="2782313b336cdc7a7befeae93234349361058999655c9e01fec69c3ebd539c83" exitCode=143 Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.328707 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6f979a4f-d850-4fca-9abf-34073ce42424","Type":"ContainerDied","Data":"2782313b336cdc7a7befeae93234349361058999655c9e01fec69c3ebd539c83"} Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.331073 4982 generic.go:334] "Generic (PLEG): container finished" podID="e6ecea22-10f0-439a-94d1-eb095d24fa73" containerID="db35e60ede15ea834b698e6d9c87973496cf0f9256898dbf4c9214e03ff2dab4" exitCode=0 Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.332547 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f66ff87f9-twgk7" event={"ID":"e6ecea22-10f0-439a-94d1-eb095d24fa73","Type":"ContainerDied","Data":"db35e60ede15ea834b698e6d9c87973496cf0f9256898dbf4c9214e03ff2dab4"} Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.333139 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b94b54e1-6f53-486e-aa50-2aafe9f78c26" containerName="nova-metadata-log" containerID="cri-o://f252716ca30b9f777ded287ac1f332c844618a02aeddc71fd21218a10b331e90" gracePeriod=30 Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.333219 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b94b54e1-6f53-486e-aa50-2aafe9f78c26" containerName="nova-metadata-metadata" containerID="cri-o://0437f8f24df9aa84c962c5397e033b7693f74cbb79bd5c1a41bdf4ce8fc1c98e" gracePeriod=30 Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.349803 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.352915 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="b94b54e1-6f53-486e-aa50-2aafe9f78c26" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.58:8775/\": EOF" Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.353148 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="b94b54e1-6f53-486e-aa50-2aafe9f78c26" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.58:8775/\": EOF" Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.360552 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-f66ff87f9-twgk7" podUID="e6ecea22-10f0-439a-94d1-eb095d24fa73" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.1.45:5353: connect: connection refused" Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.455912 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 22 07:20:19 crc kubenswrapper[4982]: E0122 07:20:19.456746 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea8cecab-7e99-40fe-93da-eb0a9427a422" containerName="nova-cell1-conductor-db-sync" Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.456762 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea8cecab-7e99-40fe-93da-eb0a9427a422" 
containerName="nova-cell1-conductor-db-sync" Jan 22 07:20:19 crc kubenswrapper[4982]: E0122 07:20:19.456788 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61a89c15-5367-4977-8be9-80f16a2ef3a5" containerName="nova-manage" Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.456794 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="61a89c15-5367-4977-8be9-80f16a2ef3a5" containerName="nova-manage" Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.457592 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea8cecab-7e99-40fe-93da-eb0a9427a422" containerName="nova-cell1-conductor-db-sync" Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.457662 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="61a89c15-5367-4977-8be9-80f16a2ef3a5" containerName="nova-manage" Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.458914 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.462605 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.510679 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.530042 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c198eda0-01d3-443e-b342-5039d9cfb0d5-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"c198eda0-01d3-443e-b342-5039d9cfb0d5\") " pod="openstack/nova-cell1-conductor-0" Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.530117 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lclj9\" (UniqueName: \"kubernetes.io/projected/c198eda0-01d3-443e-b342-5039d9cfb0d5-kube-api-access-lclj9\") pod \"nova-cell1-conductor-0\" (UID: \"c198eda0-01d3-443e-b342-5039d9cfb0d5\") " pod="openstack/nova-cell1-conductor-0" Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.530242 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c198eda0-01d3-443e-b342-5039d9cfb0d5-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"c198eda0-01d3-443e-b342-5039d9cfb0d5\") " pod="openstack/nova-cell1-conductor-0" Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.631512 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c198eda0-01d3-443e-b342-5039d9cfb0d5-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"c198eda0-01d3-443e-b342-5039d9cfb0d5\") " pod="openstack/nova-cell1-conductor-0" Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.631585 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lclj9\" (UniqueName: \"kubernetes.io/projected/c198eda0-01d3-443e-b342-5039d9cfb0d5-kube-api-access-lclj9\") pod \"nova-cell1-conductor-0\" (UID: \"c198eda0-01d3-443e-b342-5039d9cfb0d5\") " pod="openstack/nova-cell1-conductor-0" Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.631743 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c198eda0-01d3-443e-b342-5039d9cfb0d5-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"c198eda0-01d3-443e-b342-5039d9cfb0d5\") " pod="openstack/nova-cell1-conductor-0" Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.638369 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c198eda0-01d3-443e-b342-5039d9cfb0d5-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"c198eda0-01d3-443e-b342-5039d9cfb0d5\") " pod="openstack/nova-cell1-conductor-0" Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.642958 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c198eda0-01d3-443e-b342-5039d9cfb0d5-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"c198eda0-01d3-443e-b342-5039d9cfb0d5\") " pod="openstack/nova-cell1-conductor-0" Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.664331 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lclj9\" (UniqueName: \"kubernetes.io/projected/c198eda0-01d3-443e-b342-5039d9cfb0d5-kube-api-access-lclj9\") pod \"nova-cell1-conductor-0\" (UID: \"c198eda0-01d3-443e-b342-5039d9cfb0d5\") " pod="openstack/nova-cell1-conductor-0" Jan 22 07:20:19 crc kubenswrapper[4982]: I0122 07:20:19.811082 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 22 07:20:20 crc kubenswrapper[4982]: I0122 07:20:20.040561 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f66ff87f9-twgk7" Jan 22 07:20:20 crc kubenswrapper[4982]: I0122 07:20:20.146560 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e6ecea22-10f0-439a-94d1-eb095d24fa73-ovsdbserver-sb\") pod \"e6ecea22-10f0-439a-94d1-eb095d24fa73\" (UID: \"e6ecea22-10f0-439a-94d1-eb095d24fa73\") " Jan 22 07:20:20 crc kubenswrapper[4982]: I0122 07:20:20.146912 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e6ecea22-10f0-439a-94d1-eb095d24fa73-dns-svc\") pod \"e6ecea22-10f0-439a-94d1-eb095d24fa73\" (UID: \"e6ecea22-10f0-439a-94d1-eb095d24fa73\") " Jan 22 07:20:20 crc kubenswrapper[4982]: I0122 07:20:20.146976 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6ecea22-10f0-439a-94d1-eb095d24fa73-config\") pod \"e6ecea22-10f0-439a-94d1-eb095d24fa73\" (UID: \"e6ecea22-10f0-439a-94d1-eb095d24fa73\") " Jan 22 07:20:20 crc kubenswrapper[4982]: I0122 07:20:20.147081 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qscqb\" (UniqueName: \"kubernetes.io/projected/e6ecea22-10f0-439a-94d1-eb095d24fa73-kube-api-access-qscqb\") pod \"e6ecea22-10f0-439a-94d1-eb095d24fa73\" (UID: \"e6ecea22-10f0-439a-94d1-eb095d24fa73\") " Jan 22 07:20:20 crc kubenswrapper[4982]: I0122 07:20:20.147131 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e6ecea22-10f0-439a-94d1-eb095d24fa73-ovsdbserver-nb\") pod \"e6ecea22-10f0-439a-94d1-eb095d24fa73\" (UID: \"e6ecea22-10f0-439a-94d1-eb095d24fa73\") " Jan 22 07:20:20 crc kubenswrapper[4982]: I0122 07:20:20.152953 4982 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6ecea22-10f0-439a-94d1-eb095d24fa73-kube-api-access-qscqb" (OuterVolumeSpecName: "kube-api-access-qscqb") pod "e6ecea22-10f0-439a-94d1-eb095d24fa73" (UID: "e6ecea22-10f0-439a-94d1-eb095d24fa73"). InnerVolumeSpecName "kube-api-access-qscqb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 07:20:20 crc kubenswrapper[4982]: I0122 07:20:20.189905 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6ecea22-10f0-439a-94d1-eb095d24fa73-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e6ecea22-10f0-439a-94d1-eb095d24fa73" (UID: "e6ecea22-10f0-439a-94d1-eb095d24fa73"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 07:20:20 crc kubenswrapper[4982]: I0122 07:20:20.198512 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6ecea22-10f0-439a-94d1-eb095d24fa73-config" (OuterVolumeSpecName: "config") pod "e6ecea22-10f0-439a-94d1-eb095d24fa73" (UID: "e6ecea22-10f0-439a-94d1-eb095d24fa73"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 07:20:20 crc kubenswrapper[4982]: I0122 07:20:20.198671 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6ecea22-10f0-439a-94d1-eb095d24fa73-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e6ecea22-10f0-439a-94d1-eb095d24fa73" (UID: "e6ecea22-10f0-439a-94d1-eb095d24fa73"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
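
Each UnmountVolume.TearDown line above is one step of the kubelet volume reconciler converging actual state to desired state after the dnsmasq pod's deletion: OuterVolumeSpecName is the name used in the pod spec, InnerVolumeSpecName is the name the volume plugin resolved it to, and once TearDown succeeds the reconciler records the "Volume detached ... DevicePath \"\"" markers that follow below. A rough sketch of that unmount pass, assuming a simplified world interface (the type and method names here are invented for illustration):

    package sketch

    // volume pairs a pod-spec volume name with its owning pod UID.
    type volume struct{ Name, PodUID string }

    // worldView is an assumed, simplified view of the reconciler's
    // "desired state of world" and "actual state of world" caches.
    type worldView interface {
        MountedVolumes() []volume   // actual state: what is mounted now
        StillDesired(v volume) bool // desired state: does a pod still need it
        TearDown(v volume) error    // unmount via the volume plugin
        MarkDetached(v volume)      // emits the "Volume detached" record
    }

    // reconcileUnmounts walks mounted volumes and tears down any that no
    // longer appear in the desired state, as after a pod DELETE.
    func reconcileUnmounts(w worldView) {
        for _, v := range w.MountedVolumes() {
            if w.StillDesired(v) {
                continue
            }
            if err := w.TearDown(v); err != nil {
                continue // left mounted; retried on the next reconcile tick
            }
            w.MarkDetached(v)
        }
    }
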
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:20:20 crc kubenswrapper[4982]: I0122 07:20:20.249671 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e6ecea22-10f0-439a-94d1-eb095d24fa73-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:20 crc kubenswrapper[4982]: I0122 07:20:20.249728 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e6ecea22-10f0-439a-94d1-eb095d24fa73-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:20 crc kubenswrapper[4982]: I0122 07:20:20.249752 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6ecea22-10f0-439a-94d1-eb095d24fa73-config\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:20 crc kubenswrapper[4982]: I0122 07:20:20.249764 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qscqb\" (UniqueName: \"kubernetes.io/projected/e6ecea22-10f0-439a-94d1-eb095d24fa73-kube-api-access-qscqb\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:20 crc kubenswrapper[4982]: I0122 07:20:20.249778 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e6ecea22-10f0-439a-94d1-eb095d24fa73-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:20 crc kubenswrapper[4982]: I0122 07:20:20.354228 4982 generic.go:334] "Generic (PLEG): container finished" podID="b94b54e1-6f53-486e-aa50-2aafe9f78c26" containerID="f252716ca30b9f777ded287ac1f332c844618a02aeddc71fd21218a10b331e90" exitCode=143 Jan 22 07:20:20 crc kubenswrapper[4982]: I0122 07:20:20.354287 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b94b54e1-6f53-486e-aa50-2aafe9f78c26","Type":"ContainerDied","Data":"f252716ca30b9f777ded287ac1f332c844618a02aeddc71fd21218a10b331e90"} Jan 22 07:20:20 crc kubenswrapper[4982]: I0122 07:20:20.361993 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f66ff87f9-twgk7" event={"ID":"e6ecea22-10f0-439a-94d1-eb095d24fa73","Type":"ContainerDied","Data":"c115cc1a11aef0b46a98ca9581318e3e7111cd52eef3a9b1c7ca2ec75781c400"} Jan 22 07:20:20 crc kubenswrapper[4982]: I0122 07:20:20.362043 4982 scope.go:117] "RemoveContainer" containerID="db35e60ede15ea834b698e6d9c87973496cf0f9256898dbf4c9214e03ff2dab4" Jan 22 07:20:20 crc kubenswrapper[4982]: I0122 07:20:20.362058 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-f66ff87f9-twgk7" Jan 22 07:20:20 crc kubenswrapper[4982]: I0122 07:20:20.374572 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 22 07:20:20 crc kubenswrapper[4982]: I0122 07:20:20.402758 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f66ff87f9-twgk7"] Jan 22 07:20:20 crc kubenswrapper[4982]: I0122 07:20:20.403307 4982 scope.go:117] "RemoveContainer" containerID="58a80f26265211208fba9d90809a57da99fcfe5475e37544cbf9d669c244d96a" Jan 22 07:20:20 crc kubenswrapper[4982]: I0122 07:20:20.411573 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-f66ff87f9-twgk7"] Jan 22 07:20:21 crc kubenswrapper[4982]: I0122 07:20:21.377830 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"c198eda0-01d3-443e-b342-5039d9cfb0d5","Type":"ContainerStarted","Data":"63669ecbdb5cd393539ad12c91a9004be45f61e670e7b6b917c54a26bdfad19e"} Jan 22 07:20:21 crc kubenswrapper[4982]: I0122 07:20:21.377911 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"c198eda0-01d3-443e-b342-5039d9cfb0d5","Type":"ContainerStarted","Data":"bae5bc9af01c511a52f88735df06e4fe9f2694d3f6c0c8a3c41cdd586c6fbe62"} Jan 22 07:20:21 crc kubenswrapper[4982]: I0122 07:20:21.391029 4982 generic.go:334] "Generic (PLEG): container finished" podID="2455cd98-0827-4b25-8a4f-21514968b2ea" containerID="500ce2176da325fb5e94f133f09a7de2be123fa6c5c5916965f3755057860ef0" exitCode=0 Jan 22 07:20:21 crc kubenswrapper[4982]: I0122 07:20:21.391076 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2455cd98-0827-4b25-8a4f-21514968b2ea","Type":"ContainerDied","Data":"500ce2176da325fb5e94f133f09a7de2be123fa6c5c5916965f3755057860ef0"} Jan 22 07:20:21 crc kubenswrapper[4982]: I0122 07:20:21.515370 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 07:20:21 crc kubenswrapper[4982]: I0122 07:20:21.674360 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2455cd98-0827-4b25-8a4f-21514968b2ea-combined-ca-bundle\") pod \"2455cd98-0827-4b25-8a4f-21514968b2ea\" (UID: \"2455cd98-0827-4b25-8a4f-21514968b2ea\") " Jan 22 07:20:21 crc kubenswrapper[4982]: I0122 07:20:21.674499 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d5llg\" (UniqueName: \"kubernetes.io/projected/2455cd98-0827-4b25-8a4f-21514968b2ea-kube-api-access-d5llg\") pod \"2455cd98-0827-4b25-8a4f-21514968b2ea\" (UID: \"2455cd98-0827-4b25-8a4f-21514968b2ea\") " Jan 22 07:20:21 crc kubenswrapper[4982]: I0122 07:20:21.675632 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2455cd98-0827-4b25-8a4f-21514968b2ea-config-data\") pod \"2455cd98-0827-4b25-8a4f-21514968b2ea\" (UID: \"2455cd98-0827-4b25-8a4f-21514968b2ea\") " Jan 22 07:20:21 crc kubenswrapper[4982]: I0122 07:20:21.680218 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2455cd98-0827-4b25-8a4f-21514968b2ea-kube-api-access-d5llg" (OuterVolumeSpecName: "kube-api-access-d5llg") pod "2455cd98-0827-4b25-8a4f-21514968b2ea" (UID: "2455cd98-0827-4b25-8a4f-21514968b2ea"). 
InnerVolumeSpecName "kube-api-access-d5llg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:20:21 crc kubenswrapper[4982]: I0122 07:20:21.717646 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2455cd98-0827-4b25-8a4f-21514968b2ea-config-data" (OuterVolumeSpecName: "config-data") pod "2455cd98-0827-4b25-8a4f-21514968b2ea" (UID: "2455cd98-0827-4b25-8a4f-21514968b2ea"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:20:21 crc kubenswrapper[4982]: I0122 07:20:21.728145 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2455cd98-0827-4b25-8a4f-21514968b2ea-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2455cd98-0827-4b25-8a4f-21514968b2ea" (UID: "2455cd98-0827-4b25-8a4f-21514968b2ea"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:20:21 crc kubenswrapper[4982]: I0122 07:20:21.735168 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6ecea22-10f0-439a-94d1-eb095d24fa73" path="/var/lib/kubelet/pods/e6ecea22-10f0-439a-94d1-eb095d24fa73/volumes" Jan 22 07:20:21 crc kubenswrapper[4982]: I0122 07:20:21.778184 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2455cd98-0827-4b25-8a4f-21514968b2ea-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:21 crc kubenswrapper[4982]: I0122 07:20:21.778220 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2455cd98-0827-4b25-8a4f-21514968b2ea-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:21 crc kubenswrapper[4982]: I0122 07:20:21.778235 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d5llg\" (UniqueName: \"kubernetes.io/projected/2455cd98-0827-4b25-8a4f-21514968b2ea-kube-api-access-d5llg\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:21 crc kubenswrapper[4982]: I0122 07:20:21.956887 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-ht6kq" Jan 22 07:20:21 crc kubenswrapper[4982]: I0122 07:20:21.956929 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-ht6kq" Jan 22 07:20:22 crc kubenswrapper[4982]: I0122 07:20:22.007119 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-ht6kq" Jan 22 07:20:22 crc kubenswrapper[4982]: I0122 07:20:22.405190 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2455cd98-0827-4b25-8a4f-21514968b2ea","Type":"ContainerDied","Data":"bd9c890456ee904d4f3b03a7b512924b193882d250f715b3a1a026664077acf7"} Jan 22 07:20:22 crc kubenswrapper[4982]: I0122 07:20:22.405312 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 07:20:22 crc kubenswrapper[4982]: I0122 07:20:22.405613 4982 scope.go:117] "RemoveContainer" containerID="500ce2176da325fb5e94f133f09a7de2be123fa6c5c5916965f3755057860ef0" Jan 22 07:20:22 crc kubenswrapper[4982]: I0122 07:20:22.437840 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=3.437815374 podStartE2EDuration="3.437815374s" podCreationTimestamp="2026-01-22 07:20:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:20:22.429635633 +0000 UTC m=+5683.268273686" watchObservedRunningTime="2026-01-22 07:20:22.437815374 +0000 UTC m=+5683.276453417" Jan 22 07:20:22 crc kubenswrapper[4982]: I0122 07:20:22.475846 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 07:20:22 crc kubenswrapper[4982]: I0122 07:20:22.485713 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 07:20:22 crc kubenswrapper[4982]: I0122 07:20:22.494467 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 07:20:22 crc kubenswrapper[4982]: E0122 07:20:22.494956 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6ecea22-10f0-439a-94d1-eb095d24fa73" containerName="init" Jan 22 07:20:22 crc kubenswrapper[4982]: I0122 07:20:22.494979 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6ecea22-10f0-439a-94d1-eb095d24fa73" containerName="init" Jan 22 07:20:22 crc kubenswrapper[4982]: E0122 07:20:22.495011 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2455cd98-0827-4b25-8a4f-21514968b2ea" containerName="nova-scheduler-scheduler" Jan 22 07:20:22 crc kubenswrapper[4982]: I0122 07:20:22.495020 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="2455cd98-0827-4b25-8a4f-21514968b2ea" containerName="nova-scheduler-scheduler" Jan 22 07:20:22 crc kubenswrapper[4982]: E0122 07:20:22.495046 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6ecea22-10f0-439a-94d1-eb095d24fa73" containerName="dnsmasq-dns" Jan 22 07:20:22 crc kubenswrapper[4982]: I0122 07:20:22.495054 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6ecea22-10f0-439a-94d1-eb095d24fa73" containerName="dnsmasq-dns" Jan 22 07:20:22 crc kubenswrapper[4982]: I0122 07:20:22.495270 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="2455cd98-0827-4b25-8a4f-21514968b2ea" containerName="nova-scheduler-scheduler" Jan 22 07:20:22 crc kubenswrapper[4982]: I0122 07:20:22.495301 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6ecea22-10f0-439a-94d1-eb095d24fa73" containerName="dnsmasq-dns" Jan 22 07:20:22 crc kubenswrapper[4982]: I0122 07:20:22.496099 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 07:20:22 crc kubenswrapper[4982]: I0122 07:20:22.502820 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Jan 22 07:20:22 crc kubenswrapper[4982]: I0122 07:20:22.503216 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 07:20:22 crc kubenswrapper[4982]: I0122 07:20:22.593148 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ce4be60-2a12-4a46-92eb-d2289fcdc425-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8ce4be60-2a12-4a46-92eb-d2289fcdc425\") " pod="openstack/nova-scheduler-0" Jan 22 07:20:22 crc kubenswrapper[4982]: I0122 07:20:22.593344 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdhfw\" (UniqueName: \"kubernetes.io/projected/8ce4be60-2a12-4a46-92eb-d2289fcdc425-kube-api-access-sdhfw\") pod \"nova-scheduler-0\" (UID: \"8ce4be60-2a12-4a46-92eb-d2289fcdc425\") " pod="openstack/nova-scheduler-0" Jan 22 07:20:22 crc kubenswrapper[4982]: I0122 07:20:22.593387 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ce4be60-2a12-4a46-92eb-d2289fcdc425-config-data\") pod \"nova-scheduler-0\" (UID: \"8ce4be60-2a12-4a46-92eb-d2289fcdc425\") " pod="openstack/nova-scheduler-0" Jan 22 07:20:22 crc kubenswrapper[4982]: I0122 07:20:22.695013 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ce4be60-2a12-4a46-92eb-d2289fcdc425-config-data\") pod \"nova-scheduler-0\" (UID: \"8ce4be60-2a12-4a46-92eb-d2289fcdc425\") " pod="openstack/nova-scheduler-0" Jan 22 07:20:22 crc kubenswrapper[4982]: I0122 07:20:22.695194 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ce4be60-2a12-4a46-92eb-d2289fcdc425-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8ce4be60-2a12-4a46-92eb-d2289fcdc425\") " pod="openstack/nova-scheduler-0" Jan 22 07:20:22 crc kubenswrapper[4982]: I0122 07:20:22.695405 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdhfw\" (UniqueName: \"kubernetes.io/projected/8ce4be60-2a12-4a46-92eb-d2289fcdc425-kube-api-access-sdhfw\") pod \"nova-scheduler-0\" (UID: \"8ce4be60-2a12-4a46-92eb-d2289fcdc425\") " pod="openstack/nova-scheduler-0" Jan 22 07:20:22 crc kubenswrapper[4982]: I0122 07:20:22.709181 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ce4be60-2a12-4a46-92eb-d2289fcdc425-config-data\") pod \"nova-scheduler-0\" (UID: \"8ce4be60-2a12-4a46-92eb-d2289fcdc425\") " pod="openstack/nova-scheduler-0" Jan 22 07:20:22 crc kubenswrapper[4982]: I0122 07:20:22.713898 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ce4be60-2a12-4a46-92eb-d2289fcdc425-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8ce4be60-2a12-4a46-92eb-d2289fcdc425\") " pod="openstack/nova-scheduler-0" Jan 22 07:20:22 crc kubenswrapper[4982]: I0122 07:20:22.716234 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdhfw\" (UniqueName: 
\"kubernetes.io/projected/8ce4be60-2a12-4a46-92eb-d2289fcdc425-kube-api-access-sdhfw\") pod \"nova-scheduler-0\" (UID: \"8ce4be60-2a12-4a46-92eb-d2289fcdc425\") " pod="openstack/nova-scheduler-0" Jan 22 07:20:22 crc kubenswrapper[4982]: I0122 07:20:22.837026 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 07:20:23 crc kubenswrapper[4982]: I0122 07:20:23.314779 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 07:20:23 crc kubenswrapper[4982]: I0122 07:20:23.421580 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8ce4be60-2a12-4a46-92eb-d2289fcdc425","Type":"ContainerStarted","Data":"3fd8ae39fe7bc81938928186bff259a852df6b90133454d783b9f847951ffdc0"} Jan 22 07:20:23 crc kubenswrapper[4982]: I0122 07:20:23.736733 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2455cd98-0827-4b25-8a4f-21514968b2ea" path="/var/lib/kubelet/pods/2455cd98-0827-4b25-8a4f-21514968b2ea/volumes" Jan 22 07:20:24 crc kubenswrapper[4982]: I0122 07:20:24.430377 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8ce4be60-2a12-4a46-92eb-d2289fcdc425","Type":"ContainerStarted","Data":"712096cc2660c5263809454e00765829c670ec7d7c0182266273b0ae97267cef"} Jan 22 07:20:24 crc kubenswrapper[4982]: I0122 07:20:24.460476 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.460457733 podStartE2EDuration="2.460457733s" podCreationTimestamp="2026-01-22 07:20:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:20:24.453155727 +0000 UTC m=+5685.291793730" watchObservedRunningTime="2026-01-22 07:20:24.460457733 +0000 UTC m=+5685.299095736" Jan 22 07:20:24 crc kubenswrapper[4982]: I0122 07:20:24.811911 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Jan 22 07:20:25 crc kubenswrapper[4982]: I0122 07:20:25.441298 4982 generic.go:334] "Generic (PLEG): container finished" podID="6f979a4f-d850-4fca-9abf-34073ce42424" containerID="3f9f582dc5ad0af958975ce1ddf7c437b6bf1e174b4bd806ae44bb1308e61b70" exitCode=0 Jan 22 07:20:25 crc kubenswrapper[4982]: I0122 07:20:25.441390 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6f979a4f-d850-4fca-9abf-34073ce42424","Type":"ContainerDied","Data":"3f9f582dc5ad0af958975ce1ddf7c437b6bf1e174b4bd806ae44bb1308e61b70"} Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.069650 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.173069 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-st8jk\" (UniqueName: \"kubernetes.io/projected/6f979a4f-d850-4fca-9abf-34073ce42424-kube-api-access-st8jk\") pod \"6f979a4f-d850-4fca-9abf-34073ce42424\" (UID: \"6f979a4f-d850-4fca-9abf-34073ce42424\") " Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.173166 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f979a4f-d850-4fca-9abf-34073ce42424-config-data\") pod \"6f979a4f-d850-4fca-9abf-34073ce42424\" (UID: \"6f979a4f-d850-4fca-9abf-34073ce42424\") " Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.173212 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f979a4f-d850-4fca-9abf-34073ce42424-combined-ca-bundle\") pod \"6f979a4f-d850-4fca-9abf-34073ce42424\" (UID: \"6f979a4f-d850-4fca-9abf-34073ce42424\") " Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.173238 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6f979a4f-d850-4fca-9abf-34073ce42424-logs\") pod \"6f979a4f-d850-4fca-9abf-34073ce42424\" (UID: \"6f979a4f-d850-4fca-9abf-34073ce42424\") " Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.174453 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f979a4f-d850-4fca-9abf-34073ce42424-logs" (OuterVolumeSpecName: "logs") pod "6f979a4f-d850-4fca-9abf-34073ce42424" (UID: "6f979a4f-d850-4fca-9abf-34073ce42424"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.181781 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f979a4f-d850-4fca-9abf-34073ce42424-kube-api-access-st8jk" (OuterVolumeSpecName: "kube-api-access-st8jk") pod "6f979a4f-d850-4fca-9abf-34073ce42424" (UID: "6f979a4f-d850-4fca-9abf-34073ce42424"). InnerVolumeSpecName "kube-api-access-st8jk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.216270 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f979a4f-d850-4fca-9abf-34073ce42424-config-data" (OuterVolumeSpecName: "config-data") pod "6f979a4f-d850-4fca-9abf-34073ce42424" (UID: "6f979a4f-d850-4fca-9abf-34073ce42424"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.248119 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f979a4f-d850-4fca-9abf-34073ce42424-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6f979a4f-d850-4fca-9abf-34073ce42424" (UID: "6f979a4f-d850-4fca-9abf-34073ce42424"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.275463 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f979a4f-d850-4fca-9abf-34073ce42424-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.275505 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6f979a4f-d850-4fca-9abf-34073ce42424-logs\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.275518 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-st8jk\" (UniqueName: \"kubernetes.io/projected/6f979a4f-d850-4fca-9abf-34073ce42424-kube-api-access-st8jk\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.275532 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f979a4f-d850-4fca-9abf-34073ce42424-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.355831 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.454032 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6f979a4f-d850-4fca-9abf-34073ce42424","Type":"ContainerDied","Data":"013072656db1f260e5ed3ced2b5bbdc1b142d05022cdc59210cad12d85411bad"} Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.454071 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.454101 4982 scope.go:117] "RemoveContainer" containerID="3f9f582dc5ad0af958975ce1ddf7c437b6bf1e174b4bd806ae44bb1308e61b70" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.456078 4982 generic.go:334] "Generic (PLEG): container finished" podID="b94b54e1-6f53-486e-aa50-2aafe9f78c26" containerID="0437f8f24df9aa84c962c5397e033b7693f74cbb79bd5c1a41bdf4ce8fc1c98e" exitCode=0 Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.456108 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b94b54e1-6f53-486e-aa50-2aafe9f78c26","Type":"ContainerDied","Data":"0437f8f24df9aa84c962c5397e033b7693f74cbb79bd5c1a41bdf4ce8fc1c98e"} Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.456139 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.456140 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b94b54e1-6f53-486e-aa50-2aafe9f78c26","Type":"ContainerDied","Data":"18f97fdb07365a92be064a5143fbd82d907eab2db0b6fb3ff9985b2c47841a1f"} Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.477710 4982 scope.go:117] "RemoveContainer" containerID="2782313b336cdc7a7befeae93234349361058999655c9e01fec69c3ebd539c83" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.484636 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b94b54e1-6f53-486e-aa50-2aafe9f78c26-combined-ca-bundle\") pod \"b94b54e1-6f53-486e-aa50-2aafe9f78c26\" (UID: \"b94b54e1-6f53-486e-aa50-2aafe9f78c26\") " Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.484741 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b94b54e1-6f53-486e-aa50-2aafe9f78c26-config-data\") pod \"b94b54e1-6f53-486e-aa50-2aafe9f78c26\" (UID: \"b94b54e1-6f53-486e-aa50-2aafe9f78c26\") " Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.484821 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b94b54e1-6f53-486e-aa50-2aafe9f78c26-logs\") pod \"b94b54e1-6f53-486e-aa50-2aafe9f78c26\" (UID: \"b94b54e1-6f53-486e-aa50-2aafe9f78c26\") " Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.484876 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5jcc4\" (UniqueName: \"kubernetes.io/projected/b94b54e1-6f53-486e-aa50-2aafe9f78c26-kube-api-access-5jcc4\") pod \"b94b54e1-6f53-486e-aa50-2aafe9f78c26\" (UID: \"b94b54e1-6f53-486e-aa50-2aafe9f78c26\") " Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.486093 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b94b54e1-6f53-486e-aa50-2aafe9f78c26-logs" (OuterVolumeSpecName: "logs") pod "b94b54e1-6f53-486e-aa50-2aafe9f78c26" (UID: "b94b54e1-6f53-486e-aa50-2aafe9f78c26"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.493147 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.496350 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b94b54e1-6f53-486e-aa50-2aafe9f78c26-kube-api-access-5jcc4" (OuterVolumeSpecName: "kube-api-access-5jcc4") pod "b94b54e1-6f53-486e-aa50-2aafe9f78c26" (UID: "b94b54e1-6f53-486e-aa50-2aafe9f78c26"). InnerVolumeSpecName "kube-api-access-5jcc4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.503664 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.515890 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 22 07:20:26 crc kubenswrapper[4982]: E0122 07:20:26.516310 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b94b54e1-6f53-486e-aa50-2aafe9f78c26" containerName="nova-metadata-log" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.516335 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b94b54e1-6f53-486e-aa50-2aafe9f78c26" containerName="nova-metadata-log" Jan 22 07:20:26 crc kubenswrapper[4982]: E0122 07:20:26.516376 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f979a4f-d850-4fca-9abf-34073ce42424" containerName="nova-api-api" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.516387 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f979a4f-d850-4fca-9abf-34073ce42424" containerName="nova-api-api" Jan 22 07:20:26 crc kubenswrapper[4982]: E0122 07:20:26.516399 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b94b54e1-6f53-486e-aa50-2aafe9f78c26" containerName="nova-metadata-metadata" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.516407 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b94b54e1-6f53-486e-aa50-2aafe9f78c26" containerName="nova-metadata-metadata" Jan 22 07:20:26 crc kubenswrapper[4982]: E0122 07:20:26.516425 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f979a4f-d850-4fca-9abf-34073ce42424" containerName="nova-api-log" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.516432 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f979a4f-d850-4fca-9abf-34073ce42424" containerName="nova-api-log" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.516632 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b94b54e1-6f53-486e-aa50-2aafe9f78c26" containerName="nova-metadata-log" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.516652 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f979a4f-d850-4fca-9abf-34073ce42424" containerName="nova-api-api" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.516667 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b94b54e1-6f53-486e-aa50-2aafe9f78c26" containerName="nova-metadata-metadata" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.516679 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f979a4f-d850-4fca-9abf-34073ce42424" containerName="nova-api-log" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.517826 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.520514 4982 scope.go:117] "RemoveContainer" containerID="0437f8f24df9aa84c962c5397e033b7693f74cbb79bd5c1a41bdf4ce8fc1c98e" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.523074 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b94b54e1-6f53-486e-aa50-2aafe9f78c26-config-data" (OuterVolumeSpecName: "config-data") pod "b94b54e1-6f53-486e-aa50-2aafe9f78c26" (UID: "b94b54e1-6f53-486e-aa50-2aafe9f78c26"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.526180 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.543072 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.551996 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b94b54e1-6f53-486e-aa50-2aafe9f78c26-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b94b54e1-6f53-486e-aa50-2aafe9f78c26" (UID: "b94b54e1-6f53-486e-aa50-2aafe9f78c26"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.556518 4982 scope.go:117] "RemoveContainer" containerID="f252716ca30b9f777ded287ac1f332c844618a02aeddc71fd21218a10b331e90" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.576127 4982 scope.go:117] "RemoveContainer" containerID="0437f8f24df9aa84c962c5397e033b7693f74cbb79bd5c1a41bdf4ce8fc1c98e" Jan 22 07:20:26 crc kubenswrapper[4982]: E0122 07:20:26.576822 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0437f8f24df9aa84c962c5397e033b7693f74cbb79bd5c1a41bdf4ce8fc1c98e\": container with ID starting with 0437f8f24df9aa84c962c5397e033b7693f74cbb79bd5c1a41bdf4ce8fc1c98e not found: ID does not exist" containerID="0437f8f24df9aa84c962c5397e033b7693f74cbb79bd5c1a41bdf4ce8fc1c98e" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.576975 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0437f8f24df9aa84c962c5397e033b7693f74cbb79bd5c1a41bdf4ce8fc1c98e"} err="failed to get container status \"0437f8f24df9aa84c962c5397e033b7693f74cbb79bd5c1a41bdf4ce8fc1c98e\": rpc error: code = NotFound desc = could not find container \"0437f8f24df9aa84c962c5397e033b7693f74cbb79bd5c1a41bdf4ce8fc1c98e\": container with ID starting with 0437f8f24df9aa84c962c5397e033b7693f74cbb79bd5c1a41bdf4ce8fc1c98e not found: ID does not exist" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.577188 4982 scope.go:117] "RemoveContainer" containerID="f252716ca30b9f777ded287ac1f332c844618a02aeddc71fd21218a10b331e90" Jan 22 07:20:26 crc kubenswrapper[4982]: E0122 07:20:26.577954 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f252716ca30b9f777ded287ac1f332c844618a02aeddc71fd21218a10b331e90\": container with ID starting with f252716ca30b9f777ded287ac1f332c844618a02aeddc71fd21218a10b331e90 not found: ID does not exist" containerID="f252716ca30b9f777ded287ac1f332c844618a02aeddc71fd21218a10b331e90" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.577994 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f252716ca30b9f777ded287ac1f332c844618a02aeddc71fd21218a10b331e90"} err="failed to get container status \"f252716ca30b9f777ded287ac1f332c844618a02aeddc71fd21218a10b331e90\": rpc error: code = NotFound desc = could not find container \"f252716ca30b9f777ded287ac1f332c844618a02aeddc71fd21218a10b331e90\": container with ID starting with f252716ca30b9f777ded287ac1f332c844618a02aeddc71fd21218a10b331e90 not found: ID does not exist" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.588360 4982 
Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.588360 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ad2f1df-87c2-4d25-aff1-e4e59b43df3f-config-data\") pod \"nova-api-0\" (UID: \"6ad2f1df-87c2-4d25-aff1-e4e59b43df3f\") " pod="openstack/nova-api-0"
Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.588447 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jpw42\" (UniqueName: \"kubernetes.io/projected/6ad2f1df-87c2-4d25-aff1-e4e59b43df3f-kube-api-access-jpw42\") pod \"nova-api-0\" (UID: \"6ad2f1df-87c2-4d25-aff1-e4e59b43df3f\") " pod="openstack/nova-api-0"
Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.588635 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6ad2f1df-87c2-4d25-aff1-e4e59b43df3f-logs\") pod \"nova-api-0\" (UID: \"6ad2f1df-87c2-4d25-aff1-e4e59b43df3f\") " pod="openstack/nova-api-0"
Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.588721 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ad2f1df-87c2-4d25-aff1-e4e59b43df3f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6ad2f1df-87c2-4d25-aff1-e4e59b43df3f\") " pod="openstack/nova-api-0"
Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.588784 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b94b54e1-6f53-486e-aa50-2aafe9f78c26-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.588797 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b94b54e1-6f53-486e-aa50-2aafe9f78c26-config-data\") on node \"crc\" DevicePath \"\""
Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.588805 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b94b54e1-6f53-486e-aa50-2aafe9f78c26-logs\") on node \"crc\" DevicePath \"\""
Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.588817 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5jcc4\" (UniqueName: \"kubernetes.io/projected/b94b54e1-6f53-486e-aa50-2aafe9f78c26-kube-api-access-5jcc4\") on node \"crc\" DevicePath \"\""
Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.690415 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6ad2f1df-87c2-4d25-aff1-e4e59b43df3f-logs\") pod \"nova-api-0\" (UID: \"6ad2f1df-87c2-4d25-aff1-e4e59b43df3f\") " pod="openstack/nova-api-0"
Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.690543 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ad2f1df-87c2-4d25-aff1-e4e59b43df3f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6ad2f1df-87c2-4d25-aff1-e4e59b43df3f\") " pod="openstack/nova-api-0"
Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.690622 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ad2f1df-87c2-4d25-aff1-e4e59b43df3f-config-data\") pod \"nova-api-0\" (UID: \"6ad2f1df-87c2-4d25-aff1-e4e59b43df3f\") " pod="openstack/nova-api-0"
Jan 22 07:20:26 crc kubenswrapper[4982]: I0122
07:20:26.690671 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jpw42\" (UniqueName: \"kubernetes.io/projected/6ad2f1df-87c2-4d25-aff1-e4e59b43df3f-kube-api-access-jpw42\") pod \"nova-api-0\" (UID: \"6ad2f1df-87c2-4d25-aff1-e4e59b43df3f\") " pod="openstack/nova-api-0" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.691325 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6ad2f1df-87c2-4d25-aff1-e4e59b43df3f-logs\") pod \"nova-api-0\" (UID: \"6ad2f1df-87c2-4d25-aff1-e4e59b43df3f\") " pod="openstack/nova-api-0" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.694090 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ad2f1df-87c2-4d25-aff1-e4e59b43df3f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6ad2f1df-87c2-4d25-aff1-e4e59b43df3f\") " pod="openstack/nova-api-0" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.694390 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ad2f1df-87c2-4d25-aff1-e4e59b43df3f-config-data\") pod \"nova-api-0\" (UID: \"6ad2f1df-87c2-4d25-aff1-e4e59b43df3f\") " pod="openstack/nova-api-0" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.706871 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jpw42\" (UniqueName: \"kubernetes.io/projected/6ad2f1df-87c2-4d25-aff1-e4e59b43df3f-kube-api-access-jpw42\") pod \"nova-api-0\" (UID: \"6ad2f1df-87c2-4d25-aff1-e4e59b43df3f\") " pod="openstack/nova-api-0" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.788637 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.800521 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.808679 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.810459 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.813545 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.830654 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.849715 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.893457 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73\") " pod="openstack/nova-metadata-0" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.893502 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73-logs\") pod \"nova-metadata-0\" (UID: \"8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73\") " pod="openstack/nova-metadata-0" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.893528 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73-config-data\") pod \"nova-metadata-0\" (UID: \"8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73\") " pod="openstack/nova-metadata-0" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.893577 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6lr8\" (UniqueName: \"kubernetes.io/projected/8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73-kube-api-access-m6lr8\") pod \"nova-metadata-0\" (UID: \"8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73\") " pod="openstack/nova-metadata-0" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.994929 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73\") " pod="openstack/nova-metadata-0" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.995269 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73-logs\") pod \"nova-metadata-0\" (UID: \"8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73\") " pod="openstack/nova-metadata-0" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.995301 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73-config-data\") pod \"nova-metadata-0\" (UID: \"8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73\") " pod="openstack/nova-metadata-0" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.995361 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6lr8\" (UniqueName: \"kubernetes.io/projected/8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73-kube-api-access-m6lr8\") pod \"nova-metadata-0\" (UID: \"8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73\") " pod="openstack/nova-metadata-0" Jan 22 07:20:26 crc kubenswrapper[4982]: I0122 07:20:26.995693 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73-logs\") pod \"nova-metadata-0\" (UID: \"8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73\") " pod="openstack/nova-metadata-0" Jan 22 07:20:27 crc kubenswrapper[4982]: I0122 07:20:27.006894 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73-config-data\") pod \"nova-metadata-0\" (UID: \"8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73\") " pod="openstack/nova-metadata-0" Jan 22 07:20:27 crc kubenswrapper[4982]: I0122 07:20:27.008838 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73\") " pod="openstack/nova-metadata-0" Jan 22 07:20:27 crc kubenswrapper[4982]: I0122 07:20:27.018129 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6lr8\" (UniqueName: \"kubernetes.io/projected/8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73-kube-api-access-m6lr8\") pod \"nova-metadata-0\" (UID: \"8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73\") " pod="openstack/nova-metadata-0" Jan 22 07:20:27 crc kubenswrapper[4982]: I0122 07:20:27.130489 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 07:20:27 crc kubenswrapper[4982]: I0122 07:20:27.294421 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 22 07:20:27 crc kubenswrapper[4982]: I0122 07:20:27.475287 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6ad2f1df-87c2-4d25-aff1-e4e59b43df3f","Type":"ContainerStarted","Data":"44299e0ec326ab5daa69e4f49a1f0310f210673488732c30d7d0baf3ef397e0b"} Jan 22 07:20:27 crc kubenswrapper[4982]: I0122 07:20:27.603943 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 07:20:27 crc kubenswrapper[4982]: I0122 07:20:27.732513 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f979a4f-d850-4fca-9abf-34073ce42424" path="/var/lib/kubelet/pods/6f979a4f-d850-4fca-9abf-34073ce42424/volumes" Jan 22 07:20:27 crc kubenswrapper[4982]: I0122 07:20:27.733634 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b94b54e1-6f53-486e-aa50-2aafe9f78c26" path="/var/lib/kubelet/pods/b94b54e1-6f53-486e-aa50-2aafe9f78c26/volumes" Jan 22 07:20:27 crc kubenswrapper[4982]: I0122 07:20:27.838086 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Jan 22 07:20:28 crc kubenswrapper[4982]: I0122 07:20:28.484742 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6ad2f1df-87c2-4d25-aff1-e4e59b43df3f","Type":"ContainerStarted","Data":"43c5af1f104e8f84aa52324894eda67c481a506b1849e68ebc9d20e1b957d710"} Jan 22 07:20:28 crc kubenswrapper[4982]: I0122 07:20:28.484782 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6ad2f1df-87c2-4d25-aff1-e4e59b43df3f","Type":"ContainerStarted","Data":"6153962d7833b0aeb0871a316cf1d78546d82f1b5794b37b2b3dc08ac6a7dd18"} Jan 22 07:20:28 crc kubenswrapper[4982]: I0122 07:20:28.487002 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73","Type":"ContainerStarted","Data":"5e750ce441a2b7b4ba5a9df1514d067e7ce683ea26620af49e89286c133b8f84"} Jan 22 07:20:28 crc kubenswrapper[4982]: I0122 07:20:28.487054 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73","Type":"ContainerStarted","Data":"b9f1c01d49d826ec29b2befcafef894a638668394e1d99e1d4cad22bddde847f"} Jan 22 07:20:28 
crc kubenswrapper[4982]: I0122 07:20:28.487071 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73","Type":"ContainerStarted","Data":"c524a28d2be474c4872f137fe3aa65ebcab07b7c056511fb4508a48dac61f19c"}
Jan 22 07:20:28 crc kubenswrapper[4982]: I0122 07:20:28.516876 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.516847271 podStartE2EDuration="2.516847271s" podCreationTimestamp="2026-01-22 07:20:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:20:28.506485022 +0000 UTC m=+5689.345123045" watchObservedRunningTime="2026-01-22 07:20:28.516847271 +0000 UTC m=+5689.355485274"
Jan 22 07:20:29 crc kubenswrapper[4982]: I0122 07:20:29.522294 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.522269251 podStartE2EDuration="3.522269251s" podCreationTimestamp="2026-01-22 07:20:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:20:29.511054519 +0000 UTC m=+5690.349692532" watchObservedRunningTime="2026-01-22 07:20:29.522269251 +0000 UTC m=+5690.360907264"
Jan 22 07:20:29 crc kubenswrapper[4982]: I0122 07:20:29.841367 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0"
Jan 22 07:20:30 crc kubenswrapper[4982]: I0122 07:20:30.298686 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-xgj4f"]
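
The two "Observed pod startup duration" entries above come from the kubelet's startup-latency SLI: podStartSLOduration is the creation-to-running time with image-pull time excluded, which is why it equals podStartE2EDuration here, both pull timestamps being the zero time (the images were already present). A back-of-the-envelope version of the computation follows; this is a sketch of the metric's definition, not the tracker's exact code:

    package sketch

    import "time"

    // startupSLODuration approximates podStartSLOduration: time from pod
    // creation to first observed running, minus time spent pulling images.
    // With zero pull timestamps (no pull happened) nothing is subtracted.
    func startupSLODuration(created, observedRunning, pullStart, pullEnd time.Time) time.Duration {
        d := observedRunning.Sub(created)
        if !pullStart.IsZero() && !pullEnd.IsZero() {
            d -= pullEnd.Sub(pullStart)
        }
        return d
    }

Plugging in nova-api-0's values (created 07:20:26, observed running at the watch time 07:20:28.516847271, no pull) reproduces the logged 2.516847271s.
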
Jan 22 07:20:30 crc kubenswrapper[4982]: I0122 07:20:30.299871 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-xgj4f"
Jan 22 07:20:30 crc kubenswrapper[4982]: I0122 07:20:30.313083 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data"
Jan 22 07:20:30 crc kubenswrapper[4982]: I0122 07:20:30.313378 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts"
Jan 22 07:20:30 crc kubenswrapper[4982]: I0122 07:20:30.318778 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-xgj4f"]
Jan 22 07:20:30 crc kubenswrapper[4982]: I0122 07:20:30.403378 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-xgj4f\" (UID: \"b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9\") " pod="openstack/nova-cell1-cell-mapping-xgj4f"
Jan 22 07:20:30 crc kubenswrapper[4982]: I0122 07:20:30.403467 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9-scripts\") pod \"nova-cell1-cell-mapping-xgj4f\" (UID: \"b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9\") " pod="openstack/nova-cell1-cell-mapping-xgj4f"
Jan 22 07:20:30 crc kubenswrapper[4982]: I0122 07:20:30.403491 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9-config-data\") pod \"nova-cell1-cell-mapping-xgj4f\" (UID: \"b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9\") " pod="openstack/nova-cell1-cell-mapping-xgj4f"
Jan 22 07:20:30 crc kubenswrapper[4982]: I0122 07:20:30.403515 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sh9wl\" (UniqueName: \"kubernetes.io/projected/b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9-kube-api-access-sh9wl\") pod \"nova-cell1-cell-mapping-xgj4f\" (UID: \"b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9\") " pod="openstack/nova-cell1-cell-mapping-xgj4f"
Jan 22 07:20:30 crc kubenswrapper[4982]: I0122 07:20:30.505017 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-xgj4f\" (UID: \"b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9\") " pod="openstack/nova-cell1-cell-mapping-xgj4f"
Jan 22 07:20:30 crc kubenswrapper[4982]: I0122 07:20:30.505140 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9-scripts\") pod \"nova-cell1-cell-mapping-xgj4f\" (UID: \"b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9\") " pod="openstack/nova-cell1-cell-mapping-xgj4f"
Jan 22 07:20:30 crc kubenswrapper[4982]: I0122 07:20:30.505175 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9-config-data\") pod \"nova-cell1-cell-mapping-xgj4f\" (UID: \"b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9\") " pod="openstack/nova-cell1-cell-mapping-xgj4f"
Jan 22 07:20:30 crc kubenswrapper[4982]: I0122 07:20:30.505219 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sh9wl\" (UniqueName:
\"kubernetes.io/projected/b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9-kube-api-access-sh9wl\") pod \"nova-cell1-cell-mapping-xgj4f\" (UID: \"b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9\") " pod="openstack/nova-cell1-cell-mapping-xgj4f" Jan 22 07:20:30 crc kubenswrapper[4982]: I0122 07:20:30.511249 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9-scripts\") pod \"nova-cell1-cell-mapping-xgj4f\" (UID: \"b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9\") " pod="openstack/nova-cell1-cell-mapping-xgj4f" Jan 22 07:20:30 crc kubenswrapper[4982]: I0122 07:20:30.511771 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9-config-data\") pod \"nova-cell1-cell-mapping-xgj4f\" (UID: \"b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9\") " pod="openstack/nova-cell1-cell-mapping-xgj4f" Jan 22 07:20:30 crc kubenswrapper[4982]: I0122 07:20:30.512037 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-xgj4f\" (UID: \"b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9\") " pod="openstack/nova-cell1-cell-mapping-xgj4f" Jan 22 07:20:30 crc kubenswrapper[4982]: I0122 07:20:30.523683 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sh9wl\" (UniqueName: \"kubernetes.io/projected/b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9-kube-api-access-sh9wl\") pod \"nova-cell1-cell-mapping-xgj4f\" (UID: \"b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9\") " pod="openstack/nova-cell1-cell-mapping-xgj4f" Jan 22 07:20:30 crc kubenswrapper[4982]: I0122 07:20:30.622272 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-xgj4f" Jan 22 07:20:31 crc kubenswrapper[4982]: I0122 07:20:31.123075 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-xgj4f"] Jan 22 07:20:31 crc kubenswrapper[4982]: W0122 07:20:31.125077 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb2b8dcac_4ae8_4264_a0e9_67e14d9c1ab9.slice/crio-68025866d08a1fc9cf3c466cdcf7463be7358e1eed89e989917087f7e47d577d WatchSource:0}: Error finding container 68025866d08a1fc9cf3c466cdcf7463be7358e1eed89e989917087f7e47d577d: Status 404 returned error can't find the container with id 68025866d08a1fc9cf3c466cdcf7463be7358e1eed89e989917087f7e47d577d Jan 22 07:20:31 crc kubenswrapper[4982]: I0122 07:20:31.532123 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-xgj4f" event={"ID":"b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9","Type":"ContainerStarted","Data":"f9c7323d761213c9a2e77661b39b87257cbb660b961d5a20095c5a33b5885bdb"} Jan 22 07:20:31 crc kubenswrapper[4982]: I0122 07:20:31.532460 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-xgj4f" event={"ID":"b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9","Type":"ContainerStarted","Data":"68025866d08a1fc9cf3c466cdcf7463be7358e1eed89e989917087f7e47d577d"} Jan 22 07:20:31 crc kubenswrapper[4982]: I0122 07:20:31.563768 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-xgj4f" podStartSLOduration=1.563744579 podStartE2EDuration="1.563744579s" podCreationTimestamp="2026-01-22 07:20:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:20:31.551125398 +0000 UTC m=+5692.389763401" watchObservedRunningTime="2026-01-22 07:20:31.563744579 +0000 UTC m=+5692.402382592" Jan 22 07:20:32 crc kubenswrapper[4982]: I0122 07:20:32.008567 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-ht6kq" Jan 22 07:20:32 crc kubenswrapper[4982]: I0122 07:20:32.131500 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 22 07:20:32 crc kubenswrapper[4982]: I0122 07:20:32.131566 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 22 07:20:32 crc kubenswrapper[4982]: I0122 07:20:32.837754 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Jan 22 07:20:32 crc kubenswrapper[4982]: I0122 07:20:32.888549 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Jan 22 07:20:32 crc kubenswrapper[4982]: I0122 07:20:32.925140 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ht6kq"] Jan 22 07:20:32 crc kubenswrapper[4982]: I0122 07:20:32.925333 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-ht6kq" podUID="a531b202-6417-42a1-8036-0a3a7b68caa4" containerName="registry-server" containerID="cri-o://d2b51e77c1399e19d3413898a8d5e4e22fbb92517eaa3213d2db3db35ec58906" gracePeriod=2 Jan 22 07:20:33 crc kubenswrapper[4982]: I0122 07:20:33.557440 4982 generic.go:334] "Generic (PLEG): container finished" podID="a531b202-6417-42a1-8036-0a3a7b68caa4" 
containerID="d2b51e77c1399e19d3413898a8d5e4e22fbb92517eaa3213d2db3db35ec58906" exitCode=0 Jan 22 07:20:33 crc kubenswrapper[4982]: I0122 07:20:33.557483 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ht6kq" event={"ID":"a531b202-6417-42a1-8036-0a3a7b68caa4","Type":"ContainerDied","Data":"d2b51e77c1399e19d3413898a8d5e4e22fbb92517eaa3213d2db3db35ec58906"} Jan 22 07:20:33 crc kubenswrapper[4982]: I0122 07:20:33.582087 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jan 22 07:20:34 crc kubenswrapper[4982]: I0122 07:20:33.916065 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ht6kq" Jan 22 07:20:34 crc kubenswrapper[4982]: I0122 07:20:34.001667 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a531b202-6417-42a1-8036-0a3a7b68caa4-catalog-content\") pod \"a531b202-6417-42a1-8036-0a3a7b68caa4\" (UID: \"a531b202-6417-42a1-8036-0a3a7b68caa4\") " Jan 22 07:20:34 crc kubenswrapper[4982]: I0122 07:20:34.001967 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjf5s\" (UniqueName: \"kubernetes.io/projected/a531b202-6417-42a1-8036-0a3a7b68caa4-kube-api-access-pjf5s\") pod \"a531b202-6417-42a1-8036-0a3a7b68caa4\" (UID: \"a531b202-6417-42a1-8036-0a3a7b68caa4\") " Jan 22 07:20:34 crc kubenswrapper[4982]: I0122 07:20:34.002040 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a531b202-6417-42a1-8036-0a3a7b68caa4-utilities\") pod \"a531b202-6417-42a1-8036-0a3a7b68caa4\" (UID: \"a531b202-6417-42a1-8036-0a3a7b68caa4\") " Jan 22 07:20:34 crc kubenswrapper[4982]: I0122 07:20:34.002999 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a531b202-6417-42a1-8036-0a3a7b68caa4-utilities" (OuterVolumeSpecName: "utilities") pod "a531b202-6417-42a1-8036-0a3a7b68caa4" (UID: "a531b202-6417-42a1-8036-0a3a7b68caa4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:20:34 crc kubenswrapper[4982]: I0122 07:20:34.013090 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a531b202-6417-42a1-8036-0a3a7b68caa4-kube-api-access-pjf5s" (OuterVolumeSpecName: "kube-api-access-pjf5s") pod "a531b202-6417-42a1-8036-0a3a7b68caa4" (UID: "a531b202-6417-42a1-8036-0a3a7b68caa4"). InnerVolumeSpecName "kube-api-access-pjf5s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:20:34 crc kubenswrapper[4982]: I0122 07:20:34.051210 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a531b202-6417-42a1-8036-0a3a7b68caa4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a531b202-6417-42a1-8036-0a3a7b68caa4" (UID: "a531b202-6417-42a1-8036-0a3a7b68caa4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:20:34 crc kubenswrapper[4982]: I0122 07:20:34.105424 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a531b202-6417-42a1-8036-0a3a7b68caa4-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:34 crc kubenswrapper[4982]: I0122 07:20:34.105451 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjf5s\" (UniqueName: \"kubernetes.io/projected/a531b202-6417-42a1-8036-0a3a7b68caa4-kube-api-access-pjf5s\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:34 crc kubenswrapper[4982]: I0122 07:20:34.105462 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a531b202-6417-42a1-8036-0a3a7b68caa4-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:34 crc kubenswrapper[4982]: I0122 07:20:34.569441 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ht6kq" Jan 22 07:20:34 crc kubenswrapper[4982]: I0122 07:20:34.571703 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ht6kq" event={"ID":"a531b202-6417-42a1-8036-0a3a7b68caa4","Type":"ContainerDied","Data":"b9ba92e4dab4ff201a01128d534c231a7b8df4a25cd071479d41e5c33ddbc170"} Jan 22 07:20:34 crc kubenswrapper[4982]: I0122 07:20:34.571755 4982 scope.go:117] "RemoveContainer" containerID="d2b51e77c1399e19d3413898a8d5e4e22fbb92517eaa3213d2db3db35ec58906" Jan 22 07:20:34 crc kubenswrapper[4982]: I0122 07:20:34.616382 4982 scope.go:117] "RemoveContainer" containerID="cc9e3637809723a4daa641951eb2622d85f8cc093b387b4d4ea06d2b61695bd3" Jan 22 07:20:34 crc kubenswrapper[4982]: I0122 07:20:34.626712 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ht6kq"] Jan 22 07:20:34 crc kubenswrapper[4982]: I0122 07:20:34.643726 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-ht6kq"] Jan 22 07:20:34 crc kubenswrapper[4982]: I0122 07:20:34.644152 4982 scope.go:117] "RemoveContainer" containerID="20d3e258c153970ca3181298c669cfa7b0012cf5071a4568d73409fe0911a29b" Jan 22 07:20:35 crc kubenswrapper[4982]: I0122 07:20:35.732145 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a531b202-6417-42a1-8036-0a3a7b68caa4" path="/var/lib/kubelet/pods/a531b202-6417-42a1-8036-0a3a7b68caa4/volumes" Jan 22 07:20:36 crc kubenswrapper[4982]: I0122 07:20:36.589909 4982 generic.go:334] "Generic (PLEG): container finished" podID="b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9" containerID="f9c7323d761213c9a2e77661b39b87257cbb660b961d5a20095c5a33b5885bdb" exitCode=0 Jan 22 07:20:36 crc kubenswrapper[4982]: I0122 07:20:36.589963 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-xgj4f" event={"ID":"b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9","Type":"ContainerDied","Data":"f9c7323d761213c9a2e77661b39b87257cbb660b961d5a20095c5a33b5885bdb"} Jan 22 07:20:36 crc kubenswrapper[4982]: I0122 07:20:36.850648 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 22 07:20:36 crc kubenswrapper[4982]: I0122 07:20:36.851322 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 22 07:20:37 crc kubenswrapper[4982]: I0122 07:20:37.131001 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openstack/nova-metadata-0" Jan 22 07:20:37 crc kubenswrapper[4982]: I0122 07:20:37.131131 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 22 07:20:37 crc kubenswrapper[4982]: I0122 07:20:37.934411 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6ad2f1df-87c2-4d25-aff1-e4e59b43df3f" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.66:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 22 07:20:37 crc kubenswrapper[4982]: I0122 07:20:37.934977 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6ad2f1df-87c2-4d25-aff1-e4e59b43df3f" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.66:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 22 07:20:38 crc kubenswrapper[4982]: I0122 07:20:38.066630 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-xgj4f" Jan 22 07:20:38 crc kubenswrapper[4982]: I0122 07:20:38.075831 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9-scripts\") pod \"b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9\" (UID: \"b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9\") " Jan 22 07:20:38 crc kubenswrapper[4982]: I0122 07:20:38.075955 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sh9wl\" (UniqueName: \"kubernetes.io/projected/b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9-kube-api-access-sh9wl\") pod \"b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9\" (UID: \"b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9\") " Jan 22 07:20:38 crc kubenswrapper[4982]: I0122 07:20:38.076012 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9-config-data\") pod \"b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9\" (UID: \"b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9\") " Jan 22 07:20:38 crc kubenswrapper[4982]: I0122 07:20:38.076094 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9-combined-ca-bundle\") pod \"b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9\" (UID: \"b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9\") " Jan 22 07:20:38 crc kubenswrapper[4982]: I0122 07:20:38.081715 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9-kube-api-access-sh9wl" (OuterVolumeSpecName: "kube-api-access-sh9wl") pod "b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9" (UID: "b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9"). InnerVolumeSpecName "kube-api-access-sh9wl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:20:38 crc kubenswrapper[4982]: I0122 07:20:38.087804 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9-scripts" (OuterVolumeSpecName: "scripts") pod "b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9" (UID: "b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:20:38 crc kubenswrapper[4982]: I0122 07:20:38.111422 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9" (UID: "b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:20:38 crc kubenswrapper[4982]: I0122 07:20:38.151598 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9-config-data" (OuterVolumeSpecName: "config-data") pod "b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9" (UID: "b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:20:38 crc kubenswrapper[4982]: I0122 07:20:38.201658 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:38 crc kubenswrapper[4982]: I0122 07:20:38.201702 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sh9wl\" (UniqueName: \"kubernetes.io/projected/b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9-kube-api-access-sh9wl\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:38 crc kubenswrapper[4982]: I0122 07:20:38.201717 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:38 crc kubenswrapper[4982]: I0122 07:20:38.201733 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:38 crc kubenswrapper[4982]: I0122 07:20:38.240330 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.67:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 22 07:20:38 crc kubenswrapper[4982]: I0122 07:20:38.241342 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.67:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 22 07:20:38 crc kubenswrapper[4982]: I0122 07:20:38.609258 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-xgj4f" event={"ID":"b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9","Type":"ContainerDied","Data":"68025866d08a1fc9cf3c466cdcf7463be7358e1eed89e989917087f7e47d577d"} Jan 22 07:20:38 crc kubenswrapper[4982]: I0122 07:20:38.609307 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="68025866d08a1fc9cf3c466cdcf7463be7358e1eed89e989917087f7e47d577d" Jan 22 07:20:38 crc kubenswrapper[4982]: I0122 07:20:38.609339 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-xgj4f" Jan 22 07:20:38 crc kubenswrapper[4982]: I0122 07:20:38.821968 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 07:20:38 crc kubenswrapper[4982]: I0122 07:20:38.822277 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="8ce4be60-2a12-4a46-92eb-d2289fcdc425" containerName="nova-scheduler-scheduler" containerID="cri-o://712096cc2660c5263809454e00765829c670ec7d7c0182266273b0ae97267cef" gracePeriod=30 Jan 22 07:20:38 crc kubenswrapper[4982]: I0122 07:20:38.839469 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 22 07:20:38 crc kubenswrapper[4982]: I0122 07:20:38.839935 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6ad2f1df-87c2-4d25-aff1-e4e59b43df3f" containerName="nova-api-api" containerID="cri-o://43c5af1f104e8f84aa52324894eda67c481a506b1849e68ebc9d20e1b957d710" gracePeriod=30 Jan 22 07:20:38 crc kubenswrapper[4982]: I0122 07:20:38.840177 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6ad2f1df-87c2-4d25-aff1-e4e59b43df3f" containerName="nova-api-log" containerID="cri-o://6153962d7833b0aeb0871a316cf1d78546d82f1b5794b37b2b3dc08ac6a7dd18" gracePeriod=30 Jan 22 07:20:38 crc kubenswrapper[4982]: I0122 07:20:38.869803 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 07:20:38 crc kubenswrapper[4982]: I0122 07:20:38.870222 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73" containerName="nova-metadata-metadata" containerID="cri-o://5e750ce441a2b7b4ba5a9df1514d067e7ce683ea26620af49e89286c133b8f84" gracePeriod=30 Jan 22 07:20:38 crc kubenswrapper[4982]: I0122 07:20:38.870594 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73" containerName="nova-metadata-log" containerID="cri-o://b9f1c01d49d826ec29b2befcafef894a638668394e1d99e1d4cad22bddde847f" gracePeriod=30 Jan 22 07:20:39 crc kubenswrapper[4982]: I0122 07:20:39.618682 4982 generic.go:334] "Generic (PLEG): container finished" podID="6ad2f1df-87c2-4d25-aff1-e4e59b43df3f" containerID="6153962d7833b0aeb0871a316cf1d78546d82f1b5794b37b2b3dc08ac6a7dd18" exitCode=143 Jan 22 07:20:39 crc kubenswrapper[4982]: I0122 07:20:39.618790 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6ad2f1df-87c2-4d25-aff1-e4e59b43df3f","Type":"ContainerDied","Data":"6153962d7833b0aeb0871a316cf1d78546d82f1b5794b37b2b3dc08ac6a7dd18"} Jan 22 07:20:39 crc kubenswrapper[4982]: I0122 07:20:39.620936 4982 generic.go:334] "Generic (PLEG): container finished" podID="8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73" containerID="b9f1c01d49d826ec29b2befcafef894a638668394e1d99e1d4cad22bddde847f" exitCode=143 Jan 22 07:20:39 crc kubenswrapper[4982]: I0122 07:20:39.620976 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73","Type":"ContainerDied","Data":"b9f1c01d49d826ec29b2befcafef894a638668394e1d99e1d4cad22bddde847f"} Jan 22 07:20:41 crc kubenswrapper[4982]: I0122 07:20:41.642226 4982 generic.go:334] "Generic (PLEG): container finished" podID="8ce4be60-2a12-4a46-92eb-d2289fcdc425" 
containerID="712096cc2660c5263809454e00765829c670ec7d7c0182266273b0ae97267cef" exitCode=0 Jan 22 07:20:41 crc kubenswrapper[4982]: I0122 07:20:41.642308 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8ce4be60-2a12-4a46-92eb-d2289fcdc425","Type":"ContainerDied","Data":"712096cc2660c5263809454e00765829c670ec7d7c0182266273b0ae97267cef"} Jan 22 07:20:41 crc kubenswrapper[4982]: I0122 07:20:41.905108 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 07:20:42 crc kubenswrapper[4982]: I0122 07:20:42.068457 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sdhfw\" (UniqueName: \"kubernetes.io/projected/8ce4be60-2a12-4a46-92eb-d2289fcdc425-kube-api-access-sdhfw\") pod \"8ce4be60-2a12-4a46-92eb-d2289fcdc425\" (UID: \"8ce4be60-2a12-4a46-92eb-d2289fcdc425\") " Jan 22 07:20:42 crc kubenswrapper[4982]: I0122 07:20:42.068591 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ce4be60-2a12-4a46-92eb-d2289fcdc425-combined-ca-bundle\") pod \"8ce4be60-2a12-4a46-92eb-d2289fcdc425\" (UID: \"8ce4be60-2a12-4a46-92eb-d2289fcdc425\") " Jan 22 07:20:42 crc kubenswrapper[4982]: I0122 07:20:42.068647 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ce4be60-2a12-4a46-92eb-d2289fcdc425-config-data\") pod \"8ce4be60-2a12-4a46-92eb-d2289fcdc425\" (UID: \"8ce4be60-2a12-4a46-92eb-d2289fcdc425\") " Jan 22 07:20:42 crc kubenswrapper[4982]: I0122 07:20:42.074398 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ce4be60-2a12-4a46-92eb-d2289fcdc425-kube-api-access-sdhfw" (OuterVolumeSpecName: "kube-api-access-sdhfw") pod "8ce4be60-2a12-4a46-92eb-d2289fcdc425" (UID: "8ce4be60-2a12-4a46-92eb-d2289fcdc425"). InnerVolumeSpecName "kube-api-access-sdhfw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:20:42 crc kubenswrapper[4982]: I0122 07:20:42.094020 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ce4be60-2a12-4a46-92eb-d2289fcdc425-config-data" (OuterVolumeSpecName: "config-data") pod "8ce4be60-2a12-4a46-92eb-d2289fcdc425" (UID: "8ce4be60-2a12-4a46-92eb-d2289fcdc425"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:20:42 crc kubenswrapper[4982]: I0122 07:20:42.097103 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ce4be60-2a12-4a46-92eb-d2289fcdc425-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8ce4be60-2a12-4a46-92eb-d2289fcdc425" (UID: "8ce4be60-2a12-4a46-92eb-d2289fcdc425"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:20:42 crc kubenswrapper[4982]: I0122 07:20:42.170383 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sdhfw\" (UniqueName: \"kubernetes.io/projected/8ce4be60-2a12-4a46-92eb-d2289fcdc425-kube-api-access-sdhfw\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:42 crc kubenswrapper[4982]: I0122 07:20:42.170422 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ce4be60-2a12-4a46-92eb-d2289fcdc425-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:42 crc kubenswrapper[4982]: I0122 07:20:42.170433 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ce4be60-2a12-4a46-92eb-d2289fcdc425-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:42 crc kubenswrapper[4982]: I0122 07:20:42.652945 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8ce4be60-2a12-4a46-92eb-d2289fcdc425","Type":"ContainerDied","Data":"3fd8ae39fe7bc81938928186bff259a852df6b90133454d783b9f847951ffdc0"} Jan 22 07:20:42 crc kubenswrapper[4982]: I0122 07:20:42.653222 4982 scope.go:117] "RemoveContainer" containerID="712096cc2660c5263809454e00765829c670ec7d7c0182266273b0ae97267cef" Jan 22 07:20:42 crc kubenswrapper[4982]: I0122 07:20:42.653230 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 07:20:42 crc kubenswrapper[4982]: I0122 07:20:42.689744 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 07:20:42 crc kubenswrapper[4982]: I0122 07:20:42.706345 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 07:20:42 crc kubenswrapper[4982]: I0122 07:20:42.739293 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 07:20:42 crc kubenswrapper[4982]: E0122 07:20:42.742337 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a531b202-6417-42a1-8036-0a3a7b68caa4" containerName="registry-server" Jan 22 07:20:42 crc kubenswrapper[4982]: I0122 07:20:42.742379 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="a531b202-6417-42a1-8036-0a3a7b68caa4" containerName="registry-server" Jan 22 07:20:42 crc kubenswrapper[4982]: E0122 07:20:42.742407 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9" containerName="nova-manage" Jan 22 07:20:42 crc kubenswrapper[4982]: I0122 07:20:42.742414 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9" containerName="nova-manage" Jan 22 07:20:42 crc kubenswrapper[4982]: E0122 07:20:42.742428 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a531b202-6417-42a1-8036-0a3a7b68caa4" containerName="extract-content" Jan 22 07:20:42 crc kubenswrapper[4982]: I0122 07:20:42.742434 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="a531b202-6417-42a1-8036-0a3a7b68caa4" containerName="extract-content" Jan 22 07:20:42 crc kubenswrapper[4982]: E0122 07:20:42.742442 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ce4be60-2a12-4a46-92eb-d2289fcdc425" containerName="nova-scheduler-scheduler" Jan 22 07:20:42 crc kubenswrapper[4982]: I0122 07:20:42.742450 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ce4be60-2a12-4a46-92eb-d2289fcdc425" 
containerName="nova-scheduler-scheduler" Jan 22 07:20:42 crc kubenswrapper[4982]: E0122 07:20:42.742467 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a531b202-6417-42a1-8036-0a3a7b68caa4" containerName="extract-utilities" Jan 22 07:20:42 crc kubenswrapper[4982]: I0122 07:20:42.742475 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="a531b202-6417-42a1-8036-0a3a7b68caa4" containerName="extract-utilities" Jan 22 07:20:42 crc kubenswrapper[4982]: I0122 07:20:42.742656 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ce4be60-2a12-4a46-92eb-d2289fcdc425" containerName="nova-scheduler-scheduler" Jan 22 07:20:42 crc kubenswrapper[4982]: I0122 07:20:42.742672 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9" containerName="nova-manage" Jan 22 07:20:42 crc kubenswrapper[4982]: I0122 07:20:42.742689 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="a531b202-6417-42a1-8036-0a3a7b68caa4" containerName="registry-server" Jan 22 07:20:42 crc kubenswrapper[4982]: I0122 07:20:42.743387 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 07:20:42 crc kubenswrapper[4982]: I0122 07:20:42.745576 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Jan 22 07:20:42 crc kubenswrapper[4982]: I0122 07:20:42.765538 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 07:20:42 crc kubenswrapper[4982]: I0122 07:20:42.899939 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2965fcc-c5e4-4066-ade7-4262cbca707f-config-data\") pod \"nova-scheduler-0\" (UID: \"c2965fcc-c5e4-4066-ade7-4262cbca707f\") " pod="openstack/nova-scheduler-0" Jan 22 07:20:42 crc kubenswrapper[4982]: I0122 07:20:42.900112 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2965fcc-c5e4-4066-ade7-4262cbca707f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c2965fcc-c5e4-4066-ade7-4262cbca707f\") " pod="openstack/nova-scheduler-0" Jan 22 07:20:42 crc kubenswrapper[4982]: I0122 07:20:42.900154 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7txcw\" (UniqueName: \"kubernetes.io/projected/c2965fcc-c5e4-4066-ade7-4262cbca707f-kube-api-access-7txcw\") pod \"nova-scheduler-0\" (UID: \"c2965fcc-c5e4-4066-ade7-4262cbca707f\") " pod="openstack/nova-scheduler-0" Jan 22 07:20:43 crc kubenswrapper[4982]: I0122 07:20:43.001566 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2965fcc-c5e4-4066-ade7-4262cbca707f-config-data\") pod \"nova-scheduler-0\" (UID: \"c2965fcc-c5e4-4066-ade7-4262cbca707f\") " pod="openstack/nova-scheduler-0" Jan 22 07:20:43 crc kubenswrapper[4982]: I0122 07:20:43.001814 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2965fcc-c5e4-4066-ade7-4262cbca707f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c2965fcc-c5e4-4066-ade7-4262cbca707f\") " pod="openstack/nova-scheduler-0" Jan 22 07:20:43 crc kubenswrapper[4982]: I0122 07:20:43.001924 4982 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-7txcw\" (UniqueName: \"kubernetes.io/projected/c2965fcc-c5e4-4066-ade7-4262cbca707f-kube-api-access-7txcw\") pod \"nova-scheduler-0\" (UID: \"c2965fcc-c5e4-4066-ade7-4262cbca707f\") " pod="openstack/nova-scheduler-0" Jan 22 07:20:43 crc kubenswrapper[4982]: I0122 07:20:43.005950 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2965fcc-c5e4-4066-ade7-4262cbca707f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c2965fcc-c5e4-4066-ade7-4262cbca707f\") " pod="openstack/nova-scheduler-0" Jan 22 07:20:43 crc kubenswrapper[4982]: I0122 07:20:43.006160 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2965fcc-c5e4-4066-ade7-4262cbca707f-config-data\") pod \"nova-scheduler-0\" (UID: \"c2965fcc-c5e4-4066-ade7-4262cbca707f\") " pod="openstack/nova-scheduler-0" Jan 22 07:20:43 crc kubenswrapper[4982]: I0122 07:20:43.021170 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7txcw\" (UniqueName: \"kubernetes.io/projected/c2965fcc-c5e4-4066-ade7-4262cbca707f-kube-api-access-7txcw\") pod \"nova-scheduler-0\" (UID: \"c2965fcc-c5e4-4066-ade7-4262cbca707f\") " pod="openstack/nova-scheduler-0" Jan 22 07:20:43 crc kubenswrapper[4982]: I0122 07:20:43.063640 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 07:20:43 crc kubenswrapper[4982]: I0122 07:20:43.323434 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 07:20:43 crc kubenswrapper[4982]: I0122 07:20:43.666117 4982 generic.go:334] "Generic (PLEG): container finished" podID="8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73" containerID="5e750ce441a2b7b4ba5a9df1514d067e7ce683ea26620af49e89286c133b8f84" exitCode=0 Jan 22 07:20:43 crc kubenswrapper[4982]: I0122 07:20:43.666174 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73","Type":"ContainerDied","Data":"5e750ce441a2b7b4ba5a9df1514d067e7ce683ea26620af49e89286c133b8f84"} Jan 22 07:20:43 crc kubenswrapper[4982]: I0122 07:20:43.669881 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c2965fcc-c5e4-4066-ade7-4262cbca707f","Type":"ContainerStarted","Data":"1c5bd916cf142d700fe5897fbf41c569870331decd80db892f92290d68cec4a1"} Jan 22 07:20:43 crc kubenswrapper[4982]: I0122 07:20:43.669954 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c2965fcc-c5e4-4066-ade7-4262cbca707f","Type":"ContainerStarted","Data":"54ea072336e85ceeddfa12db976fdf0cb5e2697b0c98b5e61ebe8ba0b75ff7c5"} Jan 22 07:20:43 crc kubenswrapper[4982]: I0122 07:20:43.671759 4982 generic.go:334] "Generic (PLEG): container finished" podID="6ad2f1df-87c2-4d25-aff1-e4e59b43df3f" containerID="43c5af1f104e8f84aa52324894eda67c481a506b1849e68ebc9d20e1b957d710" exitCode=0 Jan 22 07:20:43 crc kubenswrapper[4982]: I0122 07:20:43.671795 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6ad2f1df-87c2-4d25-aff1-e4e59b43df3f","Type":"ContainerDied","Data":"43c5af1f104e8f84aa52324894eda67c481a506b1849e68ebc9d20e1b957d710"} Jan 22 07:20:43 crc kubenswrapper[4982]: I0122 07:20:43.733105 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="8ce4be60-2a12-4a46-92eb-d2289fcdc425" path="/var/lib/kubelet/pods/8ce4be60-2a12-4a46-92eb-d2289fcdc425/volumes" Jan 22 07:20:44 crc kubenswrapper[4982]: I0122 07:20:44.888316 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 07:20:44 crc kubenswrapper[4982]: I0122 07:20:44.903608 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 22 07:20:44 crc kubenswrapper[4982]: I0122 07:20:44.920891 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.920842255 podStartE2EDuration="2.920842255s" podCreationTimestamp="2026-01-22 07:20:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:20:43.68615399 +0000 UTC m=+5704.524792093" watchObservedRunningTime="2026-01-22 07:20:44.920842255 +0000 UTC m=+5705.759480288" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.047821 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6ad2f1df-87c2-4d25-aff1-e4e59b43df3f-logs\") pod \"6ad2f1df-87c2-4d25-aff1-e4e59b43df3f\" (UID: \"6ad2f1df-87c2-4d25-aff1-e4e59b43df3f\") " Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.047950 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ad2f1df-87c2-4d25-aff1-e4e59b43df3f-combined-ca-bundle\") pod \"6ad2f1df-87c2-4d25-aff1-e4e59b43df3f\" (UID: \"6ad2f1df-87c2-4d25-aff1-e4e59b43df3f\") " Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.047982 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73-logs\") pod \"8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73\" (UID: \"8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73\") " Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.048008 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ad2f1df-87c2-4d25-aff1-e4e59b43df3f-config-data\") pod \"6ad2f1df-87c2-4d25-aff1-e4e59b43df3f\" (UID: \"6ad2f1df-87c2-4d25-aff1-e4e59b43df3f\") " Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.048039 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jpw42\" (UniqueName: \"kubernetes.io/projected/6ad2f1df-87c2-4d25-aff1-e4e59b43df3f-kube-api-access-jpw42\") pod \"6ad2f1df-87c2-4d25-aff1-e4e59b43df3f\" (UID: \"6ad2f1df-87c2-4d25-aff1-e4e59b43df3f\") " Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.048079 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m6lr8\" (UniqueName: \"kubernetes.io/projected/8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73-kube-api-access-m6lr8\") pod \"8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73\" (UID: \"8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73\") " Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.048451 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73-logs" (OuterVolumeSpecName: "logs") pod "8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73" (UID: "8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.048780 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73-config-data\") pod \"8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73\" (UID: \"8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73\") " Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.048841 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73-combined-ca-bundle\") pod \"8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73\" (UID: \"8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73\") " Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.049203 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73-logs\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.050183 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ad2f1df-87c2-4d25-aff1-e4e59b43df3f-logs" (OuterVolumeSpecName: "logs") pod "6ad2f1df-87c2-4d25-aff1-e4e59b43df3f" (UID: "6ad2f1df-87c2-4d25-aff1-e4e59b43df3f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.056829 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73-kube-api-access-m6lr8" (OuterVolumeSpecName: "kube-api-access-m6lr8") pod "8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73" (UID: "8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73"). InnerVolumeSpecName "kube-api-access-m6lr8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.066916 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ad2f1df-87c2-4d25-aff1-e4e59b43df3f-kube-api-access-jpw42" (OuterVolumeSpecName: "kube-api-access-jpw42") pod "6ad2f1df-87c2-4d25-aff1-e4e59b43df3f" (UID: "6ad2f1df-87c2-4d25-aff1-e4e59b43df3f"). InnerVolumeSpecName "kube-api-access-jpw42". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.074057 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73" (UID: "8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.074442 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ad2f1df-87c2-4d25-aff1-e4e59b43df3f-config-data" (OuterVolumeSpecName: "config-data") pod "6ad2f1df-87c2-4d25-aff1-e4e59b43df3f" (UID: "6ad2f1df-87c2-4d25-aff1-e4e59b43df3f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.078289 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73-config-data" (OuterVolumeSpecName: "config-data") pod "8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73" (UID: "8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.080099 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ad2f1df-87c2-4d25-aff1-e4e59b43df3f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6ad2f1df-87c2-4d25-aff1-e4e59b43df3f" (UID: "6ad2f1df-87c2-4d25-aff1-e4e59b43df3f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.150676 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ad2f1df-87c2-4d25-aff1-e4e59b43df3f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.150723 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ad2f1df-87c2-4d25-aff1-e4e59b43df3f-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.150734 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jpw42\" (UniqueName: \"kubernetes.io/projected/6ad2f1df-87c2-4d25-aff1-e4e59b43df3f-kube-api-access-jpw42\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.150787 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m6lr8\" (UniqueName: \"kubernetes.io/projected/8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73-kube-api-access-m6lr8\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.150804 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.150814 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.150826 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6ad2f1df-87c2-4d25-aff1-e4e59b43df3f-logs\") on node \"crc\" DevicePath \"\"" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.690509 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.690571 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6ad2f1df-87c2-4d25-aff1-e4e59b43df3f","Type":"ContainerDied","Data":"44299e0ec326ab5daa69e4f49a1f0310f210673488732c30d7d0baf3ef397e0b"} Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.690912 4982 scope.go:117] "RemoveContainer" containerID="43c5af1f104e8f84aa52324894eda67c481a506b1849e68ebc9d20e1b957d710" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.694884 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73","Type":"ContainerDied","Data":"c524a28d2be474c4872f137fe3aa65ebcab07b7c056511fb4508a48dac61f19c"} Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.694969 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.724534 4982 scope.go:117] "RemoveContainer" containerID="6153962d7833b0aeb0871a316cf1d78546d82f1b5794b37b2b3dc08ac6a7dd18" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.751211 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.755799 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.767100 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.791384 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.792714 4982 scope.go:117] "RemoveContainer" containerID="5e750ce441a2b7b4ba5a9df1514d067e7ce683ea26620af49e89286c133b8f84" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.814330 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 22 07:20:45 crc kubenswrapper[4982]: E0122 07:20:45.841307 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73" containerName="nova-metadata-log" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.841355 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73" containerName="nova-metadata-log" Jan 22 07:20:45 crc kubenswrapper[4982]: E0122 07:20:45.841390 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73" containerName="nova-metadata-metadata" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.841397 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73" containerName="nova-metadata-metadata" Jan 22 07:20:45 crc kubenswrapper[4982]: E0122 07:20:45.841433 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ad2f1df-87c2-4d25-aff1-e4e59b43df3f" containerName="nova-api-api" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.841439 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ad2f1df-87c2-4d25-aff1-e4e59b43df3f" containerName="nova-api-api" Jan 22 07:20:45 crc kubenswrapper[4982]: E0122 07:20:45.841462 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ad2f1df-87c2-4d25-aff1-e4e59b43df3f" containerName="nova-api-log" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 
07:20:45.841476 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ad2f1df-87c2-4d25-aff1-e4e59b43df3f" containerName="nova-api-log" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.842118 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ad2f1df-87c2-4d25-aff1-e4e59b43df3f" containerName="nova-api-api" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.842168 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73" containerName="nova-metadata-metadata" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.842195 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ad2f1df-87c2-4d25-aff1-e4e59b43df3f" containerName="nova-api-log" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.842211 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73" containerName="nova-metadata-log" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.844167 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.852820 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.862934 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.883194 4982 scope.go:117] "RemoveContainer" containerID="b9f1c01d49d826ec29b2befcafef894a638668394e1d99e1d4cad22bddde847f" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.906567 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.923113 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.938464 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.941964 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.988890 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/788263eb-cb47-4b21-aa3c-46a3eab75a99-logs\") pod \"nova-api-0\" (UID: \"788263eb-cb47-4b21-aa3c-46a3eab75a99\") " pod="openstack/nova-api-0" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.989073 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zs7r\" (UniqueName: \"kubernetes.io/projected/788263eb-cb47-4b21-aa3c-46a3eab75a99-kube-api-access-5zs7r\") pod \"nova-api-0\" (UID: \"788263eb-cb47-4b21-aa3c-46a3eab75a99\") " pod="openstack/nova-api-0" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.989122 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/788263eb-cb47-4b21-aa3c-46a3eab75a99-config-data\") pod \"nova-api-0\" (UID: \"788263eb-cb47-4b21-aa3c-46a3eab75a99\") " pod="openstack/nova-api-0" Jan 22 07:20:45 crc kubenswrapper[4982]: I0122 07:20:45.989232 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/788263eb-cb47-4b21-aa3c-46a3eab75a99-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"788263eb-cb47-4b21-aa3c-46a3eab75a99\") " pod="openstack/nova-api-0" Jan 22 07:20:46 crc kubenswrapper[4982]: I0122 07:20:46.090663 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/788263eb-cb47-4b21-aa3c-46a3eab75a99-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"788263eb-cb47-4b21-aa3c-46a3eab75a99\") " pod="openstack/nova-api-0" Jan 22 07:20:46 crc kubenswrapper[4982]: I0122 07:20:46.090757 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35d112d5-9f8f-4128-b4f5-a964e5b7f4e7-config-data\") pod \"nova-metadata-0\" (UID: \"35d112d5-9f8f-4128-b4f5-a964e5b7f4e7\") " pod="openstack/nova-metadata-0" Jan 22 07:20:46 crc kubenswrapper[4982]: I0122 07:20:46.090818 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/788263eb-cb47-4b21-aa3c-46a3eab75a99-logs\") pod \"nova-api-0\" (UID: \"788263eb-cb47-4b21-aa3c-46a3eab75a99\") " pod="openstack/nova-api-0" Jan 22 07:20:46 crc kubenswrapper[4982]: I0122 07:20:46.090949 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hq9lm\" (UniqueName: \"kubernetes.io/projected/35d112d5-9f8f-4128-b4f5-a964e5b7f4e7-kube-api-access-hq9lm\") pod \"nova-metadata-0\" (UID: \"35d112d5-9f8f-4128-b4f5-a964e5b7f4e7\") " pod="openstack/nova-metadata-0" Jan 22 07:20:46 crc kubenswrapper[4982]: I0122 07:20:46.090976 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/35d112d5-9f8f-4128-b4f5-a964e5b7f4e7-logs\") pod \"nova-metadata-0\" (UID: \"35d112d5-9f8f-4128-b4f5-a964e5b7f4e7\") " pod="openstack/nova-metadata-0" Jan 22 07:20:46 crc kubenswrapper[4982]: I0122 07:20:46.091008 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zs7r\" (UniqueName: \"kubernetes.io/projected/788263eb-cb47-4b21-aa3c-46a3eab75a99-kube-api-access-5zs7r\") pod \"nova-api-0\" (UID: \"788263eb-cb47-4b21-aa3c-46a3eab75a99\") " pod="openstack/nova-api-0" Jan 22 07:20:46 crc kubenswrapper[4982]: I0122 07:20:46.091030 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/788263eb-cb47-4b21-aa3c-46a3eab75a99-config-data\") pod \"nova-api-0\" (UID: \"788263eb-cb47-4b21-aa3c-46a3eab75a99\") " pod="openstack/nova-api-0" Jan 22 07:20:46 crc kubenswrapper[4982]: I0122 07:20:46.091057 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35d112d5-9f8f-4128-b4f5-a964e5b7f4e7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"35d112d5-9f8f-4128-b4f5-a964e5b7f4e7\") " pod="openstack/nova-metadata-0" Jan 22 07:20:46 crc kubenswrapper[4982]: I0122 07:20:46.092010 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/788263eb-cb47-4b21-aa3c-46a3eab75a99-logs\") pod \"nova-api-0\" (UID: \"788263eb-cb47-4b21-aa3c-46a3eab75a99\") " pod="openstack/nova-api-0" Jan 22 07:20:46 crc kubenswrapper[4982]: I0122 07:20:46.097007 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/788263eb-cb47-4b21-aa3c-46a3eab75a99-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"788263eb-cb47-4b21-aa3c-46a3eab75a99\") " pod="openstack/nova-api-0" Jan 22 07:20:46 crc kubenswrapper[4982]: I0122 07:20:46.106600 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/788263eb-cb47-4b21-aa3c-46a3eab75a99-config-data\") pod \"nova-api-0\" (UID: \"788263eb-cb47-4b21-aa3c-46a3eab75a99\") " pod="openstack/nova-api-0" Jan 22 07:20:46 crc kubenswrapper[4982]: I0122 07:20:46.108643 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zs7r\" (UniqueName: \"kubernetes.io/projected/788263eb-cb47-4b21-aa3c-46a3eab75a99-kube-api-access-5zs7r\") pod \"nova-api-0\" (UID: \"788263eb-cb47-4b21-aa3c-46a3eab75a99\") " pod="openstack/nova-api-0" Jan 22 07:20:46 crc kubenswrapper[4982]: I0122 07:20:46.180774 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 22 07:20:46 crc kubenswrapper[4982]: I0122 07:20:46.193084 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hq9lm\" (UniqueName: \"kubernetes.io/projected/35d112d5-9f8f-4128-b4f5-a964e5b7f4e7-kube-api-access-hq9lm\") pod \"nova-metadata-0\" (UID: \"35d112d5-9f8f-4128-b4f5-a964e5b7f4e7\") " pod="openstack/nova-metadata-0" Jan 22 07:20:46 crc kubenswrapper[4982]: I0122 07:20:46.193142 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35d112d5-9f8f-4128-b4f5-a964e5b7f4e7-logs\") pod \"nova-metadata-0\" (UID: \"35d112d5-9f8f-4128-b4f5-a964e5b7f4e7\") " pod="openstack/nova-metadata-0" Jan 22 07:20:46 crc kubenswrapper[4982]: I0122 07:20:46.193175 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35d112d5-9f8f-4128-b4f5-a964e5b7f4e7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"35d112d5-9f8f-4128-b4f5-a964e5b7f4e7\") " pod="openstack/nova-metadata-0" Jan 22 07:20:46 crc kubenswrapper[4982]: I0122 07:20:46.193222 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35d112d5-9f8f-4128-b4f5-a964e5b7f4e7-config-data\") pod \"nova-metadata-0\" (UID: \"35d112d5-9f8f-4128-b4f5-a964e5b7f4e7\") " pod="openstack/nova-metadata-0" Jan 22 07:20:46 crc kubenswrapper[4982]: I0122 07:20:46.193607 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35d112d5-9f8f-4128-b4f5-a964e5b7f4e7-logs\") pod \"nova-metadata-0\" (UID: \"35d112d5-9f8f-4128-b4f5-a964e5b7f4e7\") " pod="openstack/nova-metadata-0" Jan 22 07:20:46 crc kubenswrapper[4982]: I0122 07:20:46.197193 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35d112d5-9f8f-4128-b4f5-a964e5b7f4e7-config-data\") pod \"nova-metadata-0\" (UID: \"35d112d5-9f8f-4128-b4f5-a964e5b7f4e7\") " pod="openstack/nova-metadata-0" Jan 22 07:20:46 crc kubenswrapper[4982]: I0122 07:20:46.198114 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35d112d5-9f8f-4128-b4f5-a964e5b7f4e7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"35d112d5-9f8f-4128-b4f5-a964e5b7f4e7\") " pod="openstack/nova-metadata-0" Jan 22 07:20:46 crc kubenswrapper[4982]: I0122 07:20:46.210303 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hq9lm\" (UniqueName: \"kubernetes.io/projected/35d112d5-9f8f-4128-b4f5-a964e5b7f4e7-kube-api-access-hq9lm\") pod \"nova-metadata-0\" (UID: \"35d112d5-9f8f-4128-b4f5-a964e5b7f4e7\") " pod="openstack/nova-metadata-0" Jan 22 07:20:46 crc kubenswrapper[4982]: I0122 07:20:46.256760 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 07:20:46 crc kubenswrapper[4982]: I0122 07:20:46.652006 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 22 07:20:46 crc kubenswrapper[4982]: I0122 07:20:46.708673 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"788263eb-cb47-4b21-aa3c-46a3eab75a99","Type":"ContainerStarted","Data":"910c9c43b0a9fd69834c3aad23314d46a026eee4b5e4a32af3bec04266e92e27"} Jan 22 07:20:46 crc kubenswrapper[4982]: I0122 07:20:46.793459 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 07:20:46 crc kubenswrapper[4982]: W0122 07:20:46.795561 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35d112d5_9f8f_4128_b4f5_a964e5b7f4e7.slice/crio-2fb33cd3219ed5b3201f4a64d19a7052de3b6528b7143c45f6b98ba8da439705 WatchSource:0}: Error finding container 2fb33cd3219ed5b3201f4a64d19a7052de3b6528b7143c45f6b98ba8da439705: Status 404 returned error can't find the container with id 2fb33cd3219ed5b3201f4a64d19a7052de3b6528b7143c45f6b98ba8da439705 Jan 22 07:20:47 crc kubenswrapper[4982]: I0122 07:20:47.728959 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ad2f1df-87c2-4d25-aff1-e4e59b43df3f" path="/var/lib/kubelet/pods/6ad2f1df-87c2-4d25-aff1-e4e59b43df3f/volumes" Jan 22 07:20:47 crc kubenswrapper[4982]: I0122 07:20:47.729833 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73" path="/var/lib/kubelet/pods/8949ae6c-08f2-4cf9-ac0e-1cd9152f3f73/volumes" Jan 22 07:20:47 crc kubenswrapper[4982]: I0122 07:20:47.730402 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"35d112d5-9f8f-4128-b4f5-a964e5b7f4e7","Type":"ContainerStarted","Data":"7c57734f027e290dd8c4e2026f467642553d3b04de7458f4ba6839babd117413"} Jan 22 07:20:47 crc kubenswrapper[4982]: I0122 07:20:47.730429 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"35d112d5-9f8f-4128-b4f5-a964e5b7f4e7","Type":"ContainerStarted","Data":"b92e6f95b1c6e067eedc37a3546664ff0c442c14ecc77319de7bc054e5bfd68a"} Jan 22 07:20:47 crc kubenswrapper[4982]: I0122 07:20:47.730439 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"35d112d5-9f8f-4128-b4f5-a964e5b7f4e7","Type":"ContainerStarted","Data":"2fb33cd3219ed5b3201f4a64d19a7052de3b6528b7143c45f6b98ba8da439705"} Jan 22 07:20:47 crc kubenswrapper[4982]: I0122 07:20:47.730447 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"788263eb-cb47-4b21-aa3c-46a3eab75a99","Type":"ContainerStarted","Data":"f157609e35396abbb2bccc9d6be2832223119d3a177cecb456d4797f77614a75"} Jan 22 07:20:47 crc kubenswrapper[4982]: I0122 07:20:47.730456 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"788263eb-cb47-4b21-aa3c-46a3eab75a99","Type":"ContainerStarted","Data":"9079edd4e93bd6e1170499f05c3c7331a3e10916dd21024f3b3b46ef97d305f2"} Jan 22 07:20:47 crc kubenswrapper[4982]: I0122 07:20:47.752914 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.752893427 podStartE2EDuration="2.752893427s" podCreationTimestamp="2026-01-22 07:20:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:20:47.742484796 +0000 UTC m=+5708.581122819" watchObservedRunningTime="2026-01-22 07:20:47.752893427 +0000 UTC m=+5708.591531430" Jan 22 07:20:47 crc kubenswrapper[4982]: I0122 07:20:47.767526 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.767507101 podStartE2EDuration="2.767507101s" podCreationTimestamp="2026-01-22 07:20:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:20:47.76228042 +0000 UTC m=+5708.600918423" watchObservedRunningTime="2026-01-22 07:20:47.767507101 +0000 UTC m=+5708.606145104" Jan 22 07:20:48 crc kubenswrapper[4982]: I0122 07:20:48.064392 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Jan 22 07:20:51 crc kubenswrapper[4982]: I0122 07:20:51.257174 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 22 07:20:51 crc kubenswrapper[4982]: I0122 07:20:51.257575 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 22 07:20:53 crc kubenswrapper[4982]: I0122 07:20:53.063901 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Jan 22 07:20:53 crc kubenswrapper[4982]: I0122 07:20:53.102676 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Jan 22 07:20:53 crc kubenswrapper[4982]: I0122 07:20:53.812497 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jan 22 07:20:56 crc kubenswrapper[4982]: I0122 07:20:56.181270 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 22 07:20:56 crc kubenswrapper[4982]: I0122 07:20:56.181708 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 22 07:20:56 crc kubenswrapper[4982]: I0122 07:20:56.257473 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 22 07:20:56 crc kubenswrapper[4982]: I0122 07:20:56.257520 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 22 07:20:57 crc kubenswrapper[4982]: I0122 07:20:57.263094 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="788263eb-cb47-4b21-aa3c-46a3eab75a99" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.70:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 22 07:20:57 crc kubenswrapper[4982]: I0122 07:20:57.263094 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="788263eb-cb47-4b21-aa3c-46a3eab75a99" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.70:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 22 07:20:57 crc kubenswrapper[4982]: I0122 07:20:57.346130 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="35d112d5-9f8f-4128-b4f5-a964e5b7f4e7" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.71:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 22 
Jan 22 07:21:06 crc kubenswrapper[4982]: I0122 07:21:06.184275 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Jan 22 07:21:06 crc kubenswrapper[4982]: I0122 07:21:06.184620 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Jan 22 07:21:06 crc kubenswrapper[4982]: I0122 07:21:06.184977 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Jan 22 07:21:06 crc kubenswrapper[4982]: I0122 07:21:06.185047 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Jan 22 07:21:06 crc kubenswrapper[4982]: I0122 07:21:06.190128 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Jan 22 07:21:06 crc kubenswrapper[4982]: I0122 07:21:06.191733 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Jan 22 07:21:06 crc kubenswrapper[4982]: I0122 07:21:06.260805 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Jan 22 07:21:06 crc kubenswrapper[4982]: I0122 07:21:06.262289 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Jan 22 07:21:06 crc kubenswrapper[4982]: I0122 07:21:06.263374 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Jan 22 07:21:06 crc kubenswrapper[4982]: I0122 07:21:06.265207 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Jan 22 07:21:06 crc kubenswrapper[4982]: I0122 07:21:06.404287 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-795b859695-ft69v"]
Jan 22 07:21:06 crc kubenswrapper[4982]: I0122 07:21:06.405892 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-795b859695-ft69v"
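The "SyncLoop ADD/UPDATE" entries are the kubelet consuming pod events from its API-server config source. A rough outside-the-kubelet equivalent is a client-go watch on the namespace; a minimal sketch under that assumption, not the kubelet's actual plumbing:

package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
)

// watchOpenstackPods prints ADDED/MODIFIED/DELETED pod events, the same
// stream the kubelet's SyncLoop logs above as ADD/UPDATE/DELETE.
func watchOpenstackPods(cs *kubernetes.Clientset) error {
	w, err := cs.CoreV1().Pods("openstack").Watch(context.TODO(), metav1.ListOptions{})
	if err != nil {
		return err
	}
	defer w.Stop()
	for ev := range w.ResultChan() {
		fmt.Printf("event: %s %T\n", ev.Type, ev.Object)
	}
	return nil
}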
Need to start a new one" pod="openstack/dnsmasq-dns-795b859695-ft69v" Jan 22 07:21:06 crc kubenswrapper[4982]: I0122 07:21:06.418981 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-795b859695-ft69v"] Jan 22 07:21:06 crc kubenswrapper[4982]: I0122 07:21:06.599991 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0107b32f-c775-4e02-b7da-0b3ce5962efa-ovsdbserver-nb\") pod \"dnsmasq-dns-795b859695-ft69v\" (UID: \"0107b32f-c775-4e02-b7da-0b3ce5962efa\") " pod="openstack/dnsmasq-dns-795b859695-ft69v" Jan 22 07:21:06 crc kubenswrapper[4982]: I0122 07:21:06.600092 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqxkz\" (UniqueName: \"kubernetes.io/projected/0107b32f-c775-4e02-b7da-0b3ce5962efa-kube-api-access-nqxkz\") pod \"dnsmasq-dns-795b859695-ft69v\" (UID: \"0107b32f-c775-4e02-b7da-0b3ce5962efa\") " pod="openstack/dnsmasq-dns-795b859695-ft69v" Jan 22 07:21:06 crc kubenswrapper[4982]: I0122 07:21:06.600125 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0107b32f-c775-4e02-b7da-0b3ce5962efa-dns-svc\") pod \"dnsmasq-dns-795b859695-ft69v\" (UID: \"0107b32f-c775-4e02-b7da-0b3ce5962efa\") " pod="openstack/dnsmasq-dns-795b859695-ft69v" Jan 22 07:21:06 crc kubenswrapper[4982]: I0122 07:21:06.600145 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0107b32f-c775-4e02-b7da-0b3ce5962efa-ovsdbserver-sb\") pod \"dnsmasq-dns-795b859695-ft69v\" (UID: \"0107b32f-c775-4e02-b7da-0b3ce5962efa\") " pod="openstack/dnsmasq-dns-795b859695-ft69v" Jan 22 07:21:06 crc kubenswrapper[4982]: I0122 07:21:06.600195 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0107b32f-c775-4e02-b7da-0b3ce5962efa-config\") pod \"dnsmasq-dns-795b859695-ft69v\" (UID: \"0107b32f-c775-4e02-b7da-0b3ce5962efa\") " pod="openstack/dnsmasq-dns-795b859695-ft69v" Jan 22 07:21:06 crc kubenswrapper[4982]: I0122 07:21:06.703673 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0107b32f-c775-4e02-b7da-0b3ce5962efa-ovsdbserver-nb\") pod \"dnsmasq-dns-795b859695-ft69v\" (UID: \"0107b32f-c775-4e02-b7da-0b3ce5962efa\") " pod="openstack/dnsmasq-dns-795b859695-ft69v" Jan 22 07:21:06 crc kubenswrapper[4982]: I0122 07:21:06.704139 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqxkz\" (UniqueName: \"kubernetes.io/projected/0107b32f-c775-4e02-b7da-0b3ce5962efa-kube-api-access-nqxkz\") pod \"dnsmasq-dns-795b859695-ft69v\" (UID: \"0107b32f-c775-4e02-b7da-0b3ce5962efa\") " pod="openstack/dnsmasq-dns-795b859695-ft69v" Jan 22 07:21:06 crc kubenswrapper[4982]: I0122 07:21:06.704215 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0107b32f-c775-4e02-b7da-0b3ce5962efa-dns-svc\") pod \"dnsmasq-dns-795b859695-ft69v\" (UID: \"0107b32f-c775-4e02-b7da-0b3ce5962efa\") " pod="openstack/dnsmasq-dns-795b859695-ft69v" Jan 22 07:21:06 crc kubenswrapper[4982]: I0122 07:21:06.704250 4982 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0107b32f-c775-4e02-b7da-0b3ce5962efa-ovsdbserver-sb\") pod \"dnsmasq-dns-795b859695-ft69v\" (UID: \"0107b32f-c775-4e02-b7da-0b3ce5962efa\") " pod="openstack/dnsmasq-dns-795b859695-ft69v" Jan 22 07:21:06 crc kubenswrapper[4982]: I0122 07:21:06.704325 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0107b32f-c775-4e02-b7da-0b3ce5962efa-config\") pod \"dnsmasq-dns-795b859695-ft69v\" (UID: \"0107b32f-c775-4e02-b7da-0b3ce5962efa\") " pod="openstack/dnsmasq-dns-795b859695-ft69v" Jan 22 07:21:06 crc kubenswrapper[4982]: I0122 07:21:06.704685 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0107b32f-c775-4e02-b7da-0b3ce5962efa-ovsdbserver-nb\") pod \"dnsmasq-dns-795b859695-ft69v\" (UID: \"0107b32f-c775-4e02-b7da-0b3ce5962efa\") " pod="openstack/dnsmasq-dns-795b859695-ft69v" Jan 22 07:21:06 crc kubenswrapper[4982]: I0122 07:21:06.705073 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0107b32f-c775-4e02-b7da-0b3ce5962efa-dns-svc\") pod \"dnsmasq-dns-795b859695-ft69v\" (UID: \"0107b32f-c775-4e02-b7da-0b3ce5962efa\") " pod="openstack/dnsmasq-dns-795b859695-ft69v" Jan 22 07:21:06 crc kubenswrapper[4982]: I0122 07:21:06.705108 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0107b32f-c775-4e02-b7da-0b3ce5962efa-ovsdbserver-sb\") pod \"dnsmasq-dns-795b859695-ft69v\" (UID: \"0107b32f-c775-4e02-b7da-0b3ce5962efa\") " pod="openstack/dnsmasq-dns-795b859695-ft69v" Jan 22 07:21:06 crc kubenswrapper[4982]: I0122 07:21:06.708622 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0107b32f-c775-4e02-b7da-0b3ce5962efa-config\") pod \"dnsmasq-dns-795b859695-ft69v\" (UID: \"0107b32f-c775-4e02-b7da-0b3ce5962efa\") " pod="openstack/dnsmasq-dns-795b859695-ft69v" Jan 22 07:21:06 crc kubenswrapper[4982]: I0122 07:21:06.733189 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqxkz\" (UniqueName: \"kubernetes.io/projected/0107b32f-c775-4e02-b7da-0b3ce5962efa-kube-api-access-nqxkz\") pod \"dnsmasq-dns-795b859695-ft69v\" (UID: \"0107b32f-c775-4e02-b7da-0b3ce5962efa\") " pod="openstack/dnsmasq-dns-795b859695-ft69v" Jan 22 07:21:07 crc kubenswrapper[4982]: I0122 07:21:07.029227 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-795b859695-ft69v" Jan 22 07:21:07 crc kubenswrapper[4982]: I0122 07:21:07.496144 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-795b859695-ft69v"] Jan 22 07:21:07 crc kubenswrapper[4982]: I0122 07:21:07.936718 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-795b859695-ft69v" event={"ID":"0107b32f-c775-4e02-b7da-0b3ce5962efa","Type":"ContainerStarted","Data":"224a0956a2a26ed0d7ff46e6ab4cfc611c022c248def762800fada279f1a28c4"} Jan 22 07:21:08 crc kubenswrapper[4982]: I0122 07:21:08.946445 4982 generic.go:334] "Generic (PLEG): container finished" podID="0107b32f-c775-4e02-b7da-0b3ce5962efa" containerID="08160ab94d04a1560294cbf7296be24d31528d32b958407b42ca4b7039b95569" exitCode=0 Jan 22 07:21:08 crc kubenswrapper[4982]: I0122 07:21:08.946491 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-795b859695-ft69v" event={"ID":"0107b32f-c775-4e02-b7da-0b3ce5962efa","Type":"ContainerDied","Data":"08160ab94d04a1560294cbf7296be24d31528d32b958407b42ca4b7039b95569"} Jan 22 07:21:09 crc kubenswrapper[4982]: I0122 07:21:09.955768 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-795b859695-ft69v" event={"ID":"0107b32f-c775-4e02-b7da-0b3ce5962efa","Type":"ContainerStarted","Data":"27b21e85f1d3640b6ff3e32e788867934af20a1ac1044f24f5b726bb916c53e0"} Jan 22 07:21:09 crc kubenswrapper[4982]: I0122 07:21:09.956070 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-795b859695-ft69v" Jan 22 07:21:09 crc kubenswrapper[4982]: I0122 07:21:09.978836 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-795b859695-ft69v" podStartSLOduration=3.9788147350000003 podStartE2EDuration="3.978814735s" podCreationTimestamp="2026-01-22 07:21:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:21:09.974845648 +0000 UTC m=+5730.813483661" watchObservedRunningTime="2026-01-22 07:21:09.978814735 +0000 UTC m=+5730.817452738" Jan 22 07:21:17 crc kubenswrapper[4982]: I0122 07:21:17.031090 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-795b859695-ft69v" Jan 22 07:21:17 crc kubenswrapper[4982]: I0122 07:21:17.089177 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59954b654c-lllgq"] Jan 22 07:21:17 crc kubenswrapper[4982]: I0122 07:21:17.089409 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-59954b654c-lllgq" podUID="268f96af-7a77-4afb-85c5-f39480ba4968" containerName="dnsmasq-dns" containerID="cri-o://120235f9d9478b2e75972df683a673a05304f0d5e4733caa7c187b972a87fdd7" gracePeriod=10 Jan 22 07:21:18 crc kubenswrapper[4982]: I0122 07:21:18.035147 4982 generic.go:334] "Generic (PLEG): container finished" podID="268f96af-7a77-4afb-85c5-f39480ba4968" containerID="120235f9d9478b2e75972df683a673a05304f0d5e4733caa7c187b972a87fdd7" exitCode=0 Jan 22 07:21:18 crc kubenswrapper[4982]: I0122 07:21:18.035249 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59954b654c-lllgq" event={"ID":"268f96af-7a77-4afb-85c5-f39480ba4968","Type":"ContainerDied","Data":"120235f9d9478b2e75972df683a673a05304f0d5e4733caa7c187b972a87fdd7"} Jan 22 07:21:18 crc kubenswrapper[4982]: I0122 07:21:18.169736 4982 util.go:48] "No ready 
Jan 22 07:21:18 crc kubenswrapper[4982]: I0122 07:21:18.313717 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/268f96af-7a77-4afb-85c5-f39480ba4968-ovsdbserver-sb\") pod \"268f96af-7a77-4afb-85c5-f39480ba4968\" (UID: \"268f96af-7a77-4afb-85c5-f39480ba4968\") "
Jan 22 07:21:18 crc kubenswrapper[4982]: I0122 07:21:18.313748 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/268f96af-7a77-4afb-85c5-f39480ba4968-ovsdbserver-nb\") pod \"268f96af-7a77-4afb-85c5-f39480ba4968\" (UID: \"268f96af-7a77-4afb-85c5-f39480ba4968\") "
Jan 22 07:21:18 crc kubenswrapper[4982]: I0122 07:21:18.313805 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bqsr7\" (UniqueName: \"kubernetes.io/projected/268f96af-7a77-4afb-85c5-f39480ba4968-kube-api-access-bqsr7\") pod \"268f96af-7a77-4afb-85c5-f39480ba4968\" (UID: \"268f96af-7a77-4afb-85c5-f39480ba4968\") "
Jan 22 07:21:18 crc kubenswrapper[4982]: I0122 07:21:18.313837 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/268f96af-7a77-4afb-85c5-f39480ba4968-dns-svc\") pod \"268f96af-7a77-4afb-85c5-f39480ba4968\" (UID: \"268f96af-7a77-4afb-85c5-f39480ba4968\") "
Jan 22 07:21:18 crc kubenswrapper[4982]: I0122 07:21:18.314006 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/268f96af-7a77-4afb-85c5-f39480ba4968-config\") pod \"268f96af-7a77-4afb-85c5-f39480ba4968\" (UID: \"268f96af-7a77-4afb-85c5-f39480ba4968\") "
Jan 22 07:21:18 crc kubenswrapper[4982]: I0122 07:21:18.319678 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/268f96af-7a77-4afb-85c5-f39480ba4968-kube-api-access-bqsr7" (OuterVolumeSpecName: "kube-api-access-bqsr7") pod "268f96af-7a77-4afb-85c5-f39480ba4968" (UID: "268f96af-7a77-4afb-85c5-f39480ba4968"). InnerVolumeSpecName "kube-api-access-bqsr7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 07:21:18 crc kubenswrapper[4982]: I0122 07:21:18.363467 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/268f96af-7a77-4afb-85c5-f39480ba4968-config" (OuterVolumeSpecName: "config") pod "268f96af-7a77-4afb-85c5-f39480ba4968" (UID: "268f96af-7a77-4afb-85c5-f39480ba4968"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 07:21:18 crc kubenswrapper[4982]: I0122 07:21:18.363799 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/268f96af-7a77-4afb-85c5-f39480ba4968-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "268f96af-7a77-4afb-85c5-f39480ba4968" (UID: "268f96af-7a77-4afb-85c5-f39480ba4968"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 07:21:18 crc kubenswrapper[4982]: I0122 07:21:18.364013 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/268f96af-7a77-4afb-85c5-f39480ba4968-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "268f96af-7a77-4afb-85c5-f39480ba4968" (UID: "268f96af-7a77-4afb-85c5-f39480ba4968"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 07:21:18 crc kubenswrapper[4982]: I0122 07:21:18.381626 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/268f96af-7a77-4afb-85c5-f39480ba4968-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "268f96af-7a77-4afb-85c5-f39480ba4968" (UID: "268f96af-7a77-4afb-85c5-f39480ba4968"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 07:21:18 crc kubenswrapper[4982]: I0122 07:21:18.416493 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/268f96af-7a77-4afb-85c5-f39480ba4968-config\") on node \"crc\" DevicePath \"\""
Jan 22 07:21:18 crc kubenswrapper[4982]: I0122 07:21:18.416531 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/268f96af-7a77-4afb-85c5-f39480ba4968-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Jan 22 07:21:18 crc kubenswrapper[4982]: I0122 07:21:18.416547 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/268f96af-7a77-4afb-85c5-f39480ba4968-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Jan 22 07:21:18 crc kubenswrapper[4982]: I0122 07:21:18.416559 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bqsr7\" (UniqueName: \"kubernetes.io/projected/268f96af-7a77-4afb-85c5-f39480ba4968-kube-api-access-bqsr7\") on node \"crc\" DevicePath \"\""
Jan 22 07:21:18 crc kubenswrapper[4982]: I0122 07:21:18.416572 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/268f96af-7a77-4afb-85c5-f39480ba4968-dns-svc\") on node \"crc\" DevicePath \"\""
Jan 22 07:21:18 crc kubenswrapper[4982]: I0122 07:21:18.973380 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 07:21:18 crc kubenswrapper[4982]: I0122 07:21:18.973432 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 07:21:19 crc kubenswrapper[4982]: I0122 07:21:19.043955 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59954b654c-lllgq" event={"ID":"268f96af-7a77-4afb-85c5-f39480ba4968","Type":"ContainerDied","Data":"6073782d2b900315742335d44a9b76442d79d785639579c1ff19d04615c46f30"}
Jan 22 07:21:19 crc kubenswrapper[4982]: I0122 07:21:19.044006 4982 scope.go:117] "RemoveContainer" containerID="120235f9d9478b2e75972df683a673a05304f0d5e4733caa7c187b972a87fdd7"
Jan 22 07:21:19 crc kubenswrapper[4982]: I0122 07:21:19.044118 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59954b654c-lllgq"
Need to start a new one" pod="openstack/dnsmasq-dns-59954b654c-lllgq" Jan 22 07:21:19 crc kubenswrapper[4982]: I0122 07:21:19.073939 4982 scope.go:117] "RemoveContainer" containerID="e841a55ecef172e405741273a9f3c10b2d646fe228b371fd81c93f9918617296" Jan 22 07:21:19 crc kubenswrapper[4982]: I0122 07:21:19.079444 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59954b654c-lllgq"] Jan 22 07:21:19 crc kubenswrapper[4982]: I0122 07:21:19.087095 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-59954b654c-lllgq"] Jan 22 07:21:19 crc kubenswrapper[4982]: I0122 07:21:19.730062 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="268f96af-7a77-4afb-85c5-f39480ba4968" path="/var/lib/kubelet/pods/268f96af-7a77-4afb-85c5-f39480ba4968/volumes" Jan 22 07:21:20 crc kubenswrapper[4982]: I0122 07:21:20.569499 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-mdz95"] Jan 22 07:21:20 crc kubenswrapper[4982]: E0122 07:21:20.569929 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="268f96af-7a77-4afb-85c5-f39480ba4968" containerName="dnsmasq-dns" Jan 22 07:21:20 crc kubenswrapper[4982]: I0122 07:21:20.569947 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="268f96af-7a77-4afb-85c5-f39480ba4968" containerName="dnsmasq-dns" Jan 22 07:21:20 crc kubenswrapper[4982]: E0122 07:21:20.569981 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="268f96af-7a77-4afb-85c5-f39480ba4968" containerName="init" Jan 22 07:21:20 crc kubenswrapper[4982]: I0122 07:21:20.569989 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="268f96af-7a77-4afb-85c5-f39480ba4968" containerName="init" Jan 22 07:21:20 crc kubenswrapper[4982]: I0122 07:21:20.570212 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="268f96af-7a77-4afb-85c5-f39480ba4968" containerName="dnsmasq-dns" Jan 22 07:21:20 crc kubenswrapper[4982]: I0122 07:21:20.570914 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-mdz95" Jan 22 07:21:20 crc kubenswrapper[4982]: I0122 07:21:20.585133 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-mdz95"] Jan 22 07:21:20 crc kubenswrapper[4982]: I0122 07:21:20.675070 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-e101-account-create-update-qcbwn"] Jan 22 07:21:20 crc kubenswrapper[4982]: I0122 07:21:20.676320 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-e101-account-create-update-qcbwn" Jan 22 07:21:20 crc kubenswrapper[4982]: I0122 07:21:20.678809 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Jan 22 07:21:20 crc kubenswrapper[4982]: I0122 07:21:20.679534 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79c90dad-03c7-4a7d-b392-0a62a5e3cd6e-operator-scripts\") pod \"cinder-db-create-mdz95\" (UID: \"79c90dad-03c7-4a7d-b392-0a62a5e3cd6e\") " pod="openstack/cinder-db-create-mdz95" Jan 22 07:21:20 crc kubenswrapper[4982]: I0122 07:21:20.679658 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m49xq\" (UniqueName: \"kubernetes.io/projected/79c90dad-03c7-4a7d-b392-0a62a5e3cd6e-kube-api-access-m49xq\") pod \"cinder-db-create-mdz95\" (UID: \"79c90dad-03c7-4a7d-b392-0a62a5e3cd6e\") " pod="openstack/cinder-db-create-mdz95" Jan 22 07:21:20 crc kubenswrapper[4982]: I0122 07:21:20.683086 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-e101-account-create-update-qcbwn"] Jan 22 07:21:20 crc kubenswrapper[4982]: I0122 07:21:20.781637 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79c90dad-03c7-4a7d-b392-0a62a5e3cd6e-operator-scripts\") pod \"cinder-db-create-mdz95\" (UID: \"79c90dad-03c7-4a7d-b392-0a62a5e3cd6e\") " pod="openstack/cinder-db-create-mdz95" Jan 22 07:21:20 crc kubenswrapper[4982]: I0122 07:21:20.781753 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2l9w\" (UniqueName: \"kubernetes.io/projected/d90e2efe-a9e5-4f92-bc09-97e92cae152c-kube-api-access-z2l9w\") pod \"cinder-e101-account-create-update-qcbwn\" (UID: \"d90e2efe-a9e5-4f92-bc09-97e92cae152c\") " pod="openstack/cinder-e101-account-create-update-qcbwn" Jan 22 07:21:20 crc kubenswrapper[4982]: I0122 07:21:20.781805 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d90e2efe-a9e5-4f92-bc09-97e92cae152c-operator-scripts\") pod \"cinder-e101-account-create-update-qcbwn\" (UID: \"d90e2efe-a9e5-4f92-bc09-97e92cae152c\") " pod="openstack/cinder-e101-account-create-update-qcbwn" Jan 22 07:21:20 crc kubenswrapper[4982]: I0122 07:21:20.781830 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m49xq\" (UniqueName: \"kubernetes.io/projected/79c90dad-03c7-4a7d-b392-0a62a5e3cd6e-kube-api-access-m49xq\") pod \"cinder-db-create-mdz95\" (UID: \"79c90dad-03c7-4a7d-b392-0a62a5e3cd6e\") " pod="openstack/cinder-db-create-mdz95" Jan 22 07:21:20 crc kubenswrapper[4982]: I0122 07:21:20.782747 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79c90dad-03c7-4a7d-b392-0a62a5e3cd6e-operator-scripts\") pod \"cinder-db-create-mdz95\" (UID: \"79c90dad-03c7-4a7d-b392-0a62a5e3cd6e\") " pod="openstack/cinder-db-create-mdz95" Jan 22 07:21:20 crc kubenswrapper[4982]: I0122 07:21:20.809639 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m49xq\" (UniqueName: \"kubernetes.io/projected/79c90dad-03c7-4a7d-b392-0a62a5e3cd6e-kube-api-access-m49xq\") pod \"cinder-db-create-mdz95\" (UID: 
\"79c90dad-03c7-4a7d-b392-0a62a5e3cd6e\") " pod="openstack/cinder-db-create-mdz95" Jan 22 07:21:20 crc kubenswrapper[4982]: I0122 07:21:20.883548 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2l9w\" (UniqueName: \"kubernetes.io/projected/d90e2efe-a9e5-4f92-bc09-97e92cae152c-kube-api-access-z2l9w\") pod \"cinder-e101-account-create-update-qcbwn\" (UID: \"d90e2efe-a9e5-4f92-bc09-97e92cae152c\") " pod="openstack/cinder-e101-account-create-update-qcbwn" Jan 22 07:21:20 crc kubenswrapper[4982]: I0122 07:21:20.883604 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d90e2efe-a9e5-4f92-bc09-97e92cae152c-operator-scripts\") pod \"cinder-e101-account-create-update-qcbwn\" (UID: \"d90e2efe-a9e5-4f92-bc09-97e92cae152c\") " pod="openstack/cinder-e101-account-create-update-qcbwn" Jan 22 07:21:20 crc kubenswrapper[4982]: I0122 07:21:20.884356 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d90e2efe-a9e5-4f92-bc09-97e92cae152c-operator-scripts\") pod \"cinder-e101-account-create-update-qcbwn\" (UID: \"d90e2efe-a9e5-4f92-bc09-97e92cae152c\") " pod="openstack/cinder-e101-account-create-update-qcbwn" Jan 22 07:21:20 crc kubenswrapper[4982]: I0122 07:21:20.899910 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2l9w\" (UniqueName: \"kubernetes.io/projected/d90e2efe-a9e5-4f92-bc09-97e92cae152c-kube-api-access-z2l9w\") pod \"cinder-e101-account-create-update-qcbwn\" (UID: \"d90e2efe-a9e5-4f92-bc09-97e92cae152c\") " pod="openstack/cinder-e101-account-create-update-qcbwn" Jan 22 07:21:20 crc kubenswrapper[4982]: I0122 07:21:20.920319 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-mdz95" Jan 22 07:21:21 crc kubenswrapper[4982]: I0122 07:21:20.991428 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-e101-account-create-update-qcbwn" Jan 22 07:21:21 crc kubenswrapper[4982]: I0122 07:21:21.407347 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-mdz95"] Jan 22 07:21:21 crc kubenswrapper[4982]: W0122 07:21:21.409216 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod79c90dad_03c7_4a7d_b392_0a62a5e3cd6e.slice/crio-1959a5f5500d1d2c57c66ff3925793b3d6fef4fa98279b2ad3f0be917615541a WatchSource:0}: Error finding container 1959a5f5500d1d2c57c66ff3925793b3d6fef4fa98279b2ad3f0be917615541a: Status 404 returned error can't find the container with id 1959a5f5500d1d2c57c66ff3925793b3d6fef4fa98279b2ad3f0be917615541a Jan 22 07:21:21 crc kubenswrapper[4982]: I0122 07:21:21.477734 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-e101-account-create-update-qcbwn"] Jan 22 07:21:21 crc kubenswrapper[4982]: W0122 07:21:21.479321 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd90e2efe_a9e5_4f92_bc09_97e92cae152c.slice/crio-cf95666964c4e499d99555aaba45e8cfc528f5195e30cd471fa4506920b340b3 WatchSource:0}: Error finding container cf95666964c4e499d99555aaba45e8cfc528f5195e30cd471fa4506920b340b3: Status 404 returned error can't find the container with id cf95666964c4e499d99555aaba45e8cfc528f5195e30cd471fa4506920b340b3 Jan 22 07:21:22 crc kubenswrapper[4982]: I0122 07:21:22.072419 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-mdz95" event={"ID":"79c90dad-03c7-4a7d-b392-0a62a5e3cd6e","Type":"ContainerStarted","Data":"f40c7977aa66361b43cce1fb85a0e9640ec417b46b0e1bab1205970879999852"} Jan 22 07:21:22 crc kubenswrapper[4982]: I0122 07:21:22.072753 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-mdz95" event={"ID":"79c90dad-03c7-4a7d-b392-0a62a5e3cd6e","Type":"ContainerStarted","Data":"1959a5f5500d1d2c57c66ff3925793b3d6fef4fa98279b2ad3f0be917615541a"} Jan 22 07:21:22 crc kubenswrapper[4982]: I0122 07:21:22.073790 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-e101-account-create-update-qcbwn" event={"ID":"d90e2efe-a9e5-4f92-bc09-97e92cae152c","Type":"ContainerStarted","Data":"8744c92df627d0b0ece049b3aaee50058b2a19ca9b71876543b644ff8dfcf280"} Jan 22 07:21:22 crc kubenswrapper[4982]: I0122 07:21:22.073825 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-e101-account-create-update-qcbwn" event={"ID":"d90e2efe-a9e5-4f92-bc09-97e92cae152c","Type":"ContainerStarted","Data":"cf95666964c4e499d99555aaba45e8cfc528f5195e30cd471fa4506920b340b3"} Jan 22 07:21:22 crc kubenswrapper[4982]: I0122 07:21:22.117465 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-create-mdz95" podStartSLOduration=2.117444414 podStartE2EDuration="2.117444414s" podCreationTimestamp="2026-01-22 07:21:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:21:22.091008491 +0000 UTC m=+5742.929646514" watchObservedRunningTime="2026-01-22 07:21:22.117444414 +0000 UTC m=+5742.956082417" Jan 22 07:21:22 crc kubenswrapper[4982]: I0122 07:21:22.118243 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-e101-account-create-update-qcbwn" podStartSLOduration=2.118234695 
Jan 22 07:21:23 crc kubenswrapper[4982]: I0122 07:21:23.082319 4982 generic.go:334] "Generic (PLEG): container finished" podID="79c90dad-03c7-4a7d-b392-0a62a5e3cd6e" containerID="f40c7977aa66361b43cce1fb85a0e9640ec417b46b0e1bab1205970879999852" exitCode=0
Jan 22 07:21:23 crc kubenswrapper[4982]: I0122 07:21:23.082456 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-mdz95" event={"ID":"79c90dad-03c7-4a7d-b392-0a62a5e3cd6e","Type":"ContainerDied","Data":"f40c7977aa66361b43cce1fb85a0e9640ec417b46b0e1bab1205970879999852"}
Jan 22 07:21:23 crc kubenswrapper[4982]: I0122 07:21:23.086337 4982 generic.go:334] "Generic (PLEG): container finished" podID="d90e2efe-a9e5-4f92-bc09-97e92cae152c" containerID="8744c92df627d0b0ece049b3aaee50058b2a19ca9b71876543b644ff8dfcf280" exitCode=0
Jan 22 07:21:23 crc kubenswrapper[4982]: I0122 07:21:23.086368 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-e101-account-create-update-qcbwn" event={"ID":"d90e2efe-a9e5-4f92-bc09-97e92cae152c","Type":"ContainerDied","Data":"8744c92df627d0b0ece049b3aaee50058b2a19ca9b71876543b644ff8dfcf280"}
Jan 22 07:21:24 crc kubenswrapper[4982]: I0122 07:21:24.560094 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-e101-account-create-update-qcbwn"
Jan 22 07:21:24 crc kubenswrapper[4982]: I0122 07:21:24.565827 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-mdz95"
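cinder-db-create-mdz95 and the account-create pod are one-shot pods: a single container runs and exits 0 (the exitCode=0 entries above), after which the pod is torn down. A caller waiting on such a pod might poll its phase; a minimal client-go sketch under that assumption, with a hypothetical helper name:

package main

import (
	"context"
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
)

// waitForOneShot polls a pod until every container has exited, succeeding
// only when all of them returned exit code 0 (phase PodSucceeded).
func waitForOneShot(cs *kubernetes.Clientset, ns, name string) error {
	for {
		pod, err := cs.CoreV1().Pods(ns).Get(context.TODO(), name, metav1.GetOptions{})
		if err != nil {
			return err
		}
		switch pod.Status.Phase {
		case corev1.PodSucceeded:
			return nil
		case corev1.PodFailed:
			return fmt.Errorf("pod %s/%s failed", ns, name)
		}
		time.Sleep(2 * time.Second)
	}
}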
Need to start a new one" pod="openstack/cinder-db-create-mdz95" Jan 22 07:21:24 crc kubenswrapper[4982]: I0122 07:21:24.757077 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m49xq\" (UniqueName: \"kubernetes.io/projected/79c90dad-03c7-4a7d-b392-0a62a5e3cd6e-kube-api-access-m49xq\") pod \"79c90dad-03c7-4a7d-b392-0a62a5e3cd6e\" (UID: \"79c90dad-03c7-4a7d-b392-0a62a5e3cd6e\") " Jan 22 07:21:24 crc kubenswrapper[4982]: I0122 07:21:24.757206 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z2l9w\" (UniqueName: \"kubernetes.io/projected/d90e2efe-a9e5-4f92-bc09-97e92cae152c-kube-api-access-z2l9w\") pod \"d90e2efe-a9e5-4f92-bc09-97e92cae152c\" (UID: \"d90e2efe-a9e5-4f92-bc09-97e92cae152c\") " Jan 22 07:21:24 crc kubenswrapper[4982]: I0122 07:21:24.757282 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d90e2efe-a9e5-4f92-bc09-97e92cae152c-operator-scripts\") pod \"d90e2efe-a9e5-4f92-bc09-97e92cae152c\" (UID: \"d90e2efe-a9e5-4f92-bc09-97e92cae152c\") " Jan 22 07:21:24 crc kubenswrapper[4982]: I0122 07:21:24.757354 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79c90dad-03c7-4a7d-b392-0a62a5e3cd6e-operator-scripts\") pod \"79c90dad-03c7-4a7d-b392-0a62a5e3cd6e\" (UID: \"79c90dad-03c7-4a7d-b392-0a62a5e3cd6e\") " Jan 22 07:21:24 crc kubenswrapper[4982]: I0122 07:21:24.758245 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79c90dad-03c7-4a7d-b392-0a62a5e3cd6e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "79c90dad-03c7-4a7d-b392-0a62a5e3cd6e" (UID: "79c90dad-03c7-4a7d-b392-0a62a5e3cd6e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:21:24 crc kubenswrapper[4982]: I0122 07:21:24.758311 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d90e2efe-a9e5-4f92-bc09-97e92cae152c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d90e2efe-a9e5-4f92-bc09-97e92cae152c" (UID: "d90e2efe-a9e5-4f92-bc09-97e92cae152c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:21:24 crc kubenswrapper[4982]: I0122 07:21:24.766160 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d90e2efe-a9e5-4f92-bc09-97e92cae152c-kube-api-access-z2l9w" (OuterVolumeSpecName: "kube-api-access-z2l9w") pod "d90e2efe-a9e5-4f92-bc09-97e92cae152c" (UID: "d90e2efe-a9e5-4f92-bc09-97e92cae152c"). InnerVolumeSpecName "kube-api-access-z2l9w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:21:24 crc kubenswrapper[4982]: I0122 07:21:24.767035 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79c90dad-03c7-4a7d-b392-0a62a5e3cd6e-kube-api-access-m49xq" (OuterVolumeSpecName: "kube-api-access-m49xq") pod "79c90dad-03c7-4a7d-b392-0a62a5e3cd6e" (UID: "79c90dad-03c7-4a7d-b392-0a62a5e3cd6e"). InnerVolumeSpecName "kube-api-access-m49xq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:21:24 crc kubenswrapper[4982]: I0122 07:21:24.859676 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79c90dad-03c7-4a7d-b392-0a62a5e3cd6e-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:21:24 crc kubenswrapper[4982]: I0122 07:21:24.859711 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m49xq\" (UniqueName: \"kubernetes.io/projected/79c90dad-03c7-4a7d-b392-0a62a5e3cd6e-kube-api-access-m49xq\") on node \"crc\" DevicePath \"\"" Jan 22 07:21:24 crc kubenswrapper[4982]: I0122 07:21:24.859725 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z2l9w\" (UniqueName: \"kubernetes.io/projected/d90e2efe-a9e5-4f92-bc09-97e92cae152c-kube-api-access-z2l9w\") on node \"crc\" DevicePath \"\"" Jan 22 07:21:24 crc kubenswrapper[4982]: I0122 07:21:24.859736 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d90e2efe-a9e5-4f92-bc09-97e92cae152c-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:21:25 crc kubenswrapper[4982]: I0122 07:21:25.112980 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-e101-account-create-update-qcbwn" event={"ID":"d90e2efe-a9e5-4f92-bc09-97e92cae152c","Type":"ContainerDied","Data":"cf95666964c4e499d99555aaba45e8cfc528f5195e30cd471fa4506920b340b3"} Jan 22 07:21:25 crc kubenswrapper[4982]: I0122 07:21:25.113487 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cf95666964c4e499d99555aaba45e8cfc528f5195e30cd471fa4506920b340b3" Jan 22 07:21:25 crc kubenswrapper[4982]: I0122 07:21:25.113145 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-e101-account-create-update-qcbwn" Jan 22 07:21:25 crc kubenswrapper[4982]: I0122 07:21:25.118250 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-mdz95" event={"ID":"79c90dad-03c7-4a7d-b392-0a62a5e3cd6e","Type":"ContainerDied","Data":"1959a5f5500d1d2c57c66ff3925793b3d6fef4fa98279b2ad3f0be917615541a"} Jan 22 07:21:25 crc kubenswrapper[4982]: I0122 07:21:25.118309 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1959a5f5500d1d2c57c66ff3925793b3d6fef4fa98279b2ad3f0be917615541a" Jan 22 07:21:25 crc kubenswrapper[4982]: I0122 07:21:25.118374 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-mdz95" Jan 22 07:21:25 crc kubenswrapper[4982]: I0122 07:21:25.944527 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-dn4mz"] Jan 22 07:21:25 crc kubenswrapper[4982]: E0122 07:21:25.945139 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d90e2efe-a9e5-4f92-bc09-97e92cae152c" containerName="mariadb-account-create-update" Jan 22 07:21:25 crc kubenswrapper[4982]: I0122 07:21:25.945157 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d90e2efe-a9e5-4f92-bc09-97e92cae152c" containerName="mariadb-account-create-update" Jan 22 07:21:25 crc kubenswrapper[4982]: E0122 07:21:25.945196 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79c90dad-03c7-4a7d-b392-0a62a5e3cd6e" containerName="mariadb-database-create" Jan 22 07:21:25 crc kubenswrapper[4982]: I0122 07:21:25.945206 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="79c90dad-03c7-4a7d-b392-0a62a5e3cd6e" containerName="mariadb-database-create" Jan 22 07:21:25 crc kubenswrapper[4982]: I0122 07:21:25.945434 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d90e2efe-a9e5-4f92-bc09-97e92cae152c" containerName="mariadb-account-create-update" Jan 22 07:21:25 crc kubenswrapper[4982]: I0122 07:21:25.945463 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="79c90dad-03c7-4a7d-b392-0a62a5e3cd6e" containerName="mariadb-database-create" Jan 22 07:21:25 crc kubenswrapper[4982]: I0122 07:21:25.946220 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-dn4mz" Jan 22 07:21:25 crc kubenswrapper[4982]: I0122 07:21:25.948494 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Jan 22 07:21:25 crc kubenswrapper[4982]: I0122 07:21:25.948703 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Jan 22 07:21:25 crc kubenswrapper[4982]: I0122 07:21:25.949833 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-qbhhq" Jan 22 07:21:25 crc kubenswrapper[4982]: I0122 07:21:25.955371 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-dn4mz"] Jan 22 07:21:26 crc kubenswrapper[4982]: I0122 07:21:26.078333 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6103be86-a098-4bcd-8217-251f25d1a8b9-scripts\") pod \"cinder-db-sync-dn4mz\" (UID: \"6103be86-a098-4bcd-8217-251f25d1a8b9\") " pod="openstack/cinder-db-sync-dn4mz" Jan 22 07:21:26 crc kubenswrapper[4982]: I0122 07:21:26.078460 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6103be86-a098-4bcd-8217-251f25d1a8b9-combined-ca-bundle\") pod \"cinder-db-sync-dn4mz\" (UID: \"6103be86-a098-4bcd-8217-251f25d1a8b9\") " pod="openstack/cinder-db-sync-dn4mz" Jan 22 07:21:26 crc kubenswrapper[4982]: I0122 07:21:26.078585 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5zl6\" (UniqueName: \"kubernetes.io/projected/6103be86-a098-4bcd-8217-251f25d1a8b9-kube-api-access-h5zl6\") pod \"cinder-db-sync-dn4mz\" (UID: \"6103be86-a098-4bcd-8217-251f25d1a8b9\") " pod="openstack/cinder-db-sync-dn4mz" Jan 22 07:21:26 crc kubenswrapper[4982]: I0122 07:21:26.078624 
4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6103be86-a098-4bcd-8217-251f25d1a8b9-etc-machine-id\") pod \"cinder-db-sync-dn4mz\" (UID: \"6103be86-a098-4bcd-8217-251f25d1a8b9\") " pod="openstack/cinder-db-sync-dn4mz" Jan 22 07:21:26 crc kubenswrapper[4982]: I0122 07:21:26.078670 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6103be86-a098-4bcd-8217-251f25d1a8b9-db-sync-config-data\") pod \"cinder-db-sync-dn4mz\" (UID: \"6103be86-a098-4bcd-8217-251f25d1a8b9\") " pod="openstack/cinder-db-sync-dn4mz" Jan 22 07:21:26 crc kubenswrapper[4982]: I0122 07:21:26.078739 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6103be86-a098-4bcd-8217-251f25d1a8b9-config-data\") pod \"cinder-db-sync-dn4mz\" (UID: \"6103be86-a098-4bcd-8217-251f25d1a8b9\") " pod="openstack/cinder-db-sync-dn4mz" Jan 22 07:21:26 crc kubenswrapper[4982]: I0122 07:21:26.180519 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6103be86-a098-4bcd-8217-251f25d1a8b9-scripts\") pod \"cinder-db-sync-dn4mz\" (UID: \"6103be86-a098-4bcd-8217-251f25d1a8b9\") " pod="openstack/cinder-db-sync-dn4mz" Jan 22 07:21:26 crc kubenswrapper[4982]: I0122 07:21:26.180630 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6103be86-a098-4bcd-8217-251f25d1a8b9-combined-ca-bundle\") pod \"cinder-db-sync-dn4mz\" (UID: \"6103be86-a098-4bcd-8217-251f25d1a8b9\") " pod="openstack/cinder-db-sync-dn4mz" Jan 22 07:21:26 crc kubenswrapper[4982]: I0122 07:21:26.180709 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5zl6\" (UniqueName: \"kubernetes.io/projected/6103be86-a098-4bcd-8217-251f25d1a8b9-kube-api-access-h5zl6\") pod \"cinder-db-sync-dn4mz\" (UID: \"6103be86-a098-4bcd-8217-251f25d1a8b9\") " pod="openstack/cinder-db-sync-dn4mz" Jan 22 07:21:26 crc kubenswrapper[4982]: I0122 07:21:26.180726 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6103be86-a098-4bcd-8217-251f25d1a8b9-etc-machine-id\") pod \"cinder-db-sync-dn4mz\" (UID: \"6103be86-a098-4bcd-8217-251f25d1a8b9\") " pod="openstack/cinder-db-sync-dn4mz" Jan 22 07:21:26 crc kubenswrapper[4982]: I0122 07:21:26.180781 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6103be86-a098-4bcd-8217-251f25d1a8b9-db-sync-config-data\") pod \"cinder-db-sync-dn4mz\" (UID: \"6103be86-a098-4bcd-8217-251f25d1a8b9\") " pod="openstack/cinder-db-sync-dn4mz" Jan 22 07:21:26 crc kubenswrapper[4982]: I0122 07:21:26.180808 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6103be86-a098-4bcd-8217-251f25d1a8b9-config-data\") pod \"cinder-db-sync-dn4mz\" (UID: \"6103be86-a098-4bcd-8217-251f25d1a8b9\") " pod="openstack/cinder-db-sync-dn4mz" Jan 22 07:21:26 crc kubenswrapper[4982]: I0122 07:21:26.181076 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/6103be86-a098-4bcd-8217-251f25d1a8b9-etc-machine-id\") pod \"cinder-db-sync-dn4mz\" (UID: \"6103be86-a098-4bcd-8217-251f25d1a8b9\") " pod="openstack/cinder-db-sync-dn4mz" Jan 22 07:21:26 crc kubenswrapper[4982]: I0122 07:21:26.185168 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6103be86-a098-4bcd-8217-251f25d1a8b9-config-data\") pod \"cinder-db-sync-dn4mz\" (UID: \"6103be86-a098-4bcd-8217-251f25d1a8b9\") " pod="openstack/cinder-db-sync-dn4mz" Jan 22 07:21:26 crc kubenswrapper[4982]: I0122 07:21:26.185304 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6103be86-a098-4bcd-8217-251f25d1a8b9-scripts\") pod \"cinder-db-sync-dn4mz\" (UID: \"6103be86-a098-4bcd-8217-251f25d1a8b9\") " pod="openstack/cinder-db-sync-dn4mz" Jan 22 07:21:26 crc kubenswrapper[4982]: I0122 07:21:26.185696 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6103be86-a098-4bcd-8217-251f25d1a8b9-combined-ca-bundle\") pod \"cinder-db-sync-dn4mz\" (UID: \"6103be86-a098-4bcd-8217-251f25d1a8b9\") " pod="openstack/cinder-db-sync-dn4mz" Jan 22 07:21:26 crc kubenswrapper[4982]: I0122 07:21:26.193978 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6103be86-a098-4bcd-8217-251f25d1a8b9-db-sync-config-data\") pod \"cinder-db-sync-dn4mz\" (UID: \"6103be86-a098-4bcd-8217-251f25d1a8b9\") " pod="openstack/cinder-db-sync-dn4mz" Jan 22 07:21:26 crc kubenswrapper[4982]: I0122 07:21:26.196154 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5zl6\" (UniqueName: \"kubernetes.io/projected/6103be86-a098-4bcd-8217-251f25d1a8b9-kube-api-access-h5zl6\") pod \"cinder-db-sync-dn4mz\" (UID: \"6103be86-a098-4bcd-8217-251f25d1a8b9\") " pod="openstack/cinder-db-sync-dn4mz" Jan 22 07:21:26 crc kubenswrapper[4982]: I0122 07:21:26.278030 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-dn4mz" Jan 22 07:21:26 crc kubenswrapper[4982]: W0122 07:21:26.705940 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6103be86_a098_4bcd_8217_251f25d1a8b9.slice/crio-96436205ec88c6a4e7281b226481d830562a7a13815212d2aa3ff150b7cf771e WatchSource:0}: Error finding container 96436205ec88c6a4e7281b226481d830562a7a13815212d2aa3ff150b7cf771e: Status 404 returned error can't find the container with id 96436205ec88c6a4e7281b226481d830562a7a13815212d2aa3ff150b7cf771e Jan 22 07:21:26 crc kubenswrapper[4982]: I0122 07:21:26.706543 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-dn4mz"] Jan 22 07:21:27 crc kubenswrapper[4982]: I0122 07:21:27.146582 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-dn4mz" event={"ID":"6103be86-a098-4bcd-8217-251f25d1a8b9","Type":"ContainerStarted","Data":"96436205ec88c6a4e7281b226481d830562a7a13815212d2aa3ff150b7cf771e"} Jan 22 07:21:28 crc kubenswrapper[4982]: I0122 07:21:28.160409 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-dn4mz" event={"ID":"6103be86-a098-4bcd-8217-251f25d1a8b9","Type":"ContainerStarted","Data":"2d1a27a8684f46ec867f38116bb0ac87dc1d2ed0dffe13595ecba038acfb46db"} Jan 22 07:21:31 crc kubenswrapper[4982]: I0122 07:21:31.193093 4982 generic.go:334] "Generic (PLEG): container finished" podID="6103be86-a098-4bcd-8217-251f25d1a8b9" containerID="2d1a27a8684f46ec867f38116bb0ac87dc1d2ed0dffe13595ecba038acfb46db" exitCode=0 Jan 22 07:21:31 crc kubenswrapper[4982]: I0122 07:21:31.193255 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-dn4mz" event={"ID":"6103be86-a098-4bcd-8217-251f25d1a8b9","Type":"ContainerDied","Data":"2d1a27a8684f46ec867f38116bb0ac87dc1d2ed0dffe13595ecba038acfb46db"} Jan 22 07:21:32 crc kubenswrapper[4982]: I0122 07:21:32.531658 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-dn4mz" Jan 22 07:21:32 crc kubenswrapper[4982]: I0122 07:21:32.593050 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6103be86-a098-4bcd-8217-251f25d1a8b9-db-sync-config-data\") pod \"6103be86-a098-4bcd-8217-251f25d1a8b9\" (UID: \"6103be86-a098-4bcd-8217-251f25d1a8b9\") " Jan 22 07:21:32 crc kubenswrapper[4982]: I0122 07:21:32.593097 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6103be86-a098-4bcd-8217-251f25d1a8b9-combined-ca-bundle\") pod \"6103be86-a098-4bcd-8217-251f25d1a8b9\" (UID: \"6103be86-a098-4bcd-8217-251f25d1a8b9\") " Jan 22 07:21:32 crc kubenswrapper[4982]: I0122 07:21:32.593133 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6103be86-a098-4bcd-8217-251f25d1a8b9-etc-machine-id\") pod \"6103be86-a098-4bcd-8217-251f25d1a8b9\" (UID: \"6103be86-a098-4bcd-8217-251f25d1a8b9\") " Jan 22 07:21:32 crc kubenswrapper[4982]: I0122 07:21:32.593169 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6103be86-a098-4bcd-8217-251f25d1a8b9-config-data\") pod \"6103be86-a098-4bcd-8217-251f25d1a8b9\" (UID: \"6103be86-a098-4bcd-8217-251f25d1a8b9\") " Jan 22 07:21:32 crc kubenswrapper[4982]: I0122 07:21:32.593206 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h5zl6\" (UniqueName: \"kubernetes.io/projected/6103be86-a098-4bcd-8217-251f25d1a8b9-kube-api-access-h5zl6\") pod \"6103be86-a098-4bcd-8217-251f25d1a8b9\" (UID: \"6103be86-a098-4bcd-8217-251f25d1a8b9\") " Jan 22 07:21:32 crc kubenswrapper[4982]: I0122 07:21:32.593251 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6103be86-a098-4bcd-8217-251f25d1a8b9-scripts\") pod \"6103be86-a098-4bcd-8217-251f25d1a8b9\" (UID: \"6103be86-a098-4bcd-8217-251f25d1a8b9\") " Jan 22 07:21:32 crc kubenswrapper[4982]: I0122 07:21:32.593953 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6103be86-a098-4bcd-8217-251f25d1a8b9-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "6103be86-a098-4bcd-8217-251f25d1a8b9" (UID: "6103be86-a098-4bcd-8217-251f25d1a8b9"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 07:21:32 crc kubenswrapper[4982]: I0122 07:21:32.601576 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6103be86-a098-4bcd-8217-251f25d1a8b9-kube-api-access-h5zl6" (OuterVolumeSpecName: "kube-api-access-h5zl6") pod "6103be86-a098-4bcd-8217-251f25d1a8b9" (UID: "6103be86-a098-4bcd-8217-251f25d1a8b9"). InnerVolumeSpecName "kube-api-access-h5zl6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:21:32 crc kubenswrapper[4982]: I0122 07:21:32.602043 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6103be86-a098-4bcd-8217-251f25d1a8b9-scripts" (OuterVolumeSpecName: "scripts") pod "6103be86-a098-4bcd-8217-251f25d1a8b9" (UID: "6103be86-a098-4bcd-8217-251f25d1a8b9"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:21:32 crc kubenswrapper[4982]: I0122 07:21:32.602154 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6103be86-a098-4bcd-8217-251f25d1a8b9-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "6103be86-a098-4bcd-8217-251f25d1a8b9" (UID: "6103be86-a098-4bcd-8217-251f25d1a8b9"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:21:32 crc kubenswrapper[4982]: I0122 07:21:32.630989 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6103be86-a098-4bcd-8217-251f25d1a8b9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6103be86-a098-4bcd-8217-251f25d1a8b9" (UID: "6103be86-a098-4bcd-8217-251f25d1a8b9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:21:32 crc kubenswrapper[4982]: I0122 07:21:32.642176 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6103be86-a098-4bcd-8217-251f25d1a8b9-config-data" (OuterVolumeSpecName: "config-data") pod "6103be86-a098-4bcd-8217-251f25d1a8b9" (UID: "6103be86-a098-4bcd-8217-251f25d1a8b9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:21:32 crc kubenswrapper[4982]: I0122 07:21:32.694437 4982 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6103be86-a098-4bcd-8217-251f25d1a8b9-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 22 07:21:32 crc kubenswrapper[4982]: I0122 07:21:32.694472 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6103be86-a098-4bcd-8217-251f25d1a8b9-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:21:32 crc kubenswrapper[4982]: I0122 07:21:32.694503 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h5zl6\" (UniqueName: \"kubernetes.io/projected/6103be86-a098-4bcd-8217-251f25d1a8b9-kube-api-access-h5zl6\") on node \"crc\" DevicePath \"\"" Jan 22 07:21:32 crc kubenswrapper[4982]: I0122 07:21:32.694512 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6103be86-a098-4bcd-8217-251f25d1a8b9-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:21:32 crc kubenswrapper[4982]: I0122 07:21:32.694522 4982 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6103be86-a098-4bcd-8217-251f25d1a8b9-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:21:32 crc kubenswrapper[4982]: I0122 07:21:32.694531 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6103be86-a098-4bcd-8217-251f25d1a8b9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.214968 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-dn4mz" event={"ID":"6103be86-a098-4bcd-8217-251f25d1a8b9","Type":"ContainerDied","Data":"96436205ec88c6a4e7281b226481d830562a7a13815212d2aa3ff150b7cf771e"} Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.215009 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="96436205ec88c6a4e7281b226481d830562a7a13815212d2aa3ff150b7cf771e" Jan 22 07:21:33 crc kubenswrapper[4982]: 
I0122 07:21:33.215061 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-dn4mz" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.453067 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c599684d7-dhnf7"] Jan 22 07:21:33 crc kubenswrapper[4982]: E0122 07:21:33.453885 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6103be86-a098-4bcd-8217-251f25d1a8b9" containerName="cinder-db-sync" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.453904 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="6103be86-a098-4bcd-8217-251f25d1a8b9" containerName="cinder-db-sync" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.454131 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="6103be86-a098-4bcd-8217-251f25d1a8b9" containerName="cinder-db-sync" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.455306 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c599684d7-dhnf7" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.470087 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c599684d7-dhnf7"] Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.511453 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prvbn\" (UniqueName: \"kubernetes.io/projected/cfe2c386-c017-4c40-add9-65ba1dc29f41-kube-api-access-prvbn\") pod \"dnsmasq-dns-5c599684d7-dhnf7\" (UID: \"cfe2c386-c017-4c40-add9-65ba1dc29f41\") " pod="openstack/dnsmasq-dns-5c599684d7-dhnf7" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.511505 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cfe2c386-c017-4c40-add9-65ba1dc29f41-ovsdbserver-nb\") pod \"dnsmasq-dns-5c599684d7-dhnf7\" (UID: \"cfe2c386-c017-4c40-add9-65ba1dc29f41\") " pod="openstack/dnsmasq-dns-5c599684d7-dhnf7" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.511541 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfe2c386-c017-4c40-add9-65ba1dc29f41-config\") pod \"dnsmasq-dns-5c599684d7-dhnf7\" (UID: \"cfe2c386-c017-4c40-add9-65ba1dc29f41\") " pod="openstack/dnsmasq-dns-5c599684d7-dhnf7" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.511740 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cfe2c386-c017-4c40-add9-65ba1dc29f41-dns-svc\") pod \"dnsmasq-dns-5c599684d7-dhnf7\" (UID: \"cfe2c386-c017-4c40-add9-65ba1dc29f41\") " pod="openstack/dnsmasq-dns-5c599684d7-dhnf7" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.511970 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cfe2c386-c017-4c40-add9-65ba1dc29f41-ovsdbserver-sb\") pod \"dnsmasq-dns-5c599684d7-dhnf7\" (UID: \"cfe2c386-c017-4c40-add9-65ba1dc29f41\") " pod="openstack/dnsmasq-dns-5c599684d7-dhnf7" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.613096 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cfe2c386-c017-4c40-add9-65ba1dc29f41-ovsdbserver-sb\") pod 
\"dnsmasq-dns-5c599684d7-dhnf7\" (UID: \"cfe2c386-c017-4c40-add9-65ba1dc29f41\") " pod="openstack/dnsmasq-dns-5c599684d7-dhnf7" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.613194 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prvbn\" (UniqueName: \"kubernetes.io/projected/cfe2c386-c017-4c40-add9-65ba1dc29f41-kube-api-access-prvbn\") pod \"dnsmasq-dns-5c599684d7-dhnf7\" (UID: \"cfe2c386-c017-4c40-add9-65ba1dc29f41\") " pod="openstack/dnsmasq-dns-5c599684d7-dhnf7" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.613461 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cfe2c386-c017-4c40-add9-65ba1dc29f41-ovsdbserver-nb\") pod \"dnsmasq-dns-5c599684d7-dhnf7\" (UID: \"cfe2c386-c017-4c40-add9-65ba1dc29f41\") " pod="openstack/dnsmasq-dns-5c599684d7-dhnf7" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.613511 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfe2c386-c017-4c40-add9-65ba1dc29f41-config\") pod \"dnsmasq-dns-5c599684d7-dhnf7\" (UID: \"cfe2c386-c017-4c40-add9-65ba1dc29f41\") " pod="openstack/dnsmasq-dns-5c599684d7-dhnf7" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.613573 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cfe2c386-c017-4c40-add9-65ba1dc29f41-dns-svc\") pod \"dnsmasq-dns-5c599684d7-dhnf7\" (UID: \"cfe2c386-c017-4c40-add9-65ba1dc29f41\") " pod="openstack/dnsmasq-dns-5c599684d7-dhnf7" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.614340 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cfe2c386-c017-4c40-add9-65ba1dc29f41-ovsdbserver-sb\") pod \"dnsmasq-dns-5c599684d7-dhnf7\" (UID: \"cfe2c386-c017-4c40-add9-65ba1dc29f41\") " pod="openstack/dnsmasq-dns-5c599684d7-dhnf7" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.614502 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cfe2c386-c017-4c40-add9-65ba1dc29f41-dns-svc\") pod \"dnsmasq-dns-5c599684d7-dhnf7\" (UID: \"cfe2c386-c017-4c40-add9-65ba1dc29f41\") " pod="openstack/dnsmasq-dns-5c599684d7-dhnf7" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.614536 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfe2c386-c017-4c40-add9-65ba1dc29f41-config\") pod \"dnsmasq-dns-5c599684d7-dhnf7\" (UID: \"cfe2c386-c017-4c40-add9-65ba1dc29f41\") " pod="openstack/dnsmasq-dns-5c599684d7-dhnf7" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.614611 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cfe2c386-c017-4c40-add9-65ba1dc29f41-ovsdbserver-nb\") pod \"dnsmasq-dns-5c599684d7-dhnf7\" (UID: \"cfe2c386-c017-4c40-add9-65ba1dc29f41\") " pod="openstack/dnsmasq-dns-5c599684d7-dhnf7" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.646974 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prvbn\" (UniqueName: \"kubernetes.io/projected/cfe2c386-c017-4c40-add9-65ba1dc29f41-kube-api-access-prvbn\") pod \"dnsmasq-dns-5c599684d7-dhnf7\" (UID: \"cfe2c386-c017-4c40-add9-65ba1dc29f41\") " 
pod="openstack/dnsmasq-dns-5c599684d7-dhnf7" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.654409 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.655757 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.665443 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.665642 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.665564 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.671419 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-qbhhq" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.755245 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.805268 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c599684d7-dhnf7" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.819489 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e418a3f5-6769-46fa-98a9-a74f59f829a6-scripts\") pod \"cinder-api-0\" (UID: \"e418a3f5-6769-46fa-98a9-a74f59f829a6\") " pod="openstack/cinder-api-0" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.819543 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e418a3f5-6769-46fa-98a9-a74f59f829a6-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e418a3f5-6769-46fa-98a9-a74f59f829a6\") " pod="openstack/cinder-api-0" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.819565 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e418a3f5-6769-46fa-98a9-a74f59f829a6-logs\") pod \"cinder-api-0\" (UID: \"e418a3f5-6769-46fa-98a9-a74f59f829a6\") " pod="openstack/cinder-api-0" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.819597 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e418a3f5-6769-46fa-98a9-a74f59f829a6-config-data\") pod \"cinder-api-0\" (UID: \"e418a3f5-6769-46fa-98a9-a74f59f829a6\") " pod="openstack/cinder-api-0" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.819676 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e418a3f5-6769-46fa-98a9-a74f59f829a6-config-data-custom\") pod \"cinder-api-0\" (UID: \"e418a3f5-6769-46fa-98a9-a74f59f829a6\") " pod="openstack/cinder-api-0" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.819722 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bhl7\" (UniqueName: \"kubernetes.io/projected/e418a3f5-6769-46fa-98a9-a74f59f829a6-kube-api-access-4bhl7\") pod \"cinder-api-0\" (UID: \"e418a3f5-6769-46fa-98a9-a74f59f829a6\") " 
pod="openstack/cinder-api-0" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.819757 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e418a3f5-6769-46fa-98a9-a74f59f829a6-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e418a3f5-6769-46fa-98a9-a74f59f829a6\") " pod="openstack/cinder-api-0" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.920578 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e418a3f5-6769-46fa-98a9-a74f59f829a6-config-data-custom\") pod \"cinder-api-0\" (UID: \"e418a3f5-6769-46fa-98a9-a74f59f829a6\") " pod="openstack/cinder-api-0" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.920624 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bhl7\" (UniqueName: \"kubernetes.io/projected/e418a3f5-6769-46fa-98a9-a74f59f829a6-kube-api-access-4bhl7\") pod \"cinder-api-0\" (UID: \"e418a3f5-6769-46fa-98a9-a74f59f829a6\") " pod="openstack/cinder-api-0" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.920655 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e418a3f5-6769-46fa-98a9-a74f59f829a6-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e418a3f5-6769-46fa-98a9-a74f59f829a6\") " pod="openstack/cinder-api-0" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.920720 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e418a3f5-6769-46fa-98a9-a74f59f829a6-scripts\") pod \"cinder-api-0\" (UID: \"e418a3f5-6769-46fa-98a9-a74f59f829a6\") " pod="openstack/cinder-api-0" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.920737 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e418a3f5-6769-46fa-98a9-a74f59f829a6-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e418a3f5-6769-46fa-98a9-a74f59f829a6\") " pod="openstack/cinder-api-0" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.920752 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e418a3f5-6769-46fa-98a9-a74f59f829a6-logs\") pod \"cinder-api-0\" (UID: \"e418a3f5-6769-46fa-98a9-a74f59f829a6\") " pod="openstack/cinder-api-0" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.920775 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e418a3f5-6769-46fa-98a9-a74f59f829a6-config-data\") pod \"cinder-api-0\" (UID: \"e418a3f5-6769-46fa-98a9-a74f59f829a6\") " pod="openstack/cinder-api-0" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.920958 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e418a3f5-6769-46fa-98a9-a74f59f829a6-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e418a3f5-6769-46fa-98a9-a74f59f829a6\") " pod="openstack/cinder-api-0" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.921383 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e418a3f5-6769-46fa-98a9-a74f59f829a6-logs\") pod \"cinder-api-0\" (UID: \"e418a3f5-6769-46fa-98a9-a74f59f829a6\") " 
pod="openstack/cinder-api-0" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.929258 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e418a3f5-6769-46fa-98a9-a74f59f829a6-config-data-custom\") pod \"cinder-api-0\" (UID: \"e418a3f5-6769-46fa-98a9-a74f59f829a6\") " pod="openstack/cinder-api-0" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.931456 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e418a3f5-6769-46fa-98a9-a74f59f829a6-config-data\") pod \"cinder-api-0\" (UID: \"e418a3f5-6769-46fa-98a9-a74f59f829a6\") " pod="openstack/cinder-api-0" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.931906 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e418a3f5-6769-46fa-98a9-a74f59f829a6-scripts\") pod \"cinder-api-0\" (UID: \"e418a3f5-6769-46fa-98a9-a74f59f829a6\") " pod="openstack/cinder-api-0" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.934587 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e418a3f5-6769-46fa-98a9-a74f59f829a6-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e418a3f5-6769-46fa-98a9-a74f59f829a6\") " pod="openstack/cinder-api-0" Jan 22 07:21:33 crc kubenswrapper[4982]: I0122 07:21:33.947512 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bhl7\" (UniqueName: \"kubernetes.io/projected/e418a3f5-6769-46fa-98a9-a74f59f829a6-kube-api-access-4bhl7\") pod \"cinder-api-0\" (UID: \"e418a3f5-6769-46fa-98a9-a74f59f829a6\") " pod="openstack/cinder-api-0" Jan 22 07:21:34 crc kubenswrapper[4982]: I0122 07:21:34.054288 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 22 07:21:34 crc kubenswrapper[4982]: I0122 07:21:34.252411 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c599684d7-dhnf7"] Jan 22 07:21:34 crc kubenswrapper[4982]: I0122 07:21:34.593513 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 22 07:21:34 crc kubenswrapper[4982]: W0122 07:21:34.596391 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode418a3f5_6769_46fa_98a9_a74f59f829a6.slice/crio-26a770e4b9a817540c56c741c7ff4a859b0818b0937a69bed1dcabce7e2700cd WatchSource:0}: Error finding container 26a770e4b9a817540c56c741c7ff4a859b0818b0937a69bed1dcabce7e2700cd: Status 404 returned error can't find the container with id 26a770e4b9a817540c56c741c7ff4a859b0818b0937a69bed1dcabce7e2700cd Jan 22 07:21:35 crc kubenswrapper[4982]: I0122 07:21:35.241823 4982 generic.go:334] "Generic (PLEG): container finished" podID="cfe2c386-c017-4c40-add9-65ba1dc29f41" containerID="eff10a39f1b8bb926c647bc6892d13717f734e625a7d295525d9e5cc1c89288d" exitCode=0 Jan 22 07:21:35 crc kubenswrapper[4982]: I0122 07:21:35.242082 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c599684d7-dhnf7" event={"ID":"cfe2c386-c017-4c40-add9-65ba1dc29f41","Type":"ContainerDied","Data":"eff10a39f1b8bb926c647bc6892d13717f734e625a7d295525d9e5cc1c89288d"} Jan 22 07:21:35 crc kubenswrapper[4982]: I0122 07:21:35.242280 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c599684d7-dhnf7" event={"ID":"cfe2c386-c017-4c40-add9-65ba1dc29f41","Type":"ContainerStarted","Data":"bb5ecb83e1c79d2847f3bfdcce8b6ea3942288b722682373f36e37290655c093"} Jan 22 07:21:35 crc kubenswrapper[4982]: I0122 07:21:35.248656 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e418a3f5-6769-46fa-98a9-a74f59f829a6","Type":"ContainerStarted","Data":"26a770e4b9a817540c56c741c7ff4a859b0818b0937a69bed1dcabce7e2700cd"} Jan 22 07:21:36 crc kubenswrapper[4982]: I0122 07:21:36.259027 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c599684d7-dhnf7" event={"ID":"cfe2c386-c017-4c40-add9-65ba1dc29f41","Type":"ContainerStarted","Data":"6a3198c8f30a8a2c3b0388bc739fd51dae391960801d0b6e4fdb95a636fcfc5e"} Jan 22 07:21:36 crc kubenswrapper[4982]: I0122 07:21:36.259504 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c599684d7-dhnf7" Jan 22 07:21:36 crc kubenswrapper[4982]: I0122 07:21:36.260664 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e418a3f5-6769-46fa-98a9-a74f59f829a6","Type":"ContainerStarted","Data":"aea6a9c3143239e52a67b115c6c9e9b82f9120bd13a091b223ac415ba8fc4475"} Jan 22 07:21:36 crc kubenswrapper[4982]: I0122 07:21:36.260686 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e418a3f5-6769-46fa-98a9-a74f59f829a6","Type":"ContainerStarted","Data":"912aa6c646f840a2c6849e41dc3fa5a4a73ae2fe8596d0c1d6717b5c48de9994"} Jan 22 07:21:36 crc kubenswrapper[4982]: I0122 07:21:36.260804 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Jan 22 07:21:36 crc kubenswrapper[4982]: I0122 07:21:36.281790 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c599684d7-dhnf7" 
podStartSLOduration=3.2817741590000002 podStartE2EDuration="3.281774159s" podCreationTimestamp="2026-01-22 07:21:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:21:36.275616343 +0000 UTC m=+5757.114254336" watchObservedRunningTime="2026-01-22 07:21:36.281774159 +0000 UTC m=+5757.120412162" Jan 22 07:21:36 crc kubenswrapper[4982]: I0122 07:21:36.294153 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.294132723 podStartE2EDuration="3.294132723s" podCreationTimestamp="2026-01-22 07:21:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:21:36.2903126 +0000 UTC m=+5757.128950613" watchObservedRunningTime="2026-01-22 07:21:36.294132723 +0000 UTC m=+5757.132770726" Jan 22 07:21:43 crc kubenswrapper[4982]: I0122 07:21:43.807951 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c599684d7-dhnf7" Jan 22 07:21:43 crc kubenswrapper[4982]: I0122 07:21:43.881561 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-795b859695-ft69v"] Jan 22 07:21:43 crc kubenswrapper[4982]: I0122 07:21:43.882127 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-795b859695-ft69v" podUID="0107b32f-c775-4e02-b7da-0b3ce5962efa" containerName="dnsmasq-dns" containerID="cri-o://27b21e85f1d3640b6ff3e32e788867934af20a1ac1044f24f5b726bb916c53e0" gracePeriod=10 Jan 22 07:21:44 crc kubenswrapper[4982]: I0122 07:21:44.333181 4982 generic.go:334] "Generic (PLEG): container finished" podID="0107b32f-c775-4e02-b7da-0b3ce5962efa" containerID="27b21e85f1d3640b6ff3e32e788867934af20a1ac1044f24f5b726bb916c53e0" exitCode=0 Jan 22 07:21:44 crc kubenswrapper[4982]: I0122 07:21:44.333251 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-795b859695-ft69v" event={"ID":"0107b32f-c775-4e02-b7da-0b3ce5962efa","Type":"ContainerDied","Data":"27b21e85f1d3640b6ff3e32e788867934af20a1ac1044f24f5b726bb916c53e0"} Jan 22 07:21:44 crc kubenswrapper[4982]: I0122 07:21:44.427483 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-795b859695-ft69v" Jan 22 07:21:44 crc kubenswrapper[4982]: I0122 07:21:44.531416 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0107b32f-c775-4e02-b7da-0b3ce5962efa-dns-svc\") pod \"0107b32f-c775-4e02-b7da-0b3ce5962efa\" (UID: \"0107b32f-c775-4e02-b7da-0b3ce5962efa\") " Jan 22 07:21:44 crc kubenswrapper[4982]: I0122 07:21:44.531523 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0107b32f-c775-4e02-b7da-0b3ce5962efa-config\") pod \"0107b32f-c775-4e02-b7da-0b3ce5962efa\" (UID: \"0107b32f-c775-4e02-b7da-0b3ce5962efa\") " Jan 22 07:21:44 crc kubenswrapper[4982]: I0122 07:21:44.531540 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0107b32f-c775-4e02-b7da-0b3ce5962efa-ovsdbserver-sb\") pod \"0107b32f-c775-4e02-b7da-0b3ce5962efa\" (UID: \"0107b32f-c775-4e02-b7da-0b3ce5962efa\") " Jan 22 07:21:44 crc kubenswrapper[4982]: I0122 07:21:44.531565 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nqxkz\" (UniqueName: \"kubernetes.io/projected/0107b32f-c775-4e02-b7da-0b3ce5962efa-kube-api-access-nqxkz\") pod \"0107b32f-c775-4e02-b7da-0b3ce5962efa\" (UID: \"0107b32f-c775-4e02-b7da-0b3ce5962efa\") " Jan 22 07:21:44 crc kubenswrapper[4982]: I0122 07:21:44.531608 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0107b32f-c775-4e02-b7da-0b3ce5962efa-ovsdbserver-nb\") pod \"0107b32f-c775-4e02-b7da-0b3ce5962efa\" (UID: \"0107b32f-c775-4e02-b7da-0b3ce5962efa\") " Jan 22 07:21:44 crc kubenswrapper[4982]: I0122 07:21:44.555870 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0107b32f-c775-4e02-b7da-0b3ce5962efa-kube-api-access-nqxkz" (OuterVolumeSpecName: "kube-api-access-nqxkz") pod "0107b32f-c775-4e02-b7da-0b3ce5962efa" (UID: "0107b32f-c775-4e02-b7da-0b3ce5962efa"). InnerVolumeSpecName "kube-api-access-nqxkz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:21:44 crc kubenswrapper[4982]: I0122 07:21:44.579206 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0107b32f-c775-4e02-b7da-0b3ce5962efa-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0107b32f-c775-4e02-b7da-0b3ce5962efa" (UID: "0107b32f-c775-4e02-b7da-0b3ce5962efa"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:21:44 crc kubenswrapper[4982]: I0122 07:21:44.581914 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0107b32f-c775-4e02-b7da-0b3ce5962efa-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0107b32f-c775-4e02-b7da-0b3ce5962efa" (UID: "0107b32f-c775-4e02-b7da-0b3ce5962efa"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:21:44 crc kubenswrapper[4982]: I0122 07:21:44.587153 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0107b32f-c775-4e02-b7da-0b3ce5962efa-config" (OuterVolumeSpecName: "config") pod "0107b32f-c775-4e02-b7da-0b3ce5962efa" (UID: "0107b32f-c775-4e02-b7da-0b3ce5962efa"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:21:44 crc kubenswrapper[4982]: I0122 07:21:44.588425 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0107b32f-c775-4e02-b7da-0b3ce5962efa-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0107b32f-c775-4e02-b7da-0b3ce5962efa" (UID: "0107b32f-c775-4e02-b7da-0b3ce5962efa"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:21:44 crc kubenswrapper[4982]: I0122 07:21:44.634237 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0107b32f-c775-4e02-b7da-0b3ce5962efa-config\") on node \"crc\" DevicePath \"\"" Jan 22 07:21:44 crc kubenswrapper[4982]: I0122 07:21:44.634267 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0107b32f-c775-4e02-b7da-0b3ce5962efa-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 07:21:44 crc kubenswrapper[4982]: I0122 07:21:44.634278 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nqxkz\" (UniqueName: \"kubernetes.io/projected/0107b32f-c775-4e02-b7da-0b3ce5962efa-kube-api-access-nqxkz\") on node \"crc\" DevicePath \"\"" Jan 22 07:21:44 crc kubenswrapper[4982]: I0122 07:21:44.634286 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0107b32f-c775-4e02-b7da-0b3ce5962efa-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 07:21:44 crc kubenswrapper[4982]: I0122 07:21:44.634294 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0107b32f-c775-4e02-b7da-0b3ce5962efa-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 07:21:45 crc kubenswrapper[4982]: I0122 07:21:45.355740 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-795b859695-ft69v" event={"ID":"0107b32f-c775-4e02-b7da-0b3ce5962efa","Type":"ContainerDied","Data":"224a0956a2a26ed0d7ff46e6ab4cfc611c022c248def762800fada279f1a28c4"} Jan 22 07:21:45 crc kubenswrapper[4982]: I0122 07:21:45.355792 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-795b859695-ft69v" Jan 22 07:21:45 crc kubenswrapper[4982]: I0122 07:21:45.355797 4982 scope.go:117] "RemoveContainer" containerID="27b21e85f1d3640b6ff3e32e788867934af20a1ac1044f24f5b726bb916c53e0" Jan 22 07:21:45 crc kubenswrapper[4982]: I0122 07:21:45.422040 4982 scope.go:117] "RemoveContainer" containerID="08160ab94d04a1560294cbf7296be24d31528d32b958407b42ca4b7039b95569" Jan 22 07:21:45 crc kubenswrapper[4982]: I0122 07:21:45.456835 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-795b859695-ft69v"] Jan 22 07:21:45 crc kubenswrapper[4982]: I0122 07:21:45.478760 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-795b859695-ft69v"] Jan 22 07:21:45 crc kubenswrapper[4982]: I0122 07:21:45.495212 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 22 07:21:45 crc kubenswrapper[4982]: I0122 07:21:45.495529 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="788263eb-cb47-4b21-aa3c-46a3eab75a99" containerName="nova-api-log" containerID="cri-o://9079edd4e93bd6e1170499f05c3c7331a3e10916dd21024f3b3b46ef97d305f2" gracePeriod=30 Jan 22 07:21:45 crc kubenswrapper[4982]: I0122 07:21:45.496066 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="788263eb-cb47-4b21-aa3c-46a3eab75a99" containerName="nova-api-api" containerID="cri-o://f157609e35396abbb2bccc9d6be2832223119d3a177cecb456d4797f77614a75" gracePeriod=30 Jan 22 07:21:45 crc kubenswrapper[4982]: I0122 07:21:45.517319 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 07:21:45 crc kubenswrapper[4982]: I0122 07:21:45.517735 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="500eb623-9eb7-41c6-938b-029cb9807cce" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://f8190e9200eaa63dfc1cda958d79266c85e240f81e0345bb8c7db3bdd52da720" gracePeriod=30 Jan 22 07:21:45 crc kubenswrapper[4982]: I0122 07:21:45.552548 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 07:21:45 crc kubenswrapper[4982]: I0122 07:21:45.552907 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="35d112d5-9f8f-4128-b4f5-a964e5b7f4e7" containerName="nova-metadata-log" containerID="cri-o://b92e6f95b1c6e067eedc37a3546664ff0c442c14ecc77319de7bc054e5bfd68a" gracePeriod=30 Jan 22 07:21:45 crc kubenswrapper[4982]: I0122 07:21:45.553082 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="35d112d5-9f8f-4128-b4f5-a964e5b7f4e7" containerName="nova-metadata-metadata" containerID="cri-o://7c57734f027e290dd8c4e2026f467642553d3b04de7458f4ba6839babd117413" gracePeriod=30 Jan 22 07:21:45 crc kubenswrapper[4982]: I0122 07:21:45.565080 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 07:21:45 crc kubenswrapper[4982]: I0122 07:21:45.565432 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="c2965fcc-c5e4-4066-ade7-4262cbca707f" containerName="nova-scheduler-scheduler" containerID="cri-o://1c5bd916cf142d700fe5897fbf41c569870331decd80db892f92290d68cec4a1" gracePeriod=30 Jan 22 07:21:45 crc kubenswrapper[4982]: I0122 07:21:45.578111 
4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 22 07:21:45 crc kubenswrapper[4982]: I0122 07:21:45.578386 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="c198eda0-01d3-443e-b342-5039d9cfb0d5" containerName="nova-cell1-conductor-conductor" containerID="cri-o://63669ecbdb5cd393539ad12c91a9004be45f61e670e7b6b917c54a26bdfad19e" gracePeriod=30 Jan 22 07:21:45 crc kubenswrapper[4982]: I0122 07:21:45.730981 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0107b32f-c775-4e02-b7da-0b3ce5962efa" path="/var/lib/kubelet/pods/0107b32f-c775-4e02-b7da-0b3ce5962efa/volumes" Jan 22 07:21:45 crc kubenswrapper[4982]: I0122 07:21:45.992241 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Jan 22 07:21:46 crc kubenswrapper[4982]: I0122 07:21:46.371749 4982 generic.go:334] "Generic (PLEG): container finished" podID="788263eb-cb47-4b21-aa3c-46a3eab75a99" containerID="9079edd4e93bd6e1170499f05c3c7331a3e10916dd21024f3b3b46ef97d305f2" exitCode=143 Jan 22 07:21:46 crc kubenswrapper[4982]: I0122 07:21:46.372273 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"788263eb-cb47-4b21-aa3c-46a3eab75a99","Type":"ContainerDied","Data":"9079edd4e93bd6e1170499f05c3c7331a3e10916dd21024f3b3b46ef97d305f2"} Jan 22 07:21:46 crc kubenswrapper[4982]: I0122 07:21:46.374427 4982 generic.go:334] "Generic (PLEG): container finished" podID="500eb623-9eb7-41c6-938b-029cb9807cce" containerID="f8190e9200eaa63dfc1cda958d79266c85e240f81e0345bb8c7db3bdd52da720" exitCode=0 Jan 22 07:21:46 crc kubenswrapper[4982]: I0122 07:21:46.374499 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"500eb623-9eb7-41c6-938b-029cb9807cce","Type":"ContainerDied","Data":"f8190e9200eaa63dfc1cda958d79266c85e240f81e0345bb8c7db3bdd52da720"} Jan 22 07:21:46 crc kubenswrapper[4982]: I0122 07:21:46.378903 4982 generic.go:334] "Generic (PLEG): container finished" podID="35d112d5-9f8f-4128-b4f5-a964e5b7f4e7" containerID="b92e6f95b1c6e067eedc37a3546664ff0c442c14ecc77319de7bc054e5bfd68a" exitCode=143 Jan 22 07:21:46 crc kubenswrapper[4982]: I0122 07:21:46.378943 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"35d112d5-9f8f-4128-b4f5-a964e5b7f4e7","Type":"ContainerDied","Data":"b92e6f95b1c6e067eedc37a3546664ff0c442c14ecc77319de7bc054e5bfd68a"} Jan 22 07:21:46 crc kubenswrapper[4982]: I0122 07:21:46.552631 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 22 07:21:46 crc kubenswrapper[4982]: I0122 07:21:46.681933 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/500eb623-9eb7-41c6-938b-029cb9807cce-combined-ca-bundle\") pod \"500eb623-9eb7-41c6-938b-029cb9807cce\" (UID: \"500eb623-9eb7-41c6-938b-029cb9807cce\") " Jan 22 07:21:46 crc kubenswrapper[4982]: I0122 07:21:46.682078 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/500eb623-9eb7-41c6-938b-029cb9807cce-config-data\") pod \"500eb623-9eb7-41c6-938b-029cb9807cce\" (UID: \"500eb623-9eb7-41c6-938b-029cb9807cce\") " Jan 22 07:21:46 crc kubenswrapper[4982]: I0122 07:21:46.682198 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jbtgq\" (UniqueName: \"kubernetes.io/projected/500eb623-9eb7-41c6-938b-029cb9807cce-kube-api-access-jbtgq\") pod \"500eb623-9eb7-41c6-938b-029cb9807cce\" (UID: \"500eb623-9eb7-41c6-938b-029cb9807cce\") " Jan 22 07:21:46 crc kubenswrapper[4982]: I0122 07:21:46.697134 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/500eb623-9eb7-41c6-938b-029cb9807cce-kube-api-access-jbtgq" (OuterVolumeSpecName: "kube-api-access-jbtgq") pod "500eb623-9eb7-41c6-938b-029cb9807cce" (UID: "500eb623-9eb7-41c6-938b-029cb9807cce"). InnerVolumeSpecName "kube-api-access-jbtgq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:21:46 crc kubenswrapper[4982]: I0122 07:21:46.709896 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/500eb623-9eb7-41c6-938b-029cb9807cce-config-data" (OuterVolumeSpecName: "config-data") pod "500eb623-9eb7-41c6-938b-029cb9807cce" (UID: "500eb623-9eb7-41c6-938b-029cb9807cce"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:21:46 crc kubenswrapper[4982]: I0122 07:21:46.717294 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/500eb623-9eb7-41c6-938b-029cb9807cce-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "500eb623-9eb7-41c6-938b-029cb9807cce" (UID: "500eb623-9eb7-41c6-938b-029cb9807cce"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:21:46 crc kubenswrapper[4982]: I0122 07:21:46.783915 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/500eb623-9eb7-41c6-938b-029cb9807cce-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:21:46 crc kubenswrapper[4982]: I0122 07:21:46.783951 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jbtgq\" (UniqueName: \"kubernetes.io/projected/500eb623-9eb7-41c6-938b-029cb9807cce-kube-api-access-jbtgq\") on node \"crc\" DevicePath \"\"" Jan 22 07:21:46 crc kubenswrapper[4982]: I0122 07:21:46.783960 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/500eb623-9eb7-41c6-938b-029cb9807cce-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:21:47 crc kubenswrapper[4982]: I0122 07:21:47.390865 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"500eb623-9eb7-41c6-938b-029cb9807cce","Type":"ContainerDied","Data":"890ba15d77be1fad6520a7ed0c92132be0ae80e2adc5f43dc082a6485bf8c6ff"} Jan 22 07:21:47 crc kubenswrapper[4982]: I0122 07:21:47.390979 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 22 07:21:47 crc kubenswrapper[4982]: I0122 07:21:47.391190 4982 scope.go:117] "RemoveContainer" containerID="f8190e9200eaa63dfc1cda958d79266c85e240f81e0345bb8c7db3bdd52da720" Jan 22 07:21:47 crc kubenswrapper[4982]: I0122 07:21:47.435048 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 07:21:47 crc kubenswrapper[4982]: I0122 07:21:47.452319 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 07:21:47 crc kubenswrapper[4982]: I0122 07:21:47.463599 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 07:21:47 crc kubenswrapper[4982]: E0122 07:21:47.463990 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0107b32f-c775-4e02-b7da-0b3ce5962efa" containerName="dnsmasq-dns" Jan 22 07:21:47 crc kubenswrapper[4982]: I0122 07:21:47.464001 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="0107b32f-c775-4e02-b7da-0b3ce5962efa" containerName="dnsmasq-dns" Jan 22 07:21:47 crc kubenswrapper[4982]: E0122 07:21:47.464011 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0107b32f-c775-4e02-b7da-0b3ce5962efa" containerName="init" Jan 22 07:21:47 crc kubenswrapper[4982]: I0122 07:21:47.464017 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="0107b32f-c775-4e02-b7da-0b3ce5962efa" containerName="init" Jan 22 07:21:47 crc kubenswrapper[4982]: E0122 07:21:47.464032 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="500eb623-9eb7-41c6-938b-029cb9807cce" containerName="nova-cell1-novncproxy-novncproxy" Jan 22 07:21:47 crc kubenswrapper[4982]: I0122 07:21:47.464038 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="500eb623-9eb7-41c6-938b-029cb9807cce" containerName="nova-cell1-novncproxy-novncproxy" Jan 22 07:21:47 crc kubenswrapper[4982]: I0122 07:21:47.464193 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="0107b32f-c775-4e02-b7da-0b3ce5962efa" containerName="dnsmasq-dns" Jan 22 07:21:47 crc kubenswrapper[4982]: I0122 07:21:47.464216 4982 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="500eb623-9eb7-41c6-938b-029cb9807cce" containerName="nova-cell1-novncproxy-novncproxy" Jan 22 07:21:47 crc kubenswrapper[4982]: I0122 07:21:47.464844 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 22 07:21:47 crc kubenswrapper[4982]: I0122 07:21:47.473129 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Jan 22 07:21:47 crc kubenswrapper[4982]: I0122 07:21:47.473662 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 07:21:47 crc kubenswrapper[4982]: I0122 07:21:47.597915 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8db70abd-afc0-4f5b-90bc-d146bcdb076c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"8db70abd-afc0-4f5b-90bc-d146bcdb076c\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 07:21:47 crc kubenswrapper[4982]: I0122 07:21:47.597991 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8db70abd-afc0-4f5b-90bc-d146bcdb076c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"8db70abd-afc0-4f5b-90bc-d146bcdb076c\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 07:21:47 crc kubenswrapper[4982]: I0122 07:21:47.598020 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7vvb\" (UniqueName: \"kubernetes.io/projected/8db70abd-afc0-4f5b-90bc-d146bcdb076c-kube-api-access-g7vvb\") pod \"nova-cell1-novncproxy-0\" (UID: \"8db70abd-afc0-4f5b-90bc-d146bcdb076c\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 07:21:47 crc kubenswrapper[4982]: I0122 07:21:47.714279 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8db70abd-afc0-4f5b-90bc-d146bcdb076c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"8db70abd-afc0-4f5b-90bc-d146bcdb076c\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 07:21:47 crc kubenswrapper[4982]: I0122 07:21:47.714339 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8db70abd-afc0-4f5b-90bc-d146bcdb076c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"8db70abd-afc0-4f5b-90bc-d146bcdb076c\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 07:21:47 crc kubenswrapper[4982]: I0122 07:21:47.714367 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7vvb\" (UniqueName: \"kubernetes.io/projected/8db70abd-afc0-4f5b-90bc-d146bcdb076c-kube-api-access-g7vvb\") pod \"nova-cell1-novncproxy-0\" (UID: \"8db70abd-afc0-4f5b-90bc-d146bcdb076c\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 07:21:47 crc kubenswrapper[4982]: I0122 07:21:47.730838 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8db70abd-afc0-4f5b-90bc-d146bcdb076c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"8db70abd-afc0-4f5b-90bc-d146bcdb076c\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 07:21:47 crc kubenswrapper[4982]: I0122 07:21:47.733331 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/8db70abd-afc0-4f5b-90bc-d146bcdb076c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"8db70abd-afc0-4f5b-90bc-d146bcdb076c\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 07:21:47 crc kubenswrapper[4982]: I0122 07:21:47.746088 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="500eb623-9eb7-41c6-938b-029cb9807cce" path="/var/lib/kubelet/pods/500eb623-9eb7-41c6-938b-029cb9807cce/volumes" Jan 22 07:21:47 crc kubenswrapper[4982]: I0122 07:21:47.747323 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7vvb\" (UniqueName: \"kubernetes.io/projected/8db70abd-afc0-4f5b-90bc-d146bcdb076c-kube-api-access-g7vvb\") pod \"nova-cell1-novncproxy-0\" (UID: \"8db70abd-afc0-4f5b-90bc-d146bcdb076c\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 07:21:47 crc kubenswrapper[4982]: I0122 07:21:47.804398 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 22 07:21:48 crc kubenswrapper[4982]: E0122 07:21:48.066674 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1c5bd916cf142d700fe5897fbf41c569870331decd80db892f92290d68cec4a1" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 22 07:21:48 crc kubenswrapper[4982]: E0122 07:21:48.068414 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1c5bd916cf142d700fe5897fbf41c569870331decd80db892f92290d68cec4a1" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 22 07:21:48 crc kubenswrapper[4982]: E0122 07:21:48.069530 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1c5bd916cf142d700fe5897fbf41c569870331decd80db892f92290d68cec4a1" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 22 07:21:48 crc kubenswrapper[4982]: E0122 07:21:48.069565 4982 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="c2965fcc-c5e4-4066-ade7-4262cbca707f" containerName="nova-scheduler-scheduler" Jan 22 07:21:48 crc kubenswrapper[4982]: I0122 07:21:48.357051 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 07:21:48 crc kubenswrapper[4982]: I0122 07:21:48.401916 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"8db70abd-afc0-4f5b-90bc-d146bcdb076c","Type":"ContainerStarted","Data":"b4272293035f4b839229d25b7b0eade37b657d5d3f76789003773ab3b387d394"} Jan 22 07:21:48 crc kubenswrapper[4982]: I0122 07:21:48.646053 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-api-0" podUID="788263eb-cb47-4b21-aa3c-46a3eab75a99" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.70:8774/\": read tcp 10.217.0.2:39042->10.217.1.70:8774: read: connection reset by peer" Jan 22 07:21:48 crc kubenswrapper[4982]: I0122 07:21:48.646074 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-api-0" 
podUID="788263eb-cb47-4b21-aa3c-46a3eab75a99" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.70:8774/\": read tcp 10.217.0.2:39026->10.217.1.70:8774: read: connection reset by peer" Jan 22 07:21:48 crc kubenswrapper[4982]: I0122 07:21:48.693448 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="35d112d5-9f8f-4128-b4f5-a964e5b7f4e7" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.71:8775/\": read tcp 10.217.0.2:49018->10.217.1.71:8775: read: connection reset by peer" Jan 22 07:21:48 crc kubenswrapper[4982]: I0122 07:21:48.693476 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="35d112d5-9f8f-4128-b4f5-a964e5b7f4e7" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.71:8775/\": read tcp 10.217.0.2:49028->10.217.1.71:8775: read: connection reset by peer" Jan 22 07:21:48 crc kubenswrapper[4982]: I0122 07:21:48.697444 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 22 07:21:48 crc kubenswrapper[4982]: I0122 07:21:48.697668 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="d238384e-a7ec-4417-8c61-362fb936cd7b" containerName="nova-cell0-conductor-conductor" containerID="cri-o://c9c89fb9a34e322ea4bbc68047806ac64669c3e10098526ae5a542c46e969688" gracePeriod=30 Jan 22 07:21:48 crc kubenswrapper[4982]: I0122 07:21:48.974307 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 07:21:48 crc kubenswrapper[4982]: I0122 07:21:48.974366 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.216743 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.362078 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/788263eb-cb47-4b21-aa3c-46a3eab75a99-combined-ca-bundle\") pod \"788263eb-cb47-4b21-aa3c-46a3eab75a99\" (UID: \"788263eb-cb47-4b21-aa3c-46a3eab75a99\") " Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.362201 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/788263eb-cb47-4b21-aa3c-46a3eab75a99-logs\") pod \"788263eb-cb47-4b21-aa3c-46a3eab75a99\" (UID: \"788263eb-cb47-4b21-aa3c-46a3eab75a99\") " Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.362278 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/788263eb-cb47-4b21-aa3c-46a3eab75a99-config-data\") pod \"788263eb-cb47-4b21-aa3c-46a3eab75a99\" (UID: \"788263eb-cb47-4b21-aa3c-46a3eab75a99\") " Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.362360 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5zs7r\" (UniqueName: \"kubernetes.io/projected/788263eb-cb47-4b21-aa3c-46a3eab75a99-kube-api-access-5zs7r\") pod \"788263eb-cb47-4b21-aa3c-46a3eab75a99\" (UID: \"788263eb-cb47-4b21-aa3c-46a3eab75a99\") " Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.363259 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/788263eb-cb47-4b21-aa3c-46a3eab75a99-logs" (OuterVolumeSpecName: "logs") pod "788263eb-cb47-4b21-aa3c-46a3eab75a99" (UID: "788263eb-cb47-4b21-aa3c-46a3eab75a99"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.369068 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/788263eb-cb47-4b21-aa3c-46a3eab75a99-kube-api-access-5zs7r" (OuterVolumeSpecName: "kube-api-access-5zs7r") pod "788263eb-cb47-4b21-aa3c-46a3eab75a99" (UID: "788263eb-cb47-4b21-aa3c-46a3eab75a99"). InnerVolumeSpecName "kube-api-access-5zs7r". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.412534 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.418733 4982 generic.go:334] "Generic (PLEG): container finished" podID="c198eda0-01d3-443e-b342-5039d9cfb0d5" containerID="63669ecbdb5cd393539ad12c91a9004be45f61e670e7b6b917c54a26bdfad19e" exitCode=0 Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.418777 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"c198eda0-01d3-443e-b342-5039d9cfb0d5","Type":"ContainerDied","Data":"63669ecbdb5cd393539ad12c91a9004be45f61e670e7b6b917c54a26bdfad19e"} Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.420021 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/788263eb-cb47-4b21-aa3c-46a3eab75a99-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "788263eb-cb47-4b21-aa3c-46a3eab75a99" (UID: "788263eb-cb47-4b21-aa3c-46a3eab75a99"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.421341 4982 generic.go:334] "Generic (PLEG): container finished" podID="788263eb-cb47-4b21-aa3c-46a3eab75a99" containerID="f157609e35396abbb2bccc9d6be2832223119d3a177cecb456d4797f77614a75" exitCode=0 Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.421416 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"788263eb-cb47-4b21-aa3c-46a3eab75a99","Type":"ContainerDied","Data":"f157609e35396abbb2bccc9d6be2832223119d3a177cecb456d4797f77614a75"} Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.421444 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"788263eb-cb47-4b21-aa3c-46a3eab75a99","Type":"ContainerDied","Data":"910c9c43b0a9fd69834c3aad23314d46a026eee4b5e4a32af3bec04266e92e27"} Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.421465 4982 scope.go:117] "RemoveContainer" containerID="f157609e35396abbb2bccc9d6be2832223119d3a177cecb456d4797f77614a75" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.421617 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.428120 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/788263eb-cb47-4b21-aa3c-46a3eab75a99-config-data" (OuterVolumeSpecName: "config-data") pod "788263eb-cb47-4b21-aa3c-46a3eab75a99" (UID: "788263eb-cb47-4b21-aa3c-46a3eab75a99"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.431338 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"8db70abd-afc0-4f5b-90bc-d146bcdb076c","Type":"ContainerStarted","Data":"0a0c0b429553662cde11fca2e2c073cb2da94d314233738c3c7f26c4e7a4d8dc"} Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.460377 4982 generic.go:334] "Generic (PLEG): container finished" podID="35d112d5-9f8f-4128-b4f5-a964e5b7f4e7" containerID="7c57734f027e290dd8c4e2026f467642553d3b04de7458f4ba6839babd117413" exitCode=0 Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.460418 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"35d112d5-9f8f-4128-b4f5-a964e5b7f4e7","Type":"ContainerDied","Data":"7c57734f027e290dd8c4e2026f467642553d3b04de7458f4ba6839babd117413"} Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.460443 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"35d112d5-9f8f-4128-b4f5-a964e5b7f4e7","Type":"ContainerDied","Data":"2fb33cd3219ed5b3201f4a64d19a7052de3b6528b7143c45f6b98ba8da439705"} Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.460442 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.463917 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hq9lm\" (UniqueName: \"kubernetes.io/projected/35d112d5-9f8f-4128-b4f5-a964e5b7f4e7-kube-api-access-hq9lm\") pod \"35d112d5-9f8f-4128-b4f5-a964e5b7f4e7\" (UID: \"35d112d5-9f8f-4128-b4f5-a964e5b7f4e7\") " Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.464002 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35d112d5-9f8f-4128-b4f5-a964e5b7f4e7-config-data\") pod \"35d112d5-9f8f-4128-b4f5-a964e5b7f4e7\" (UID: \"35d112d5-9f8f-4128-b4f5-a964e5b7f4e7\") " Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.464019 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35d112d5-9f8f-4128-b4f5-a964e5b7f4e7-logs\") pod \"35d112d5-9f8f-4128-b4f5-a964e5b7f4e7\" (UID: \"35d112d5-9f8f-4128-b4f5-a964e5b7f4e7\") " Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.464170 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35d112d5-9f8f-4128-b4f5-a964e5b7f4e7-combined-ca-bundle\") pod \"35d112d5-9f8f-4128-b4f5-a964e5b7f4e7\" (UID: \"35d112d5-9f8f-4128-b4f5-a964e5b7f4e7\") " Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.464526 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/788263eb-cb47-4b21-aa3c-46a3eab75a99-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.464537 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/788263eb-cb47-4b21-aa3c-46a3eab75a99-logs\") on node \"crc\" DevicePath \"\"" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.464546 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/788263eb-cb47-4b21-aa3c-46a3eab75a99-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.464556 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5zs7r\" (UniqueName: \"kubernetes.io/projected/788263eb-cb47-4b21-aa3c-46a3eab75a99-kube-api-access-5zs7r\") on node \"crc\" DevicePath \"\"" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.470164 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35d112d5-9f8f-4128-b4f5-a964e5b7f4e7-logs" (OuterVolumeSpecName: "logs") pod "35d112d5-9f8f-4128-b4f5-a964e5b7f4e7" (UID: "35d112d5-9f8f-4128-b4f5-a964e5b7f4e7"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.475765 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.475743986 podStartE2EDuration="2.475743986s" podCreationTimestamp="2026-01-22 07:21:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:21:49.469250412 +0000 UTC m=+5770.307888425" watchObservedRunningTime="2026-01-22 07:21:49.475743986 +0000 UTC m=+5770.314381989" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.482091 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35d112d5-9f8f-4128-b4f5-a964e5b7f4e7-kube-api-access-hq9lm" (OuterVolumeSpecName: "kube-api-access-hq9lm") pod "35d112d5-9f8f-4128-b4f5-a964e5b7f4e7" (UID: "35d112d5-9f8f-4128-b4f5-a964e5b7f4e7"). InnerVolumeSpecName "kube-api-access-hq9lm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.500092 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35d112d5-9f8f-4128-b4f5-a964e5b7f4e7-config-data" (OuterVolumeSpecName: "config-data") pod "35d112d5-9f8f-4128-b4f5-a964e5b7f4e7" (UID: "35d112d5-9f8f-4128-b4f5-a964e5b7f4e7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.504392 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35d112d5-9f8f-4128-b4f5-a964e5b7f4e7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "35d112d5-9f8f-4128-b4f5-a964e5b7f4e7" (UID: "35d112d5-9f8f-4128-b4f5-a964e5b7f4e7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.507888 4982 scope.go:117] "RemoveContainer" containerID="9079edd4e93bd6e1170499f05c3c7331a3e10916dd21024f3b3b46ef97d305f2" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.534201 4982 scope.go:117] "RemoveContainer" containerID="f157609e35396abbb2bccc9d6be2832223119d3a177cecb456d4797f77614a75" Jan 22 07:21:49 crc kubenswrapper[4982]: E0122 07:21:49.534964 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f157609e35396abbb2bccc9d6be2832223119d3a177cecb456d4797f77614a75\": container with ID starting with f157609e35396abbb2bccc9d6be2832223119d3a177cecb456d4797f77614a75 not found: ID does not exist" containerID="f157609e35396abbb2bccc9d6be2832223119d3a177cecb456d4797f77614a75" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.535020 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f157609e35396abbb2bccc9d6be2832223119d3a177cecb456d4797f77614a75"} err="failed to get container status \"f157609e35396abbb2bccc9d6be2832223119d3a177cecb456d4797f77614a75\": rpc error: code = NotFound desc = could not find container \"f157609e35396abbb2bccc9d6be2832223119d3a177cecb456d4797f77614a75\": container with ID starting with f157609e35396abbb2bccc9d6be2832223119d3a177cecb456d4797f77614a75 not found: ID does not exist" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.535056 4982 scope.go:117] "RemoveContainer" containerID="9079edd4e93bd6e1170499f05c3c7331a3e10916dd21024f3b3b46ef97d305f2" Jan 22 07:21:49 crc kubenswrapper[4982]: E0122 07:21:49.535451 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9079edd4e93bd6e1170499f05c3c7331a3e10916dd21024f3b3b46ef97d305f2\": container with ID starting with 9079edd4e93bd6e1170499f05c3c7331a3e10916dd21024f3b3b46ef97d305f2 not found: ID does not exist" containerID="9079edd4e93bd6e1170499f05c3c7331a3e10916dd21024f3b3b46ef97d305f2" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.535480 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9079edd4e93bd6e1170499f05c3c7331a3e10916dd21024f3b3b46ef97d305f2"} err="failed to get container status \"9079edd4e93bd6e1170499f05c3c7331a3e10916dd21024f3b3b46ef97d305f2\": rpc error: code = NotFound desc = could not find container \"9079edd4e93bd6e1170499f05c3c7331a3e10916dd21024f3b3b46ef97d305f2\": container with ID starting with 9079edd4e93bd6e1170499f05c3c7331a3e10916dd21024f3b3b46ef97d305f2 not found: ID does not exist" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.535497 4982 scope.go:117] "RemoveContainer" containerID="7c57734f027e290dd8c4e2026f467642553d3b04de7458f4ba6839babd117413" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.553198 4982 scope.go:117] "RemoveContainer" containerID="b92e6f95b1c6e067eedc37a3546664ff0c442c14ecc77319de7bc054e5bfd68a" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.566920 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35d112d5-9f8f-4128-b4f5-a964e5b7f4e7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.566957 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hq9lm\" (UniqueName: 
\"kubernetes.io/projected/35d112d5-9f8f-4128-b4f5-a964e5b7f4e7-kube-api-access-hq9lm\") on node \"crc\" DevicePath \"\"" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.566974 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35d112d5-9f8f-4128-b4f5-a964e5b7f4e7-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.566985 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35d112d5-9f8f-4128-b4f5-a964e5b7f4e7-logs\") on node \"crc\" DevicePath \"\"" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.594765 4982 scope.go:117] "RemoveContainer" containerID="7c57734f027e290dd8c4e2026f467642553d3b04de7458f4ba6839babd117413" Jan 22 07:21:49 crc kubenswrapper[4982]: E0122 07:21:49.595508 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c57734f027e290dd8c4e2026f467642553d3b04de7458f4ba6839babd117413\": container with ID starting with 7c57734f027e290dd8c4e2026f467642553d3b04de7458f4ba6839babd117413 not found: ID does not exist" containerID="7c57734f027e290dd8c4e2026f467642553d3b04de7458f4ba6839babd117413" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.595569 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c57734f027e290dd8c4e2026f467642553d3b04de7458f4ba6839babd117413"} err="failed to get container status \"7c57734f027e290dd8c4e2026f467642553d3b04de7458f4ba6839babd117413\": rpc error: code = NotFound desc = could not find container \"7c57734f027e290dd8c4e2026f467642553d3b04de7458f4ba6839babd117413\": container with ID starting with 7c57734f027e290dd8c4e2026f467642553d3b04de7458f4ba6839babd117413 not found: ID does not exist" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.595616 4982 scope.go:117] "RemoveContainer" containerID="b92e6f95b1c6e067eedc37a3546664ff0c442c14ecc77319de7bc054e5bfd68a" Jan 22 07:21:49 crc kubenswrapper[4982]: E0122 07:21:49.599442 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b92e6f95b1c6e067eedc37a3546664ff0c442c14ecc77319de7bc054e5bfd68a\": container with ID starting with b92e6f95b1c6e067eedc37a3546664ff0c442c14ecc77319de7bc054e5bfd68a not found: ID does not exist" containerID="b92e6f95b1c6e067eedc37a3546664ff0c442c14ecc77319de7bc054e5bfd68a" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.599486 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b92e6f95b1c6e067eedc37a3546664ff0c442c14ecc77319de7bc054e5bfd68a"} err="failed to get container status \"b92e6f95b1c6e067eedc37a3546664ff0c442c14ecc77319de7bc054e5bfd68a\": rpc error: code = NotFound desc = could not find container \"b92e6f95b1c6e067eedc37a3546664ff0c442c14ecc77319de7bc054e5bfd68a\": container with ID starting with b92e6f95b1c6e067eedc37a3546664ff0c442c14ecc77319de7bc054e5bfd68a not found: ID does not exist" Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.791574 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.818911 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jan 22 07:21:49 crc kubenswrapper[4982]: E0122 07:21:49.819624 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = 
Jan 22 07:21:49 crc kubenswrapper[4982]: E0122 07:21:49.827717 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63669ecbdb5cd393539ad12c91a9004be45f61e670e7b6b917c54a26bdfad19e is running failed: container process not found" containerID="63669ecbdb5cd393539ad12c91a9004be45f61e670e7b6b917c54a26bdfad19e" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Jan 22 07:21:49 crc kubenswrapper[4982]: E0122 07:21:49.831168 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63669ecbdb5cd393539ad12c91a9004be45f61e670e7b6b917c54a26bdfad19e is running failed: container process not found" containerID="63669ecbdb5cd393539ad12c91a9004be45f61e670e7b6b917c54a26bdfad19e" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Jan 22 07:21:49 crc kubenswrapper[4982]: E0122 07:21:49.831247 4982 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63669ecbdb5cd393539ad12c91a9004be45f61e670e7b6b917c54a26bdfad19e is running failed: container process not found" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="c198eda0-01d3-443e-b342-5039d9cfb0d5" containerName="nova-cell1-conductor-conductor"
Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.898067 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Jan 22 07:21:49 crc kubenswrapper[4982]: E0122 07:21:49.898472 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="788263eb-cb47-4b21-aa3c-46a3eab75a99" containerName="nova-api-log"
Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.898485 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="788263eb-cb47-4b21-aa3c-46a3eab75a99" containerName="nova-api-log"
Jan 22 07:21:49 crc kubenswrapper[4982]: E0122 07:21:49.898501 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="788263eb-cb47-4b21-aa3c-46a3eab75a99" containerName="nova-api-api"
Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.898507 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="788263eb-cb47-4b21-aa3c-46a3eab75a99" containerName="nova-api-api"
Jan 22 07:21:49 crc kubenswrapper[4982]: E0122 07:21:49.898519 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35d112d5-9f8f-4128-b4f5-a964e5b7f4e7" containerName="nova-metadata-log"
Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.898525 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="35d112d5-9f8f-4128-b4f5-a964e5b7f4e7" containerName="nova-metadata-log"
Jan 22 07:21:49 crc kubenswrapper[4982]: E0122 07:21:49.898539 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35d112d5-9f8f-4128-b4f5-a964e5b7f4e7" containerName="nova-metadata-metadata"
Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.898545 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="35d112d5-9f8f-4128-b4f5-a964e5b7f4e7" containerName="nova-metadata-metadata"
Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.898705 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="35d112d5-9f8f-4128-b4f5-a964e5b7f4e7" containerName="nova-metadata-log"
Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.898725 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="788263eb-cb47-4b21-aa3c-46a3eab75a99" containerName="nova-api-log"
Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.898733 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="35d112d5-9f8f-4128-b4f5-a964e5b7f4e7" containerName="nova-metadata-metadata"
Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.898743 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="788263eb-cb47-4b21-aa3c-46a3eab75a99" containerName="nova-api-api"
Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.907215 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.910246 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.910446 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.938721 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.952615 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.963944 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.965927 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.968192 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.971393 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.979056 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad9cd174-57bf-44d9-b5aa-abeccc5bcac8-logs\") pod \"nova-api-0\" (UID: \"ad9cd174-57bf-44d9-b5aa-abeccc5bcac8\") " pod="openstack/nova-api-0"
Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.979171 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad9cd174-57bf-44d9-b5aa-abeccc5bcac8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ad9cd174-57bf-44d9-b5aa-abeccc5bcac8\") " pod="openstack/nova-api-0"
Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.979209 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad9cd174-57bf-44d9-b5aa-abeccc5bcac8-config-data\") pod \"nova-api-0\" (UID: \"ad9cd174-57bf-44d9-b5aa-abeccc5bcac8\") " pod="openstack/nova-api-0"
Jan 22 07:21:49 crc kubenswrapper[4982]: I0122 07:21:49.979287 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rtd5c\" (UniqueName: \"kubernetes.io/projected/ad9cd174-57bf-44d9-b5aa-abeccc5bcac8-kube-api-access-rtd5c\") pod \"nova-api-0\" (UID: \"ad9cd174-57bf-44d9-b5aa-abeccc5bcac8\") " pod="openstack/nova-api-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.053725 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.089012 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rtd5c\" (UniqueName: \"kubernetes.io/projected/ad9cd174-57bf-44d9-b5aa-abeccc5bcac8-kube-api-access-rtd5c\") pod \"nova-api-0\" (UID: \"ad9cd174-57bf-44d9-b5aa-abeccc5bcac8\") " pod="openstack/nova-api-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.089073 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/272317b4-c796-4e4f-9867-1d37a2bf3ac7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"272317b4-c796-4e4f-9867-1d37a2bf3ac7\") " pod="openstack/nova-metadata-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.089147 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sp5n2\" (UniqueName: \"kubernetes.io/projected/272317b4-c796-4e4f-9867-1d37a2bf3ac7-kube-api-access-sp5n2\") pod \"nova-metadata-0\" (UID: \"272317b4-c796-4e4f-9867-1d37a2bf3ac7\") " pod="openstack/nova-metadata-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.089162 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/272317b4-c796-4e4f-9867-1d37a2bf3ac7-logs\") pod \"nova-metadata-0\" (UID: \"272317b4-c796-4e4f-9867-1d37a2bf3ac7\") " pod="openstack/nova-metadata-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.089193 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad9cd174-57bf-44d9-b5aa-abeccc5bcac8-logs\") pod \"nova-api-0\" (UID: \"ad9cd174-57bf-44d9-b5aa-abeccc5bcac8\") " pod="openstack/nova-api-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.089232 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/272317b4-c796-4e4f-9867-1d37a2bf3ac7-config-data\") pod \"nova-metadata-0\" (UID: \"272317b4-c796-4e4f-9867-1d37a2bf3ac7\") " pod="openstack/nova-metadata-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.089250 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad9cd174-57bf-44d9-b5aa-abeccc5bcac8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ad9cd174-57bf-44d9-b5aa-abeccc5bcac8\") " pod="openstack/nova-api-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.089280 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad9cd174-57bf-44d9-b5aa-abeccc5bcac8-config-data\") pod \"nova-api-0\" (UID: \"ad9cd174-57bf-44d9-b5aa-abeccc5bcac8\") " pod="openstack/nova-api-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.090968 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad9cd174-57bf-44d9-b5aa-abeccc5bcac8-logs\") pod \"nova-api-0\" (UID: \"ad9cd174-57bf-44d9-b5aa-abeccc5bcac8\") " pod="openstack/nova-api-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.101482 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad9cd174-57bf-44d9-b5aa-abeccc5bcac8-config-data\") pod \"nova-api-0\" (UID: \"ad9cd174-57bf-44d9-b5aa-abeccc5bcac8\") " pod="openstack/nova-api-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.102697 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad9cd174-57bf-44d9-b5aa-abeccc5bcac8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ad9cd174-57bf-44d9-b5aa-abeccc5bcac8\") " pod="openstack/nova-api-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.128785 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rtd5c\" (UniqueName: \"kubernetes.io/projected/ad9cd174-57bf-44d9-b5aa-abeccc5bcac8-kube-api-access-rtd5c\") pod \"nova-api-0\" (UID: \"ad9cd174-57bf-44d9-b5aa-abeccc5bcac8\") " pod="openstack/nova-api-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.190115 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c198eda0-01d3-443e-b342-5039d9cfb0d5-combined-ca-bundle\") pod \"c198eda0-01d3-443e-b342-5039d9cfb0d5\" (UID: \"c198eda0-01d3-443e-b342-5039d9cfb0d5\") "
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.190175 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c198eda0-01d3-443e-b342-5039d9cfb0d5-config-data\") pod \"c198eda0-01d3-443e-b342-5039d9cfb0d5\" (UID: \"c198eda0-01d3-443e-b342-5039d9cfb0d5\") "
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.190375 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lclj9\" (UniqueName: \"kubernetes.io/projected/c198eda0-01d3-443e-b342-5039d9cfb0d5-kube-api-access-lclj9\") pod \"c198eda0-01d3-443e-b342-5039d9cfb0d5\" (UID: \"c198eda0-01d3-443e-b342-5039d9cfb0d5\") "
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.190716 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/272317b4-c796-4e4f-9867-1d37a2bf3ac7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"272317b4-c796-4e4f-9867-1d37a2bf3ac7\") " pod="openstack/nova-metadata-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.191403 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sp5n2\" (UniqueName: \"kubernetes.io/projected/272317b4-c796-4e4f-9867-1d37a2bf3ac7-kube-api-access-sp5n2\") pod \"nova-metadata-0\" (UID: \"272317b4-c796-4e4f-9867-1d37a2bf3ac7\") " pod="openstack/nova-metadata-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.191427 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/272317b4-c796-4e4f-9867-1d37a2bf3ac7-logs\") pod \"nova-metadata-0\" (UID: \"272317b4-c796-4e4f-9867-1d37a2bf3ac7\") " pod="openstack/nova-metadata-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.191513 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/272317b4-c796-4e4f-9867-1d37a2bf3ac7-config-data\") pod \"nova-metadata-0\" (UID: \"272317b4-c796-4e4f-9867-1d37a2bf3ac7\") " pod="openstack/nova-metadata-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.193983 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/272317b4-c796-4e4f-9867-1d37a2bf3ac7-logs\") pod \"nova-metadata-0\" (UID: \"272317b4-c796-4e4f-9867-1d37a2bf3ac7\") " pod="openstack/nova-metadata-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.197614 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/272317b4-c796-4e4f-9867-1d37a2bf3ac7-config-data\") pod \"nova-metadata-0\" (UID: \"272317b4-c796-4e4f-9867-1d37a2bf3ac7\") " pod="openstack/nova-metadata-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.199237 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/272317b4-c796-4e4f-9867-1d37a2bf3ac7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"272317b4-c796-4e4f-9867-1d37a2bf3ac7\") " pod="openstack/nova-metadata-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.210291 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c198eda0-01d3-443e-b342-5039d9cfb0d5-kube-api-access-lclj9" (OuterVolumeSpecName: "kube-api-access-lclj9") pod "c198eda0-01d3-443e-b342-5039d9cfb0d5" (UID: "c198eda0-01d3-443e-b342-5039d9cfb0d5"). InnerVolumeSpecName "kube-api-access-lclj9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.213668 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sp5n2\" (UniqueName: \"kubernetes.io/projected/272317b4-c796-4e4f-9867-1d37a2bf3ac7-kube-api-access-sp5n2\") pod \"nova-metadata-0\" (UID: \"272317b4-c796-4e4f-9867-1d37a2bf3ac7\") " pod="openstack/nova-metadata-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.214998 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c198eda0-01d3-443e-b342-5039d9cfb0d5-config-data" (OuterVolumeSpecName: "config-data") pod "c198eda0-01d3-443e-b342-5039d9cfb0d5" (UID: "c198eda0-01d3-443e-b342-5039d9cfb0d5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.228782 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c198eda0-01d3-443e-b342-5039d9cfb0d5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c198eda0-01d3-443e-b342-5039d9cfb0d5" (UID: "c198eda0-01d3-443e-b342-5039d9cfb0d5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.293085 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lclj9\" (UniqueName: \"kubernetes.io/projected/c198eda0-01d3-443e-b342-5039d9cfb0d5-kube-api-access-lclj9\") on node \"crc\" DevicePath \"\""
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.293128 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c198eda0-01d3-443e-b342-5039d9cfb0d5-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.293139 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c198eda0-01d3-443e-b342-5039d9cfb0d5-config-data\") on node \"crc\" DevicePath \"\""
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.352541 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.372978 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.514514 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.515167 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"c198eda0-01d3-443e-b342-5039d9cfb0d5","Type":"ContainerDied","Data":"bae5bc9af01c511a52f88735df06e4fe9f2694d3f6c0c8a3c41cdd586c6fbe62"}
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.515224 4982 scope.go:117] "RemoveContainer" containerID="63669ecbdb5cd393539ad12c91a9004be45f61e670e7b6b917c54a26bdfad19e"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.566889 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.589935 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.625225 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"]
Jan 22 07:21:50 crc kubenswrapper[4982]: E0122 07:21:50.625608 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c198eda0-01d3-443e-b342-5039d9cfb0d5" containerName="nova-cell1-conductor-conductor"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.625622 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="c198eda0-01d3-443e-b342-5039d9cfb0d5" containerName="nova-cell1-conductor-conductor"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.625816 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="c198eda0-01d3-443e-b342-5039d9cfb0d5" containerName="nova-cell1-conductor-conductor"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.626957 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.630373 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.633928 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.705209 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5720f611-259b-475d-b62b-12e2121dc041-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"5720f611-259b-475d-b62b-12e2121dc041\") " pod="openstack/nova-cell1-conductor-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.705263 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7thvm\" (UniqueName: \"kubernetes.io/projected/5720f611-259b-475d-b62b-12e2121dc041-kube-api-access-7thvm\") pod \"nova-cell1-conductor-0\" (UID: \"5720f611-259b-475d-b62b-12e2121dc041\") " pod="openstack/nova-cell1-conductor-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.705310 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5720f611-259b-475d-b62b-12e2121dc041-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"5720f611-259b-475d-b62b-12e2121dc041\") " pod="openstack/nova-cell1-conductor-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.807937 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5720f611-259b-475d-b62b-12e2121dc041-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"5720f611-259b-475d-b62b-12e2121dc041\") " pod="openstack/nova-cell1-conductor-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.808243 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5720f611-259b-475d-b62b-12e2121dc041-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"5720f611-259b-475d-b62b-12e2121dc041\") " pod="openstack/nova-cell1-conductor-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.808867 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7thvm\" (UniqueName: \"kubernetes.io/projected/5720f611-259b-475d-b62b-12e2121dc041-kube-api-access-7thvm\") pod \"nova-cell1-conductor-0\" (UID: \"5720f611-259b-475d-b62b-12e2121dc041\") " pod="openstack/nova-cell1-conductor-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.820586 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5720f611-259b-475d-b62b-12e2121dc041-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"5720f611-259b-475d-b62b-12e2121dc041\") " pod="openstack/nova-cell1-conductor-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.821643 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5720f611-259b-475d-b62b-12e2121dc041-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"5720f611-259b-475d-b62b-12e2121dc041\") " pod="openstack/nova-cell1-conductor-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.863613 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7thvm\" (UniqueName: \"kubernetes.io/projected/5720f611-259b-475d-b62b-12e2121dc041-kube-api-access-7thvm\") pod \"nova-cell1-conductor-0\" (UID: \"5720f611-259b-475d-b62b-12e2121dc041\") " pod="openstack/nova-cell1-conductor-0"
Jan 22 07:21:50 crc kubenswrapper[4982]: I0122 07:21:50.966697 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Jan 22 07:21:51 crc kubenswrapper[4982]: I0122 07:21:51.004287 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Jan 22 07:21:51 crc kubenswrapper[4982]: I0122 07:21:51.099239 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Jan 22 07:21:51 crc kubenswrapper[4982]: W0122 07:21:51.108788 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod272317b4_c796_4e4f_9867_1d37a2bf3ac7.slice/crio-f19a74598cd7f28f4af38c260181878c350325ca62986f7ce90ddb25c8f67b2e WatchSource:0}: Error finding container f19a74598cd7f28f4af38c260181878c350325ca62986f7ce90ddb25c8f67b2e: Status 404 returned error can't find the container with id f19a74598cd7f28f4af38c260181878c350325ca62986f7ce90ddb25c8f67b2e
Jan 22 07:21:51 crc kubenswrapper[4982]: I0122 07:21:51.527406 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Jan 22 07:21:51 crc kubenswrapper[4982]: I0122 07:21:51.532214 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"272317b4-c796-4e4f-9867-1d37a2bf3ac7","Type":"ContainerStarted","Data":"af22530f250c85f4197e11453d3450dc0bc1737f52ba5fab12bef3ed2420433b"}
Jan 22 07:21:51 crc kubenswrapper[4982]: I0122 07:21:51.532250 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"272317b4-c796-4e4f-9867-1d37a2bf3ac7","Type":"ContainerStarted","Data":"4f2b7c8fc03f3989422a332fde874f722112c94209c0f84552cd17261fa56ea5"}
Jan 22 07:21:51 crc kubenswrapper[4982]: I0122 07:21:51.532262 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"272317b4-c796-4e4f-9867-1d37a2bf3ac7","Type":"ContainerStarted","Data":"f19a74598cd7f28f4af38c260181878c350325ca62986f7ce90ddb25c8f67b2e"}
Jan 22 07:21:51 crc kubenswrapper[4982]: I0122 07:21:51.537923 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ad9cd174-57bf-44d9-b5aa-abeccc5bcac8","Type":"ContainerStarted","Data":"dd4c99b8480e27c73a9171fe8191fbe7877d230c2239d8267ba3ec70eb9870eb"}
Jan 22 07:21:51 crc kubenswrapper[4982]: I0122 07:21:51.537971 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ad9cd174-57bf-44d9-b5aa-abeccc5bcac8","Type":"ContainerStarted","Data":"fbeb22c1aa4be916e880646fa9ba33ab8f384194be2101bd41de731b822b264d"}
Jan 22 07:21:51 crc kubenswrapper[4982]: I0122 07:21:51.537981 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ad9cd174-57bf-44d9-b5aa-abeccc5bcac8","Type":"ContainerStarted","Data":"9856e0f9f449e8beeb17c4e2e719d4e4f2cd67e4ff29f673336a4e7c9da91a58"}
Jan 22 07:21:51 crc kubenswrapper[4982]: I0122 07:21:51.562767 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.562744952 podStartE2EDuration="2.562744952s" podCreationTimestamp="2026-01-22 07:21:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:21:51.554657043 +0000 UTC m=+5772.393295046" watchObservedRunningTime="2026-01-22 07:21:51.562744952 +0000 UTC m=+5772.401382955"
07:21:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:21:51.554657043 +0000 UTC m=+5772.393295046" watchObservedRunningTime="2026-01-22 07:21:51.562744952 +0000 UTC m=+5772.401382955" Jan 22 07:21:51 crc kubenswrapper[4982]: I0122 07:21:51.582273 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.582250928 podStartE2EDuration="2.582250928s" podCreationTimestamp="2026-01-22 07:21:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:21:51.578245919 +0000 UTC m=+5772.416883922" watchObservedRunningTime="2026-01-22 07:21:51.582250928 +0000 UTC m=+5772.420888931" Jan 22 07:21:51 crc kubenswrapper[4982]: I0122 07:21:51.732382 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35d112d5-9f8f-4128-b4f5-a964e5b7f4e7" path="/var/lib/kubelet/pods/35d112d5-9f8f-4128-b4f5-a964e5b7f4e7/volumes" Jan 22 07:21:51 crc kubenswrapper[4982]: I0122 07:21:51.733150 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="788263eb-cb47-4b21-aa3c-46a3eab75a99" path="/var/lib/kubelet/pods/788263eb-cb47-4b21-aa3c-46a3eab75a99/volumes" Jan 22 07:21:51 crc kubenswrapper[4982]: I0122 07:21:51.733678 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c198eda0-01d3-443e-b342-5039d9cfb0d5" path="/var/lib/kubelet/pods/c198eda0-01d3-443e-b342-5039d9cfb0d5/volumes" Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.309881 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 07:21:52 crc kubenswrapper[4982]: E0122 07:21:52.323485 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c9c89fb9a34e322ea4bbc68047806ac64669c3e10098526ae5a542c46e969688" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 22 07:21:52 crc kubenswrapper[4982]: E0122 07:21:52.332183 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c9c89fb9a34e322ea4bbc68047806ac64669c3e10098526ae5a542c46e969688" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 22 07:21:52 crc kubenswrapper[4982]: E0122 07:21:52.336303 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c9c89fb9a34e322ea4bbc68047806ac64669c3e10098526ae5a542c46e969688" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 22 07:21:52 crc kubenswrapper[4982]: E0122 07:21:52.336407 4982 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="d238384e-a7ec-4417-8c61-362fb936cd7b" containerName="nova-cell0-conductor-conductor" Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.351798 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/c2965fcc-c5e4-4066-ade7-4262cbca707f-config-data\") pod \"c2965fcc-c5e4-4066-ade7-4262cbca707f\" (UID: \"c2965fcc-c5e4-4066-ade7-4262cbca707f\") " Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.351993 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7txcw\" (UniqueName: \"kubernetes.io/projected/c2965fcc-c5e4-4066-ade7-4262cbca707f-kube-api-access-7txcw\") pod \"c2965fcc-c5e4-4066-ade7-4262cbca707f\" (UID: \"c2965fcc-c5e4-4066-ade7-4262cbca707f\") " Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.352036 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2965fcc-c5e4-4066-ade7-4262cbca707f-combined-ca-bundle\") pod \"c2965fcc-c5e4-4066-ade7-4262cbca707f\" (UID: \"c2965fcc-c5e4-4066-ade7-4262cbca707f\") " Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.362124 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2965fcc-c5e4-4066-ade7-4262cbca707f-kube-api-access-7txcw" (OuterVolumeSpecName: "kube-api-access-7txcw") pod "c2965fcc-c5e4-4066-ade7-4262cbca707f" (UID: "c2965fcc-c5e4-4066-ade7-4262cbca707f"). InnerVolumeSpecName "kube-api-access-7txcw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.395359 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2965fcc-c5e4-4066-ade7-4262cbca707f-config-data" (OuterVolumeSpecName: "config-data") pod "c2965fcc-c5e4-4066-ade7-4262cbca707f" (UID: "c2965fcc-c5e4-4066-ade7-4262cbca707f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.444510 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2965fcc-c5e4-4066-ade7-4262cbca707f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c2965fcc-c5e4-4066-ade7-4262cbca707f" (UID: "c2965fcc-c5e4-4066-ade7-4262cbca707f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.453483 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2965fcc-c5e4-4066-ade7-4262cbca707f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.453520 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2965fcc-c5e4-4066-ade7-4262cbca707f-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.453533 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7txcw\" (UniqueName: \"kubernetes.io/projected/c2965fcc-c5e4-4066-ade7-4262cbca707f-kube-api-access-7txcw\") on node \"crc\" DevicePath \"\"" Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.558351 4982 generic.go:334] "Generic (PLEG): container finished" podID="c2965fcc-c5e4-4066-ade7-4262cbca707f" containerID="1c5bd916cf142d700fe5897fbf41c569870331decd80db892f92290d68cec4a1" exitCode=0 Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.558746 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c2965fcc-c5e4-4066-ade7-4262cbca707f","Type":"ContainerDied","Data":"1c5bd916cf142d700fe5897fbf41c569870331decd80db892f92290d68cec4a1"} Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.558782 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c2965fcc-c5e4-4066-ade7-4262cbca707f","Type":"ContainerDied","Data":"54ea072336e85ceeddfa12db976fdf0cb5e2697b0c98b5e61ebe8ba0b75ff7c5"} Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.558804 4982 scope.go:117] "RemoveContainer" containerID="1c5bd916cf142d700fe5897fbf41c569870331decd80db892f92290d68cec4a1" Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.559161 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.565118 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"5720f611-259b-475d-b62b-12e2121dc041","Type":"ContainerStarted","Data":"ac864c8be76e62d68433b1fbab1c316d33ae0c2b4ce946661294948a23e14fee"} Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.565175 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"5720f611-259b-475d-b62b-12e2121dc041","Type":"ContainerStarted","Data":"f4f7b3fb7f985157a1577a8b8ae36e310338b2624c7c8584b97c92ba2706d2d0"} Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.565288 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.589391 4982 scope.go:117] "RemoveContainer" containerID="1c5bd916cf142d700fe5897fbf41c569870331decd80db892f92290d68cec4a1" Jan 22 07:21:52 crc kubenswrapper[4982]: E0122 07:21:52.592301 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c5bd916cf142d700fe5897fbf41c569870331decd80db892f92290d68cec4a1\": container with ID starting with 1c5bd916cf142d700fe5897fbf41c569870331decd80db892f92290d68cec4a1 not found: ID does not exist" containerID="1c5bd916cf142d700fe5897fbf41c569870331decd80db892f92290d68cec4a1" Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.592354 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c5bd916cf142d700fe5897fbf41c569870331decd80db892f92290d68cec4a1"} err="failed to get container status \"1c5bd916cf142d700fe5897fbf41c569870331decd80db892f92290d68cec4a1\": rpc error: code = NotFound desc = could not find container \"1c5bd916cf142d700fe5897fbf41c569870331decd80db892f92290d68cec4a1\": container with ID starting with 1c5bd916cf142d700fe5897fbf41c569870331decd80db892f92290d68cec4a1 not found: ID does not exist" Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.592549 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.59253555 podStartE2EDuration="2.59253555s" podCreationTimestamp="2026-01-22 07:21:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:21:52.583227328 +0000 UTC m=+5773.421865351" watchObservedRunningTime="2026-01-22 07:21:52.59253555 +0000 UTC m=+5773.431173553" Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.618893 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 07:21:52 crc kubenswrapper[4982]: E0122 07:21:52.646468 4982 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd238384e_a7ec_4417_8c61_362fb936cd7b.slice/crio-c9c89fb9a34e322ea4bbc68047806ac64669c3e10098526ae5a542c46e969688.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc2965fcc_c5e4_4066_ade7_4262cbca707f.slice\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc2965fcc_c5e4_4066_ade7_4262cbca707f.slice/crio-54ea072336e85ceeddfa12db976fdf0cb5e2697b0c98b5e61ebe8ba0b75ff7c5\": RecentStats: unable to find data in memory cache]" Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.682306 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.712803 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 07:21:52 crc kubenswrapper[4982]: E0122 07:21:52.713380 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2965fcc-c5e4-4066-ade7-4262cbca707f" containerName="nova-scheduler-scheduler" Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.713406 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2965fcc-c5e4-4066-ade7-4262cbca707f" containerName="nova-scheduler-scheduler" Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.713649 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2965fcc-c5e4-4066-ade7-4262cbca707f" containerName="nova-scheduler-scheduler" Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.715223 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.718237 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.724474 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.795188 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e492a7ae-fef5-4822-ba8f-2d01628d8d89-config-data\") pod \"nova-scheduler-0\" (UID: \"e492a7ae-fef5-4822-ba8f-2d01628d8d89\") " pod="openstack/nova-scheduler-0" Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.795873 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e492a7ae-fef5-4822-ba8f-2d01628d8d89-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e492a7ae-fef5-4822-ba8f-2d01628d8d89\") " pod="openstack/nova-scheduler-0" Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.795907 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkkqd\" (UniqueName: \"kubernetes.io/projected/e492a7ae-fef5-4822-ba8f-2d01628d8d89-kube-api-access-lkkqd\") pod \"nova-scheduler-0\" (UID: \"e492a7ae-fef5-4822-ba8f-2d01628d8d89\") " pod="openstack/nova-scheduler-0" Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.810030 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.897941 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e492a7ae-fef5-4822-ba8f-2d01628d8d89-config-data\") pod \"nova-scheduler-0\" (UID: \"e492a7ae-fef5-4822-ba8f-2d01628d8d89\") " pod="openstack/nova-scheduler-0" Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.898278 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/e492a7ae-fef5-4822-ba8f-2d01628d8d89-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e492a7ae-fef5-4822-ba8f-2d01628d8d89\") " pod="openstack/nova-scheduler-0" Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.898467 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkkqd\" (UniqueName: \"kubernetes.io/projected/e492a7ae-fef5-4822-ba8f-2d01628d8d89-kube-api-access-lkkqd\") pod \"nova-scheduler-0\" (UID: \"e492a7ae-fef5-4822-ba8f-2d01628d8d89\") " pod="openstack/nova-scheduler-0" Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.902902 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e492a7ae-fef5-4822-ba8f-2d01628d8d89-config-data\") pod \"nova-scheduler-0\" (UID: \"e492a7ae-fef5-4822-ba8f-2d01628d8d89\") " pod="openstack/nova-scheduler-0" Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.906563 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e492a7ae-fef5-4822-ba8f-2d01628d8d89-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e492a7ae-fef5-4822-ba8f-2d01628d8d89\") " pod="openstack/nova-scheduler-0" Jan 22 07:21:52 crc kubenswrapper[4982]: I0122 07:21:52.914725 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkkqd\" (UniqueName: \"kubernetes.io/projected/e492a7ae-fef5-4822-ba8f-2d01628d8d89-kube-api-access-lkkqd\") pod \"nova-scheduler-0\" (UID: \"e492a7ae-fef5-4822-ba8f-2d01628d8d89\") " pod="openstack/nova-scheduler-0" Jan 22 07:21:53 crc kubenswrapper[4982]: I0122 07:21:53.040921 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 07:21:53 crc kubenswrapper[4982]: I0122 07:21:53.514943 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 07:21:53 crc kubenswrapper[4982]: W0122 07:21:53.523839 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode492a7ae_fef5_4822_ba8f_2d01628d8d89.slice/crio-50f5e81be36a97cd441a955387c28d20be26072222c6b0cb7614acff201dcfee WatchSource:0}: Error finding container 50f5e81be36a97cd441a955387c28d20be26072222c6b0cb7614acff201dcfee: Status 404 returned error can't find the container with id 50f5e81be36a97cd441a955387c28d20be26072222c6b0cb7614acff201dcfee Jan 22 07:21:53 crc kubenswrapper[4982]: I0122 07:21:53.575308 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e492a7ae-fef5-4822-ba8f-2d01628d8d89","Type":"ContainerStarted","Data":"50f5e81be36a97cd441a955387c28d20be26072222c6b0cb7614acff201dcfee"} Jan 22 07:21:53 crc kubenswrapper[4982]: I0122 07:21:53.731542 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2965fcc-c5e4-4066-ade7-4262cbca707f" path="/var/lib/kubelet/pods/c2965fcc-c5e4-4066-ade7-4262cbca707f/volumes" Jan 22 07:21:54 crc kubenswrapper[4982]: I0122 07:21:54.587717 4982 generic.go:334] "Generic (PLEG): container finished" podID="d238384e-a7ec-4417-8c61-362fb936cd7b" containerID="c9c89fb9a34e322ea4bbc68047806ac64669c3e10098526ae5a542c46e969688" exitCode=0 Jan 22 07:21:54 crc kubenswrapper[4982]: I0122 07:21:54.588024 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" 
event={"ID":"d238384e-a7ec-4417-8c61-362fb936cd7b","Type":"ContainerDied","Data":"c9c89fb9a34e322ea4bbc68047806ac64669c3e10098526ae5a542c46e969688"} Jan 22 07:21:54 crc kubenswrapper[4982]: I0122 07:21:54.595544 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e492a7ae-fef5-4822-ba8f-2d01628d8d89","Type":"ContainerStarted","Data":"0fa80b0e76b9519eb5d62f80f5dd26892e415606ff8267069f4d90c4ec41b9bf"} Jan 22 07:21:54 crc kubenswrapper[4982]: I0122 07:21:54.626682 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.626637568 podStartE2EDuration="2.626637568s" podCreationTimestamp="2026-01-22 07:21:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:21:54.611583021 +0000 UTC m=+5775.450221024" watchObservedRunningTime="2026-01-22 07:21:54.626637568 +0000 UTC m=+5775.465275571" Jan 22 07:21:54 crc kubenswrapper[4982]: I0122 07:21:54.912196 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 22 07:21:55 crc kubenswrapper[4982]: I0122 07:21:55.054090 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d238384e-a7ec-4417-8c61-362fb936cd7b-config-data\") pod \"d238384e-a7ec-4417-8c61-362fb936cd7b\" (UID: \"d238384e-a7ec-4417-8c61-362fb936cd7b\") " Jan 22 07:21:55 crc kubenswrapper[4982]: I0122 07:21:55.054148 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d238384e-a7ec-4417-8c61-362fb936cd7b-combined-ca-bundle\") pod \"d238384e-a7ec-4417-8c61-362fb936cd7b\" (UID: \"d238384e-a7ec-4417-8c61-362fb936cd7b\") " Jan 22 07:21:55 crc kubenswrapper[4982]: I0122 07:21:55.054376 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5sr4m\" (UniqueName: \"kubernetes.io/projected/d238384e-a7ec-4417-8c61-362fb936cd7b-kube-api-access-5sr4m\") pod \"d238384e-a7ec-4417-8c61-362fb936cd7b\" (UID: \"d238384e-a7ec-4417-8c61-362fb936cd7b\") " Jan 22 07:21:55 crc kubenswrapper[4982]: I0122 07:21:55.064664 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d238384e-a7ec-4417-8c61-362fb936cd7b-kube-api-access-5sr4m" (OuterVolumeSpecName: "kube-api-access-5sr4m") pod "d238384e-a7ec-4417-8c61-362fb936cd7b" (UID: "d238384e-a7ec-4417-8c61-362fb936cd7b"). InnerVolumeSpecName "kube-api-access-5sr4m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:21:55 crc kubenswrapper[4982]: I0122 07:21:55.090217 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d238384e-a7ec-4417-8c61-362fb936cd7b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d238384e-a7ec-4417-8c61-362fb936cd7b" (UID: "d238384e-a7ec-4417-8c61-362fb936cd7b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:21:55 crc kubenswrapper[4982]: I0122 07:21:55.091675 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d238384e-a7ec-4417-8c61-362fb936cd7b-config-data" (OuterVolumeSpecName: "config-data") pod "d238384e-a7ec-4417-8c61-362fb936cd7b" (UID: "d238384e-a7ec-4417-8c61-362fb936cd7b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:21:55 crc kubenswrapper[4982]: I0122 07:21:55.155974 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5sr4m\" (UniqueName: \"kubernetes.io/projected/d238384e-a7ec-4417-8c61-362fb936cd7b-kube-api-access-5sr4m\") on node \"crc\" DevicePath \"\"" Jan 22 07:21:55 crc kubenswrapper[4982]: I0122 07:21:55.156194 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d238384e-a7ec-4417-8c61-362fb936cd7b-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:21:55 crc kubenswrapper[4982]: I0122 07:21:55.156288 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d238384e-a7ec-4417-8c61-362fb936cd7b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:21:55 crc kubenswrapper[4982]: I0122 07:21:55.373328 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 22 07:21:55 crc kubenswrapper[4982]: I0122 07:21:55.373384 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 22 07:21:55 crc kubenswrapper[4982]: I0122 07:21:55.607079 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"d238384e-a7ec-4417-8c61-362fb936cd7b","Type":"ContainerDied","Data":"7e92886b5dcea560f69c416ff1912c8e9aca9cdde07ac9ceee6f6a30bfcc6eb0"} Jan 22 07:21:55 crc kubenswrapper[4982]: I0122 07:21:55.607124 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 22 07:21:55 crc kubenswrapper[4982]: I0122 07:21:55.607167 4982 scope.go:117] "RemoveContainer" containerID="c9c89fb9a34e322ea4bbc68047806ac64669c3e10098526ae5a542c46e969688" Jan 22 07:21:55 crc kubenswrapper[4982]: I0122 07:21:55.647769 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 22 07:21:55 crc kubenswrapper[4982]: I0122 07:21:55.667324 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 22 07:21:55 crc kubenswrapper[4982]: I0122 07:21:55.683454 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 22 07:21:55 crc kubenswrapper[4982]: E0122 07:21:55.684006 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d238384e-a7ec-4417-8c61-362fb936cd7b" containerName="nova-cell0-conductor-conductor" Jan 22 07:21:55 crc kubenswrapper[4982]: I0122 07:21:55.684032 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d238384e-a7ec-4417-8c61-362fb936cd7b" containerName="nova-cell0-conductor-conductor" Jan 22 07:21:55 crc kubenswrapper[4982]: I0122 07:21:55.684214 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d238384e-a7ec-4417-8c61-362fb936cd7b" containerName="nova-cell0-conductor-conductor" Jan 22 07:21:55 crc kubenswrapper[4982]: I0122 07:21:55.684925 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 22 07:21:55 crc kubenswrapper[4982]: I0122 07:21:55.688651 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Jan 22 07:21:55 crc kubenswrapper[4982]: I0122 07:21:55.699695 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 22 07:21:55 crc kubenswrapper[4982]: I0122 07:21:55.737397 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d238384e-a7ec-4417-8c61-362fb936cd7b" path="/var/lib/kubelet/pods/d238384e-a7ec-4417-8c61-362fb936cd7b/volumes" Jan 22 07:21:55 crc kubenswrapper[4982]: I0122 07:21:55.769412 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ec5622e-1965-4e4e-ab7d-104745c8a065-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"6ec5622e-1965-4e4e-ab7d-104745c8a065\") " pod="openstack/nova-cell0-conductor-0" Jan 22 07:21:55 crc kubenswrapper[4982]: I0122 07:21:55.769861 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xblmt\" (UniqueName: \"kubernetes.io/projected/6ec5622e-1965-4e4e-ab7d-104745c8a065-kube-api-access-xblmt\") pod \"nova-cell0-conductor-0\" (UID: \"6ec5622e-1965-4e4e-ab7d-104745c8a065\") " pod="openstack/nova-cell0-conductor-0" Jan 22 07:21:55 crc kubenswrapper[4982]: I0122 07:21:55.770187 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ec5622e-1965-4e4e-ab7d-104745c8a065-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"6ec5622e-1965-4e4e-ab7d-104745c8a065\") " pod="openstack/nova-cell0-conductor-0" Jan 22 07:21:55 crc kubenswrapper[4982]: I0122 07:21:55.872308 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xblmt\" (UniqueName: \"kubernetes.io/projected/6ec5622e-1965-4e4e-ab7d-104745c8a065-kube-api-access-xblmt\") pod \"nova-cell0-conductor-0\" (UID: \"6ec5622e-1965-4e4e-ab7d-104745c8a065\") " pod="openstack/nova-cell0-conductor-0" Jan 22 07:21:55 crc kubenswrapper[4982]: I0122 07:21:55.872450 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ec5622e-1965-4e4e-ab7d-104745c8a065-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"6ec5622e-1965-4e4e-ab7d-104745c8a065\") " pod="openstack/nova-cell0-conductor-0" Jan 22 07:21:55 crc kubenswrapper[4982]: I0122 07:21:55.872563 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ec5622e-1965-4e4e-ab7d-104745c8a065-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"6ec5622e-1965-4e4e-ab7d-104745c8a065\") " pod="openstack/nova-cell0-conductor-0" Jan 22 07:21:55 crc kubenswrapper[4982]: I0122 07:21:55.879675 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ec5622e-1965-4e4e-ab7d-104745c8a065-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"6ec5622e-1965-4e4e-ab7d-104745c8a065\") " pod="openstack/nova-cell0-conductor-0" Jan 22 07:21:55 crc kubenswrapper[4982]: I0122 07:21:55.879881 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/6ec5622e-1965-4e4e-ab7d-104745c8a065-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"6ec5622e-1965-4e4e-ab7d-104745c8a065\") " pod="openstack/nova-cell0-conductor-0" Jan 22 07:21:55 crc kubenswrapper[4982]: I0122 07:21:55.894456 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xblmt\" (UniqueName: \"kubernetes.io/projected/6ec5622e-1965-4e4e-ab7d-104745c8a065-kube-api-access-xblmt\") pod \"nova-cell0-conductor-0\" (UID: \"6ec5622e-1965-4e4e-ab7d-104745c8a065\") " pod="openstack/nova-cell0-conductor-0" Jan 22 07:21:56 crc kubenswrapper[4982]: I0122 07:21:56.019427 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 22 07:21:56 crc kubenswrapper[4982]: I0122 07:21:56.041064 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Jan 22 07:21:56 crc kubenswrapper[4982]: I0122 07:21:56.458510 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 22 07:21:56 crc kubenswrapper[4982]: I0122 07:21:56.615714 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"6ec5622e-1965-4e4e-ab7d-104745c8a065","Type":"ContainerStarted","Data":"16389ce6e9cd65c49295e7fa10f113ea10140f89d894b7ab2f51364574d90c48"} Jan 22 07:21:57 crc kubenswrapper[4982]: I0122 07:21:57.628881 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"6ec5622e-1965-4e4e-ab7d-104745c8a065","Type":"ContainerStarted","Data":"e47d17ae3fe71c59efec8dd2b7ffeeefb96b57db28491e4581dbb3d511af7156"} Jan 22 07:21:57 crc kubenswrapper[4982]: I0122 07:21:57.629256 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Jan 22 07:21:57 crc kubenswrapper[4982]: I0122 07:21:57.649421 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.6494006839999997 podStartE2EDuration="2.649400684s" podCreationTimestamp="2026-01-22 07:21:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:21:57.644736858 +0000 UTC m=+5778.483374861" watchObservedRunningTime="2026-01-22 07:21:57.649400684 +0000 UTC m=+5778.488038687" Jan 22 07:21:57 crc kubenswrapper[4982]: I0122 07:21:57.805516 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Jan 22 07:21:57 crc kubenswrapper[4982]: I0122 07:21:57.816613 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Jan 22 07:21:58 crc kubenswrapper[4982]: I0122 07:21:58.041965 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Jan 22 07:21:58 crc kubenswrapper[4982]: I0122 07:21:58.648788 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Jan 22 07:22:00 crc kubenswrapper[4982]: I0122 07:22:00.353620 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 22 07:22:00 crc kubenswrapper[4982]: I0122 07:22:00.354084 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 22 07:22:00 crc kubenswrapper[4982]: I0122 07:22:00.379989 4982 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 22 07:22:00 crc kubenswrapper[4982]: I0122 07:22:00.390417 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 22 07:22:01 crc kubenswrapper[4982]: I0122 07:22:01.053829 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Jan 22 07:22:01 crc kubenswrapper[4982]: I0122 07:22:01.395181 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="ad9cd174-57bf-44d9-b5aa-abeccc5bcac8" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.79:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 22 07:22:01 crc kubenswrapper[4982]: I0122 07:22:01.436130 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="ad9cd174-57bf-44d9-b5aa-abeccc5bcac8" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.79:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 22 07:22:01 crc kubenswrapper[4982]: I0122 07:22:01.519195 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="272317b4-c796-4e4f-9867-1d37a2bf3ac7" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.80:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 22 07:22:01 crc kubenswrapper[4982]: I0122 07:22:01.519708 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="272317b4-c796-4e4f-9867-1d37a2bf3ac7" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.80:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 22 07:22:03 crc kubenswrapper[4982]: I0122 07:22:03.041103 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Jan 22 07:22:03 crc kubenswrapper[4982]: I0122 07:22:03.071123 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Jan 22 07:22:03 crc kubenswrapper[4982]: I0122 07:22:03.771749 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jan 22 07:22:04 crc kubenswrapper[4982]: I0122 07:22:04.939106 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 07:22:04 crc kubenswrapper[4982]: I0122 07:22:04.941021 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 22 07:22:04 crc kubenswrapper[4982]: I0122 07:22:04.944830 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Jan 22 07:22:04 crc kubenswrapper[4982]: I0122 07:22:04.957533 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 07:22:05 crc kubenswrapper[4982]: I0122 07:22:05.097999 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/30f471a0-9258-4460-bd4e-f5aa45d817f9-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"30f471a0-9258-4460-bd4e-f5aa45d817f9\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:05 crc kubenswrapper[4982]: I0122 07:22:05.098039 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30f471a0-9258-4460-bd4e-f5aa45d817f9-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"30f471a0-9258-4460-bd4e-f5aa45d817f9\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:05 crc kubenswrapper[4982]: I0122 07:22:05.098072 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvlb8\" (UniqueName: \"kubernetes.io/projected/30f471a0-9258-4460-bd4e-f5aa45d817f9-kube-api-access-lvlb8\") pod \"cinder-scheduler-0\" (UID: \"30f471a0-9258-4460-bd4e-f5aa45d817f9\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:05 crc kubenswrapper[4982]: I0122 07:22:05.098095 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/30f471a0-9258-4460-bd4e-f5aa45d817f9-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"30f471a0-9258-4460-bd4e-f5aa45d817f9\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:05 crc kubenswrapper[4982]: I0122 07:22:05.098114 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30f471a0-9258-4460-bd4e-f5aa45d817f9-config-data\") pod \"cinder-scheduler-0\" (UID: \"30f471a0-9258-4460-bd4e-f5aa45d817f9\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:05 crc kubenswrapper[4982]: I0122 07:22:05.098326 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30f471a0-9258-4460-bd4e-f5aa45d817f9-scripts\") pod \"cinder-scheduler-0\" (UID: \"30f471a0-9258-4460-bd4e-f5aa45d817f9\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:05 crc kubenswrapper[4982]: I0122 07:22:05.200639 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30f471a0-9258-4460-bd4e-f5aa45d817f9-scripts\") pod \"cinder-scheduler-0\" (UID: \"30f471a0-9258-4460-bd4e-f5aa45d817f9\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:05 crc kubenswrapper[4982]: I0122 07:22:05.200784 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/30f471a0-9258-4460-bd4e-f5aa45d817f9-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"30f471a0-9258-4460-bd4e-f5aa45d817f9\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:05 crc kubenswrapper[4982]: I0122 07:22:05.200805 4982 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30f471a0-9258-4460-bd4e-f5aa45d817f9-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"30f471a0-9258-4460-bd4e-f5aa45d817f9\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:05 crc kubenswrapper[4982]: I0122 07:22:05.200845 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvlb8\" (UniqueName: \"kubernetes.io/projected/30f471a0-9258-4460-bd4e-f5aa45d817f9-kube-api-access-lvlb8\") pod \"cinder-scheduler-0\" (UID: \"30f471a0-9258-4460-bd4e-f5aa45d817f9\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:05 crc kubenswrapper[4982]: I0122 07:22:05.201015 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/30f471a0-9258-4460-bd4e-f5aa45d817f9-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"30f471a0-9258-4460-bd4e-f5aa45d817f9\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:05 crc kubenswrapper[4982]: I0122 07:22:05.201049 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30f471a0-9258-4460-bd4e-f5aa45d817f9-config-data\") pod \"cinder-scheduler-0\" (UID: \"30f471a0-9258-4460-bd4e-f5aa45d817f9\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:05 crc kubenswrapper[4982]: I0122 07:22:05.201530 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/30f471a0-9258-4460-bd4e-f5aa45d817f9-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"30f471a0-9258-4460-bd4e-f5aa45d817f9\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:05 crc kubenswrapper[4982]: I0122 07:22:05.206771 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30f471a0-9258-4460-bd4e-f5aa45d817f9-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"30f471a0-9258-4460-bd4e-f5aa45d817f9\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:05 crc kubenswrapper[4982]: I0122 07:22:05.210260 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30f471a0-9258-4460-bd4e-f5aa45d817f9-scripts\") pod \"cinder-scheduler-0\" (UID: \"30f471a0-9258-4460-bd4e-f5aa45d817f9\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:05 crc kubenswrapper[4982]: I0122 07:22:05.211920 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30f471a0-9258-4460-bd4e-f5aa45d817f9-config-data\") pod \"cinder-scheduler-0\" (UID: \"30f471a0-9258-4460-bd4e-f5aa45d817f9\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:05 crc kubenswrapper[4982]: I0122 07:22:05.214267 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/30f471a0-9258-4460-bd4e-f5aa45d817f9-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"30f471a0-9258-4460-bd4e-f5aa45d817f9\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:05 crc kubenswrapper[4982]: I0122 07:22:05.216353 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvlb8\" (UniqueName: \"kubernetes.io/projected/30f471a0-9258-4460-bd4e-f5aa45d817f9-kube-api-access-lvlb8\") pod \"cinder-scheduler-0\" (UID: \"30f471a0-9258-4460-bd4e-f5aa45d817f9\") " 
pod="openstack/cinder-scheduler-0" Jan 22 07:22:05 crc kubenswrapper[4982]: I0122 07:22:05.264444 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 22 07:22:05 crc kubenswrapper[4982]: I0122 07:22:05.756744 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 07:22:05 crc kubenswrapper[4982]: W0122 07:22:05.760628 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod30f471a0_9258_4460_bd4e_f5aa45d817f9.slice/crio-b00acc2b2e1c77f6746f0583e68e657b7ec0b30b495990963815ebe03dc6fcc5 WatchSource:0}: Error finding container b00acc2b2e1c77f6746f0583e68e657b7ec0b30b495990963815ebe03dc6fcc5: Status 404 returned error can't find the container with id b00acc2b2e1c77f6746f0583e68e657b7ec0b30b495990963815ebe03dc6fcc5 Jan 22 07:22:06 crc kubenswrapper[4982]: I0122 07:22:06.698438 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jan 22 07:22:06 crc kubenswrapper[4982]: I0122 07:22:06.700474 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="e418a3f5-6769-46fa-98a9-a74f59f829a6" containerName="cinder-api-log" containerID="cri-o://912aa6c646f840a2c6849e41dc3fa5a4a73ae2fe8596d0c1d6717b5c48de9994" gracePeriod=30 Jan 22 07:22:06 crc kubenswrapper[4982]: I0122 07:22:06.700946 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="e418a3f5-6769-46fa-98a9-a74f59f829a6" containerName="cinder-api" containerID="cri-o://aea6a9c3143239e52a67b115c6c9e9b82f9120bd13a091b223ac415ba8fc4475" gracePeriod=30 Jan 22 07:22:06 crc kubenswrapper[4982]: I0122 07:22:06.734195 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"30f471a0-9258-4460-bd4e-f5aa45d817f9","Type":"ContainerStarted","Data":"e52b4c20de4a2c4a86854bae608e4ee033b92176d52377522e19c658fe4c3e3b"} Jan 22 07:22:06 crc kubenswrapper[4982]: I0122 07:22:06.734242 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"30f471a0-9258-4460-bd4e-f5aa45d817f9","Type":"ContainerStarted","Data":"b00acc2b2e1c77f6746f0583e68e657b7ec0b30b495990963815ebe03dc6fcc5"} Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.232718 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-volume1-0"] Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.234933 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.239463 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-volume1-config-data" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.250633 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.345405 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.345514 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.345551 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.345586 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.345614 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.345641 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2p7l\" (UniqueName: \"kubernetes.io/projected/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-kube-api-access-f2p7l\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.345676 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.345736 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-sys\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 
07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.345764 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.345802 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.345829 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.345881 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.345939 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-dev\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.346015 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.346052 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-run\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.346078 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.447463 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-sys\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.447512 4982 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.447548 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.447577 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.447604 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.447663 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-dev\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.447712 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.447741 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-run\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.447762 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.447802 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.447911 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-ceph\") pod \"cinder-volume-volume1-0\" (UID: 
\"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.447945 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.447974 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.448003 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.448028 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2p7l\" (UniqueName: \"kubernetes.io/projected/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-kube-api-access-f2p7l\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.448060 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.448389 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.448455 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-sys\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.448525 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.448567 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.449475 4982 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.449499 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.449506 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.449724 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.449784 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-dev\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.449786 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-run\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.454938 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.454976 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.455221 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.455453 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc 
kubenswrapper[4982]: I0122 07:22:07.458759 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.477441 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2p7l\" (UniqueName: \"kubernetes.io/projected/d12a7451-3edb-4f27-ac05-9bf90e55a4c8-kube-api-access-f2p7l\") pod \"cinder-volume-volume1-0\" (UID: \"d12a7451-3edb-4f27-ac05-9bf90e55a4c8\") " pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.552739 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.756443 4982 generic.go:334] "Generic (PLEG): container finished" podID="e418a3f5-6769-46fa-98a9-a74f59f829a6" containerID="912aa6c646f840a2c6849e41dc3fa5a4a73ae2fe8596d0c1d6717b5c48de9994" exitCode=143 Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.756484 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e418a3f5-6769-46fa-98a9-a74f59f829a6","Type":"ContainerDied","Data":"912aa6c646f840a2c6849e41dc3fa5a4a73ae2fe8596d0c1d6717b5c48de9994"} Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.758487 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"30f471a0-9258-4460-bd4e-f5aa45d817f9","Type":"ContainerStarted","Data":"d24db64d800ae5ba6001364d720ec54a0e9e25b44847c437e0a584370f10dcb3"} Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.788104 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.7880825270000003 podStartE2EDuration="3.788082527s" podCreationTimestamp="2026-01-22 07:22:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:22:07.779947827 +0000 UTC m=+5788.618585840" watchObservedRunningTime="2026-01-22 07:22:07.788082527 +0000 UTC m=+5788.626720520" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.953043 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"] Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.954871 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-backup-0" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.957326 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data" Jan 22 07:22:07 crc kubenswrapper[4982]: I0122 07:22:07.975593 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.059226 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbb464cf-fa45-4bec-b621-87a818a02156-config-data\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.059532 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgf8c\" (UniqueName: \"kubernetes.io/projected/bbb464cf-fa45-4bec-b621-87a818a02156-kube-api-access-kgf8c\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.059649 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/bbb464cf-fa45-4bec-b621-87a818a02156-run\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.059799 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/bbb464cf-fa45-4bec-b621-87a818a02156-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.059951 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/bbb464cf-fa45-4bec-b621-87a818a02156-dev\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.060060 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/bbb464cf-fa45-4bec-b621-87a818a02156-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.060197 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/bbb464cf-fa45-4bec-b621-87a818a02156-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.060252 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/bbb464cf-fa45-4bec-b621-87a818a02156-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.060371 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"ceph\" (UniqueName: \"kubernetes.io/projected/bbb464cf-fa45-4bec-b621-87a818a02156-ceph\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.060434 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bbb464cf-fa45-4bec-b621-87a818a02156-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.060463 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/bbb464cf-fa45-4bec-b621-87a818a02156-etc-nvme\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.060518 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/bbb464cf-fa45-4bec-b621-87a818a02156-lib-modules\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.060560 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/bbb464cf-fa45-4bec-b621-87a818a02156-sys\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.060753 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bbb464cf-fa45-4bec-b621-87a818a02156-config-data-custom\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.060843 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbb464cf-fa45-4bec-b621-87a818a02156-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.060905 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bbb464cf-fa45-4bec-b621-87a818a02156-scripts\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.162099 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgf8c\" (UniqueName: \"kubernetes.io/projected/bbb464cf-fa45-4bec-b621-87a818a02156-kube-api-access-kgf8c\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.162148 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/bbb464cf-fa45-4bec-b621-87a818a02156-run\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 
07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.162178 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/bbb464cf-fa45-4bec-b621-87a818a02156-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.162203 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/bbb464cf-fa45-4bec-b621-87a818a02156-dev\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.162218 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/bbb464cf-fa45-4bec-b621-87a818a02156-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.162234 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/bbb464cf-fa45-4bec-b621-87a818a02156-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.162248 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/bbb464cf-fa45-4bec-b621-87a818a02156-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.162278 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/bbb464cf-fa45-4bec-b621-87a818a02156-ceph\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.162295 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bbb464cf-fa45-4bec-b621-87a818a02156-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.162312 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/bbb464cf-fa45-4bec-b621-87a818a02156-etc-nvme\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.162330 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/bbb464cf-fa45-4bec-b621-87a818a02156-lib-modules\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.162352 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/bbb464cf-fa45-4bec-b621-87a818a02156-sys\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 
crc kubenswrapper[4982]: I0122 07:22:08.162383 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bbb464cf-fa45-4bec-b621-87a818a02156-config-data-custom\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.162415 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbb464cf-fa45-4bec-b621-87a818a02156-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.162422 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/bbb464cf-fa45-4bec-b621-87a818a02156-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.162433 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bbb464cf-fa45-4bec-b621-87a818a02156-scripts\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.162473 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbb464cf-fa45-4bec-b621-87a818a02156-config-data\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.162902 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/bbb464cf-fa45-4bec-b621-87a818a02156-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.162917 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/bbb464cf-fa45-4bec-b621-87a818a02156-dev\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.162961 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bbb464cf-fa45-4bec-b621-87a818a02156-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.162926 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/bbb464cf-fa45-4bec-b621-87a818a02156-etc-nvme\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.162985 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/bbb464cf-fa45-4bec-b621-87a818a02156-sys\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 
07:22:08.162928 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/bbb464cf-fa45-4bec-b621-87a818a02156-run\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.162979 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/bbb464cf-fa45-4bec-b621-87a818a02156-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.163005 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/bbb464cf-fa45-4bec-b621-87a818a02156-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.163009 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/bbb464cf-fa45-4bec-b621-87a818a02156-lib-modules\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.165574 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Jan 22 07:22:08 crc kubenswrapper[4982]: W0122 07:22:08.168625 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd12a7451_3edb_4f27_ac05_9bf90e55a4c8.slice/crio-d3cc091344c87829a7396c7c4d34de15825cb4703ff8279d82339bb699fddeb9 WatchSource:0}: Error finding container d3cc091344c87829a7396c7c4d34de15825cb4703ff8279d82339bb699fddeb9: Status 404 returned error can't find the container with id d3cc091344c87829a7396c7c4d34de15825cb4703ff8279d82339bb699fddeb9 Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.169111 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bbb464cf-fa45-4bec-b621-87a818a02156-scripts\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.169476 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbb464cf-fa45-4bec-b621-87a818a02156-config-data\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.173587 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/bbb464cf-fa45-4bec-b621-87a818a02156-ceph\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.185172 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bbb464cf-fa45-4bec-b621-87a818a02156-config-data-custom\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.187605 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbb464cf-fa45-4bec-b621-87a818a02156-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.188039 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kgf8c\" (UniqueName: \"kubernetes.io/projected/bbb464cf-fa45-4bec-b621-87a818a02156-kube-api-access-kgf8c\") pod \"cinder-backup-0\" (UID: \"bbb464cf-fa45-4bec-b621-87a818a02156\") " pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.274796 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.771523 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"d12a7451-3edb-4f27-ac05-9bf90e55a4c8","Type":"ContainerStarted","Data":"d3cc091344c87829a7396c7c4d34de15825cb4703ff8279d82339bb699fddeb9"} Jan 22 07:22:08 crc kubenswrapper[4982]: I0122 07:22:08.836485 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Jan 22 07:22:08 crc kubenswrapper[4982]: W0122 07:22:08.850061 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbbb464cf_fa45_4bec_b621_87a818a02156.slice/crio-bc05647415bc308f69400118489abf7b71f400293692c913f62c6ae1983b6232 WatchSource:0}: Error finding container bc05647415bc308f69400118489abf7b71f400293692c913f62c6ae1983b6232: Status 404 returned error can't find the container with id bc05647415bc308f69400118489abf7b71f400293692c913f62c6ae1983b6232 Jan 22 07:22:09 crc kubenswrapper[4982]: I0122 07:22:09.795683 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"bbb464cf-fa45-4bec-b621-87a818a02156","Type":"ContainerStarted","Data":"bc05647415bc308f69400118489abf7b71f400293692c913f62c6ae1983b6232"} Jan 22 07:22:09 crc kubenswrapper[4982]: I0122 07:22:09.798159 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"d12a7451-3edb-4f27-ac05-9bf90e55a4c8","Type":"ContainerStarted","Data":"fb74b16cb414032a9c89521491f60d94e75a09c9404e70cbe0c17eaf231ec8d3"} Jan 22 07:22:09 crc kubenswrapper[4982]: I0122 07:22:09.863883 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="e418a3f5-6769-46fa-98a9-a74f59f829a6" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.1.77:8776/healthcheck\": read tcp 10.217.0.2:43090->10.217.1.77:8776: read: connection reset by peer" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.265079 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.334186 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.384259 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.384444 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.384738 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.384905 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.388302 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.388655 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.396928 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.396988 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.401968 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.469241 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e418a3f5-6769-46fa-98a9-a74f59f829a6-config-data\") pod \"e418a3f5-6769-46fa-98a9-a74f59f829a6\" (UID: \"e418a3f5-6769-46fa-98a9-a74f59f829a6\") " Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.469391 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e418a3f5-6769-46fa-98a9-a74f59f829a6-combined-ca-bundle\") pod \"e418a3f5-6769-46fa-98a9-a74f59f829a6\" (UID: \"e418a3f5-6769-46fa-98a9-a74f59f829a6\") " Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.469434 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4bhl7\" (UniqueName: \"kubernetes.io/projected/e418a3f5-6769-46fa-98a9-a74f59f829a6-kube-api-access-4bhl7\") pod \"e418a3f5-6769-46fa-98a9-a74f59f829a6\" (UID: \"e418a3f5-6769-46fa-98a9-a74f59f829a6\") " Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.469469 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e418a3f5-6769-46fa-98a9-a74f59f829a6-logs\") pod \"e418a3f5-6769-46fa-98a9-a74f59f829a6\" (UID: \"e418a3f5-6769-46fa-98a9-a74f59f829a6\") " Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.469527 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e418a3f5-6769-46fa-98a9-a74f59f829a6-etc-machine-id\") pod \"e418a3f5-6769-46fa-98a9-a74f59f829a6\" (UID: \"e418a3f5-6769-46fa-98a9-a74f59f829a6\") " Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.469558 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/e418a3f5-6769-46fa-98a9-a74f59f829a6-config-data-custom\") pod \"e418a3f5-6769-46fa-98a9-a74f59f829a6\" (UID: \"e418a3f5-6769-46fa-98a9-a74f59f829a6\") " Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.469693 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e418a3f5-6769-46fa-98a9-a74f59f829a6-scripts\") pod \"e418a3f5-6769-46fa-98a9-a74f59f829a6\" (UID: \"e418a3f5-6769-46fa-98a9-a74f59f829a6\") " Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.472915 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e418a3f5-6769-46fa-98a9-a74f59f829a6-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "e418a3f5-6769-46fa-98a9-a74f59f829a6" (UID: "e418a3f5-6769-46fa-98a9-a74f59f829a6"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.473825 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e418a3f5-6769-46fa-98a9-a74f59f829a6-logs" (OuterVolumeSpecName: "logs") pod "e418a3f5-6769-46fa-98a9-a74f59f829a6" (UID: "e418a3f5-6769-46fa-98a9-a74f59f829a6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.492577 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e418a3f5-6769-46fa-98a9-a74f59f829a6-kube-api-access-4bhl7" (OuterVolumeSpecName: "kube-api-access-4bhl7") pod "e418a3f5-6769-46fa-98a9-a74f59f829a6" (UID: "e418a3f5-6769-46fa-98a9-a74f59f829a6"). InnerVolumeSpecName "kube-api-access-4bhl7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.493053 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e418a3f5-6769-46fa-98a9-a74f59f829a6-scripts" (OuterVolumeSpecName: "scripts") pod "e418a3f5-6769-46fa-98a9-a74f59f829a6" (UID: "e418a3f5-6769-46fa-98a9-a74f59f829a6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.498029 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e418a3f5-6769-46fa-98a9-a74f59f829a6-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "e418a3f5-6769-46fa-98a9-a74f59f829a6" (UID: "e418a3f5-6769-46fa-98a9-a74f59f829a6"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.547897 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e418a3f5-6769-46fa-98a9-a74f59f829a6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e418a3f5-6769-46fa-98a9-a74f59f829a6" (UID: "e418a3f5-6769-46fa-98a9-a74f59f829a6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.584012 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e418a3f5-6769-46fa-98a9-a74f59f829a6-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.584051 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e418a3f5-6769-46fa-98a9-a74f59f829a6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.584068 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4bhl7\" (UniqueName: \"kubernetes.io/projected/e418a3f5-6769-46fa-98a9-a74f59f829a6-kube-api-access-4bhl7\") on node \"crc\" DevicePath \"\"" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.584082 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e418a3f5-6769-46fa-98a9-a74f59f829a6-logs\") on node \"crc\" DevicePath \"\"" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.584095 4982 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e418a3f5-6769-46fa-98a9-a74f59f829a6-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.584106 4982 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e418a3f5-6769-46fa-98a9-a74f59f829a6-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.584602 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e418a3f5-6769-46fa-98a9-a74f59f829a6-config-data" (OuterVolumeSpecName: "config-data") pod "e418a3f5-6769-46fa-98a9-a74f59f829a6" (UID: "e418a3f5-6769-46fa-98a9-a74f59f829a6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.686250 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e418a3f5-6769-46fa-98a9-a74f59f829a6-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.815043 4982 generic.go:334] "Generic (PLEG): container finished" podID="e418a3f5-6769-46fa-98a9-a74f59f829a6" containerID="aea6a9c3143239e52a67b115c6c9e9b82f9120bd13a091b223ac415ba8fc4475" exitCode=0 Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.815123 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e418a3f5-6769-46fa-98a9-a74f59f829a6","Type":"ContainerDied","Data":"aea6a9c3143239e52a67b115c6c9e9b82f9120bd13a091b223ac415ba8fc4475"} Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.815156 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e418a3f5-6769-46fa-98a9-a74f59f829a6","Type":"ContainerDied","Data":"26a770e4b9a817540c56c741c7ff4a859b0818b0937a69bed1dcabce7e2700cd"} Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.815177 4982 scope.go:117] "RemoveContainer" containerID="aea6a9c3143239e52a67b115c6c9e9b82f9120bd13a091b223ac415ba8fc4475" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.815173 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.823254 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"d12a7451-3edb-4f27-ac05-9bf90e55a4c8","Type":"ContainerStarted","Data":"58336d8a5d2cab4ecc92494567a32ae7a0c6b6ee180da905261467fc985673ba"} Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.870939 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"bbb464cf-fa45-4bec-b621-87a818a02156","Type":"ContainerStarted","Data":"2fc76640d00eda228b77094e0b13fcd9f28778bbc6fd2906534e9862bad1a937"} Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.871271 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"bbb464cf-fa45-4bec-b621-87a818a02156","Type":"ContainerStarted","Data":"e17d6ce262443f9bfc40772d274db9b91f80af2bc4b201eb1485d0b30da6dfe1"} Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.879028 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-volume1-0" podStartSLOduration=2.770728617 podStartE2EDuration="3.879013353s" podCreationTimestamp="2026-01-22 07:22:07 +0000 UTC" firstStartedPulling="2026-01-22 07:22:08.170377788 +0000 UTC m=+5789.009015791" lastFinishedPulling="2026-01-22 07:22:09.278662524 +0000 UTC m=+5790.117300527" observedRunningTime="2026-01-22 07:22:10.876160116 +0000 UTC m=+5791.714798109" watchObservedRunningTime="2026-01-22 07:22:10.879013353 +0000 UTC m=+5791.717651356" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.895437 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.900023 4982 scope.go:117] "RemoveContainer" containerID="912aa6c646f840a2c6849e41dc3fa5a4a73ae2fe8596d0c1d6717b5c48de9994" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.925929 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.939216 4982 scope.go:117] "RemoveContainer" containerID="aea6a9c3143239e52a67b115c6c9e9b82f9120bd13a091b223ac415ba8fc4475" Jan 22 07:22:10 crc kubenswrapper[4982]: E0122 07:22:10.945634 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aea6a9c3143239e52a67b115c6c9e9b82f9120bd13a091b223ac415ba8fc4475\": container with ID starting with aea6a9c3143239e52a67b115c6c9e9b82f9120bd13a091b223ac415ba8fc4475 not found: ID does not exist" containerID="aea6a9c3143239e52a67b115c6c9e9b82f9120bd13a091b223ac415ba8fc4475" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.945675 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aea6a9c3143239e52a67b115c6c9e9b82f9120bd13a091b223ac415ba8fc4475"} err="failed to get container status \"aea6a9c3143239e52a67b115c6c9e9b82f9120bd13a091b223ac415ba8fc4475\": rpc error: code = NotFound desc = could not find container \"aea6a9c3143239e52a67b115c6c9e9b82f9120bd13a091b223ac415ba8fc4475\": container with ID starting with aea6a9c3143239e52a67b115c6c9e9b82f9120bd13a091b223ac415ba8fc4475 not found: ID does not exist" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.945699 4982 scope.go:117] "RemoveContainer" containerID="912aa6c646f840a2c6849e41dc3fa5a4a73ae2fe8596d0c1d6717b5c48de9994" Jan 22 07:22:10 crc kubenswrapper[4982]: E0122 
07:22:10.949742 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"912aa6c646f840a2c6849e41dc3fa5a4a73ae2fe8596d0c1d6717b5c48de9994\": container with ID starting with 912aa6c646f840a2c6849e41dc3fa5a4a73ae2fe8596d0c1d6717b5c48de9994 not found: ID does not exist" containerID="912aa6c646f840a2c6849e41dc3fa5a4a73ae2fe8596d0c1d6717b5c48de9994" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.949781 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"912aa6c646f840a2c6849e41dc3fa5a4a73ae2fe8596d0c1d6717b5c48de9994"} err="failed to get container status \"912aa6c646f840a2c6849e41dc3fa5a4a73ae2fe8596d0c1d6717b5c48de9994\": rpc error: code = NotFound desc = could not find container \"912aa6c646f840a2c6849e41dc3fa5a4a73ae2fe8596d0c1d6717b5c48de9994\": container with ID starting with 912aa6c646f840a2c6849e41dc3fa5a4a73ae2fe8596d0c1d6717b5c48de9994 not found: ID does not exist" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.956301 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.981921 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Jan 22 07:22:10 crc kubenswrapper[4982]: E0122 07:22:10.982404 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e418a3f5-6769-46fa-98a9-a74f59f829a6" containerName="cinder-api" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.982434 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e418a3f5-6769-46fa-98a9-a74f59f829a6" containerName="cinder-api" Jan 22 07:22:10 crc kubenswrapper[4982]: E0122 07:22:10.982465 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e418a3f5-6769-46fa-98a9-a74f59f829a6" containerName="cinder-api-log" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.982474 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e418a3f5-6769-46fa-98a9-a74f59f829a6" containerName="cinder-api-log" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.982681 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="e418a3f5-6769-46fa-98a9-a74f59f829a6" containerName="cinder-api" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.982712 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="e418a3f5-6769-46fa-98a9-a74f59f829a6" containerName="cinder-api-log" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.983799 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 22 07:22:10 crc kubenswrapper[4982]: I0122 07:22:10.991426 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Jan 22 07:22:11 crc kubenswrapper[4982]: I0122 07:22:11.027944 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 22 07:22:11 crc kubenswrapper[4982]: I0122 07:22:11.061463 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=3.191483868 podStartE2EDuration="4.061442534s" podCreationTimestamp="2026-01-22 07:22:07 +0000 UTC" firstStartedPulling="2026-01-22 07:22:08.851990225 +0000 UTC m=+5789.690628238" lastFinishedPulling="2026-01-22 07:22:09.721948901 +0000 UTC m=+5790.560586904" observedRunningTime="2026-01-22 07:22:10.949398401 +0000 UTC m=+5791.788036404" watchObservedRunningTime="2026-01-22 07:22:11.061442534 +0000 UTC m=+5791.900080537" Jan 22 07:22:11 crc kubenswrapper[4982]: I0122 07:22:11.102537 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/333f6482-96da-4fae-889a-b4fb57b0b089-etc-machine-id\") pod \"cinder-api-0\" (UID: \"333f6482-96da-4fae-889a-b4fb57b0b089\") " pod="openstack/cinder-api-0" Jan 22 07:22:11 crc kubenswrapper[4982]: I0122 07:22:11.102597 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/333f6482-96da-4fae-889a-b4fb57b0b089-config-data\") pod \"cinder-api-0\" (UID: \"333f6482-96da-4fae-889a-b4fb57b0b089\") " pod="openstack/cinder-api-0" Jan 22 07:22:11 crc kubenswrapper[4982]: I0122 07:22:11.102655 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2twjb\" (UniqueName: \"kubernetes.io/projected/333f6482-96da-4fae-889a-b4fb57b0b089-kube-api-access-2twjb\") pod \"cinder-api-0\" (UID: \"333f6482-96da-4fae-889a-b4fb57b0b089\") " pod="openstack/cinder-api-0" Jan 22 07:22:11 crc kubenswrapper[4982]: I0122 07:22:11.102690 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/333f6482-96da-4fae-889a-b4fb57b0b089-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"333f6482-96da-4fae-889a-b4fb57b0b089\") " pod="openstack/cinder-api-0" Jan 22 07:22:11 crc kubenswrapper[4982]: I0122 07:22:11.102713 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/333f6482-96da-4fae-889a-b4fb57b0b089-config-data-custom\") pod \"cinder-api-0\" (UID: \"333f6482-96da-4fae-889a-b4fb57b0b089\") " pod="openstack/cinder-api-0" Jan 22 07:22:11 crc kubenswrapper[4982]: I0122 07:22:11.102732 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/333f6482-96da-4fae-889a-b4fb57b0b089-scripts\") pod \"cinder-api-0\" (UID: \"333f6482-96da-4fae-889a-b4fb57b0b089\") " pod="openstack/cinder-api-0" Jan 22 07:22:11 crc kubenswrapper[4982]: I0122 07:22:11.102786 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/333f6482-96da-4fae-889a-b4fb57b0b089-logs\") pod \"cinder-api-0\" (UID: 
\"333f6482-96da-4fae-889a-b4fb57b0b089\") " pod="openstack/cinder-api-0" Jan 22 07:22:11 crc kubenswrapper[4982]: I0122 07:22:11.204600 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/333f6482-96da-4fae-889a-b4fb57b0b089-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"333f6482-96da-4fae-889a-b4fb57b0b089\") " pod="openstack/cinder-api-0" Jan 22 07:22:11 crc kubenswrapper[4982]: I0122 07:22:11.204650 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/333f6482-96da-4fae-889a-b4fb57b0b089-config-data-custom\") pod \"cinder-api-0\" (UID: \"333f6482-96da-4fae-889a-b4fb57b0b089\") " pod="openstack/cinder-api-0" Jan 22 07:22:11 crc kubenswrapper[4982]: I0122 07:22:11.204669 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/333f6482-96da-4fae-889a-b4fb57b0b089-scripts\") pod \"cinder-api-0\" (UID: \"333f6482-96da-4fae-889a-b4fb57b0b089\") " pod="openstack/cinder-api-0" Jan 22 07:22:11 crc kubenswrapper[4982]: I0122 07:22:11.204728 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/333f6482-96da-4fae-889a-b4fb57b0b089-logs\") pod \"cinder-api-0\" (UID: \"333f6482-96da-4fae-889a-b4fb57b0b089\") " pod="openstack/cinder-api-0" Jan 22 07:22:11 crc kubenswrapper[4982]: I0122 07:22:11.204760 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/333f6482-96da-4fae-889a-b4fb57b0b089-etc-machine-id\") pod \"cinder-api-0\" (UID: \"333f6482-96da-4fae-889a-b4fb57b0b089\") " pod="openstack/cinder-api-0" Jan 22 07:22:11 crc kubenswrapper[4982]: I0122 07:22:11.204797 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/333f6482-96da-4fae-889a-b4fb57b0b089-config-data\") pod \"cinder-api-0\" (UID: \"333f6482-96da-4fae-889a-b4fb57b0b089\") " pod="openstack/cinder-api-0" Jan 22 07:22:11 crc kubenswrapper[4982]: I0122 07:22:11.204842 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2twjb\" (UniqueName: \"kubernetes.io/projected/333f6482-96da-4fae-889a-b4fb57b0b089-kube-api-access-2twjb\") pod \"cinder-api-0\" (UID: \"333f6482-96da-4fae-889a-b4fb57b0b089\") " pod="openstack/cinder-api-0" Jan 22 07:22:11 crc kubenswrapper[4982]: I0122 07:22:11.205500 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/333f6482-96da-4fae-889a-b4fb57b0b089-logs\") pod \"cinder-api-0\" (UID: \"333f6482-96da-4fae-889a-b4fb57b0b089\") " pod="openstack/cinder-api-0" Jan 22 07:22:11 crc kubenswrapper[4982]: I0122 07:22:11.205544 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/333f6482-96da-4fae-889a-b4fb57b0b089-etc-machine-id\") pod \"cinder-api-0\" (UID: \"333f6482-96da-4fae-889a-b4fb57b0b089\") " pod="openstack/cinder-api-0" Jan 22 07:22:11 crc kubenswrapper[4982]: I0122 07:22:11.209272 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/333f6482-96da-4fae-889a-b4fb57b0b089-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"333f6482-96da-4fae-889a-b4fb57b0b089\") " 
pod="openstack/cinder-api-0" Jan 22 07:22:11 crc kubenswrapper[4982]: I0122 07:22:11.209325 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/333f6482-96da-4fae-889a-b4fb57b0b089-config-data\") pod \"cinder-api-0\" (UID: \"333f6482-96da-4fae-889a-b4fb57b0b089\") " pod="openstack/cinder-api-0" Jan 22 07:22:11 crc kubenswrapper[4982]: I0122 07:22:11.214370 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/333f6482-96da-4fae-889a-b4fb57b0b089-config-data-custom\") pod \"cinder-api-0\" (UID: \"333f6482-96da-4fae-889a-b4fb57b0b089\") " pod="openstack/cinder-api-0" Jan 22 07:22:11 crc kubenswrapper[4982]: I0122 07:22:11.214503 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/333f6482-96da-4fae-889a-b4fb57b0b089-scripts\") pod \"cinder-api-0\" (UID: \"333f6482-96da-4fae-889a-b4fb57b0b089\") " pod="openstack/cinder-api-0" Jan 22 07:22:11 crc kubenswrapper[4982]: I0122 07:22:11.223676 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2twjb\" (UniqueName: \"kubernetes.io/projected/333f6482-96da-4fae-889a-b4fb57b0b089-kube-api-access-2twjb\") pod \"cinder-api-0\" (UID: \"333f6482-96da-4fae-889a-b4fb57b0b089\") " pod="openstack/cinder-api-0" Jan 22 07:22:11 crc kubenswrapper[4982]: I0122 07:22:11.324914 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 22 07:22:11 crc kubenswrapper[4982]: I0122 07:22:11.731010 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e418a3f5-6769-46fa-98a9-a74f59f829a6" path="/var/lib/kubelet/pods/e418a3f5-6769-46fa-98a9-a74f59f829a6/volumes" Jan 22 07:22:11 crc kubenswrapper[4982]: I0122 07:22:11.810829 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 22 07:22:11 crc kubenswrapper[4982]: I0122 07:22:11.888365 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"333f6482-96da-4fae-889a-b4fb57b0b089","Type":"ContainerStarted","Data":"5f8129fe9f7196760d7bdfa245516e14c370b9a9bf18ce1f9bf5fc7618cdd5d8"} Jan 22 07:22:12 crc kubenswrapper[4982]: I0122 07:22:12.553576 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:12 crc kubenswrapper[4982]: I0122 07:22:12.906984 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"333f6482-96da-4fae-889a-b4fb57b0b089","Type":"ContainerStarted","Data":"e529da09b283bca54836f78c6566f6e9121c2d17b5f4569d144cbeefc5b34bde"} Jan 22 07:22:13 crc kubenswrapper[4982]: I0122 07:22:13.275371 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-backup-0" Jan 22 07:22:13 crc kubenswrapper[4982]: I0122 07:22:13.915311 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"333f6482-96da-4fae-889a-b4fb57b0b089","Type":"ContainerStarted","Data":"6879263b589d0b5b0f6cebc8eaa73ea1038c6d40d5e2e51f71033512201a5b63"} Jan 22 07:22:13 crc kubenswrapper[4982]: I0122 07:22:13.915660 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Jan 22 07:22:13 crc kubenswrapper[4982]: I0122 07:22:13.948388 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/cinder-api-0" podStartSLOduration=3.9483662859999997 podStartE2EDuration="3.948366286s" podCreationTimestamp="2026-01-22 07:22:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:22:13.934863062 +0000 UTC m=+5794.773501065" watchObservedRunningTime="2026-01-22 07:22:13.948366286 +0000 UTC m=+5794.787004289" Jan 22 07:22:15 crc kubenswrapper[4982]: I0122 07:22:15.476621 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Jan 22 07:22:15 crc kubenswrapper[4982]: I0122 07:22:15.540848 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 07:22:15 crc kubenswrapper[4982]: I0122 07:22:15.933817 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="30f471a0-9258-4460-bd4e-f5aa45d817f9" containerName="cinder-scheduler" containerID="cri-o://e52b4c20de4a2c4a86854bae608e4ee033b92176d52377522e19c658fe4c3e3b" gracePeriod=30 Jan 22 07:22:15 crc kubenswrapper[4982]: I0122 07:22:15.934477 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="30f471a0-9258-4460-bd4e-f5aa45d817f9" containerName="probe" containerID="cri-o://d24db64d800ae5ba6001364d720ec54a0e9e25b44847c437e0a584370f10dcb3" gracePeriod=30 Jan 22 07:22:16 crc kubenswrapper[4982]: I0122 07:22:16.944573 4982 generic.go:334] "Generic (PLEG): container finished" podID="30f471a0-9258-4460-bd4e-f5aa45d817f9" containerID="d24db64d800ae5ba6001364d720ec54a0e9e25b44847c437e0a584370f10dcb3" exitCode=0 Jan 22 07:22:16 crc kubenswrapper[4982]: I0122 07:22:16.944623 4982 generic.go:334] "Generic (PLEG): container finished" podID="30f471a0-9258-4460-bd4e-f5aa45d817f9" containerID="e52b4c20de4a2c4a86854bae608e4ee033b92176d52377522e19c658fe4c3e3b" exitCode=0 Jan 22 07:22:16 crc kubenswrapper[4982]: I0122 07:22:16.944656 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"30f471a0-9258-4460-bd4e-f5aa45d817f9","Type":"ContainerDied","Data":"d24db64d800ae5ba6001364d720ec54a0e9e25b44847c437e0a584370f10dcb3"} Jan 22 07:22:16 crc kubenswrapper[4982]: I0122 07:22:16.944696 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"30f471a0-9258-4460-bd4e-f5aa45d817f9","Type":"ContainerDied","Data":"e52b4c20de4a2c4a86854bae608e4ee033b92176d52377522e19c658fe4c3e3b"} Jan 22 07:22:17 crc kubenswrapper[4982]: I0122 07:22:17.370271 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 22 07:22:17 crc kubenswrapper[4982]: I0122 07:22:17.534828 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/30f471a0-9258-4460-bd4e-f5aa45d817f9-config-data-custom\") pod \"30f471a0-9258-4460-bd4e-f5aa45d817f9\" (UID: \"30f471a0-9258-4460-bd4e-f5aa45d817f9\") " Jan 22 07:22:17 crc kubenswrapper[4982]: I0122 07:22:17.534913 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lvlb8\" (UniqueName: \"kubernetes.io/projected/30f471a0-9258-4460-bd4e-f5aa45d817f9-kube-api-access-lvlb8\") pod \"30f471a0-9258-4460-bd4e-f5aa45d817f9\" (UID: \"30f471a0-9258-4460-bd4e-f5aa45d817f9\") " Jan 22 07:22:17 crc kubenswrapper[4982]: I0122 07:22:17.535136 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30f471a0-9258-4460-bd4e-f5aa45d817f9-scripts\") pod \"30f471a0-9258-4460-bd4e-f5aa45d817f9\" (UID: \"30f471a0-9258-4460-bd4e-f5aa45d817f9\") " Jan 22 07:22:17 crc kubenswrapper[4982]: I0122 07:22:17.535172 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30f471a0-9258-4460-bd4e-f5aa45d817f9-config-data\") pod \"30f471a0-9258-4460-bd4e-f5aa45d817f9\" (UID: \"30f471a0-9258-4460-bd4e-f5aa45d817f9\") " Jan 22 07:22:17 crc kubenswrapper[4982]: I0122 07:22:17.535208 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/30f471a0-9258-4460-bd4e-f5aa45d817f9-etc-machine-id\") pod \"30f471a0-9258-4460-bd4e-f5aa45d817f9\" (UID: \"30f471a0-9258-4460-bd4e-f5aa45d817f9\") " Jan 22 07:22:17 crc kubenswrapper[4982]: I0122 07:22:17.535236 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30f471a0-9258-4460-bd4e-f5aa45d817f9-combined-ca-bundle\") pod \"30f471a0-9258-4460-bd4e-f5aa45d817f9\" (UID: \"30f471a0-9258-4460-bd4e-f5aa45d817f9\") " Jan 22 07:22:17 crc kubenswrapper[4982]: I0122 07:22:17.535882 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/30f471a0-9258-4460-bd4e-f5aa45d817f9-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "30f471a0-9258-4460-bd4e-f5aa45d817f9" (UID: "30f471a0-9258-4460-bd4e-f5aa45d817f9"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 07:22:17 crc kubenswrapper[4982]: I0122 07:22:17.545605 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30f471a0-9258-4460-bd4e-f5aa45d817f9-kube-api-access-lvlb8" (OuterVolumeSpecName: "kube-api-access-lvlb8") pod "30f471a0-9258-4460-bd4e-f5aa45d817f9" (UID: "30f471a0-9258-4460-bd4e-f5aa45d817f9"). InnerVolumeSpecName "kube-api-access-lvlb8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:22:17 crc kubenswrapper[4982]: I0122 07:22:17.546101 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30f471a0-9258-4460-bd4e-f5aa45d817f9-scripts" (OuterVolumeSpecName: "scripts") pod "30f471a0-9258-4460-bd4e-f5aa45d817f9" (UID: "30f471a0-9258-4460-bd4e-f5aa45d817f9"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:22:17 crc kubenswrapper[4982]: I0122 07:22:17.547983 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30f471a0-9258-4460-bd4e-f5aa45d817f9-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "30f471a0-9258-4460-bd4e-f5aa45d817f9" (UID: "30f471a0-9258-4460-bd4e-f5aa45d817f9"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:22:17 crc kubenswrapper[4982]: I0122 07:22:17.589188 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30f471a0-9258-4460-bd4e-f5aa45d817f9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "30f471a0-9258-4460-bd4e-f5aa45d817f9" (UID: "30f471a0-9258-4460-bd4e-f5aa45d817f9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:22:17 crc kubenswrapper[4982]: I0122 07:22:17.637745 4982 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/30f471a0-9258-4460-bd4e-f5aa45d817f9-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 22 07:22:17 crc kubenswrapper[4982]: I0122 07:22:17.637785 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lvlb8\" (UniqueName: \"kubernetes.io/projected/30f471a0-9258-4460-bd4e-f5aa45d817f9-kube-api-access-lvlb8\") on node \"crc\" DevicePath \"\"" Jan 22 07:22:17 crc kubenswrapper[4982]: I0122 07:22:17.637813 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30f471a0-9258-4460-bd4e-f5aa45d817f9-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:22:17 crc kubenswrapper[4982]: I0122 07:22:17.637821 4982 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/30f471a0-9258-4460-bd4e-f5aa45d817f9-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 22 07:22:17 crc kubenswrapper[4982]: I0122 07:22:17.637830 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30f471a0-9258-4460-bd4e-f5aa45d817f9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:22:17 crc kubenswrapper[4982]: I0122 07:22:17.657016 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30f471a0-9258-4460-bd4e-f5aa45d817f9-config-data" (OuterVolumeSpecName: "config-data") pod "30f471a0-9258-4460-bd4e-f5aa45d817f9" (UID: "30f471a0-9258-4460-bd4e-f5aa45d817f9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:22:17 crc kubenswrapper[4982]: I0122 07:22:17.741048 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30f471a0-9258-4460-bd4e-f5aa45d817f9-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:22:17 crc kubenswrapper[4982]: I0122 07:22:17.808196 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-volume1-0" Jan 22 07:22:17 crc kubenswrapper[4982]: I0122 07:22:17.957825 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"30f471a0-9258-4460-bd4e-f5aa45d817f9","Type":"ContainerDied","Data":"b00acc2b2e1c77f6746f0583e68e657b7ec0b30b495990963815ebe03dc6fcc5"} Jan 22 07:22:17 crc kubenswrapper[4982]: I0122 07:22:17.957899 4982 scope.go:117] "RemoveContainer" containerID="d24db64d800ae5ba6001364d720ec54a0e9e25b44847c437e0a584370f10dcb3" Jan 22 07:22:17 crc kubenswrapper[4982]: I0122 07:22:17.957918 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 22 07:22:17 crc kubenswrapper[4982]: I0122 07:22:17.978998 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 07:22:17 crc kubenswrapper[4982]: I0122 07:22:17.983172 4982 scope.go:117] "RemoveContainer" containerID="e52b4c20de4a2c4a86854bae608e4ee033b92176d52377522e19c658fe4c3e3b" Jan 22 07:22:17 crc kubenswrapper[4982]: I0122 07:22:17.990032 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.029162 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 07:22:18 crc kubenswrapper[4982]: E0122 07:22:18.029733 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30f471a0-9258-4460-bd4e-f5aa45d817f9" containerName="cinder-scheduler" Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.029763 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="30f471a0-9258-4460-bd4e-f5aa45d817f9" containerName="cinder-scheduler" Jan 22 07:22:18 crc kubenswrapper[4982]: E0122 07:22:18.029786 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30f471a0-9258-4460-bd4e-f5aa45d817f9" containerName="probe" Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.029795 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="30f471a0-9258-4460-bd4e-f5aa45d817f9" containerName="probe" Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.030058 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="30f471a0-9258-4460-bd4e-f5aa45d817f9" containerName="probe" Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.030087 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="30f471a0-9258-4460-bd4e-f5aa45d817f9" containerName="cinder-scheduler" Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.031367 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.038910 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.047251 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.150604 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1e736e92-0f22-4a90-bc7a-7110b5d234a3-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"1e736e92-0f22-4a90-bc7a-7110b5d234a3\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.150650 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e736e92-0f22-4a90-bc7a-7110b5d234a3-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"1e736e92-0f22-4a90-bc7a-7110b5d234a3\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.150811 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e736e92-0f22-4a90-bc7a-7110b5d234a3-config-data\") pod \"cinder-scheduler-0\" (UID: \"1e736e92-0f22-4a90-bc7a-7110b5d234a3\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.150838 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkhwt\" (UniqueName: \"kubernetes.io/projected/1e736e92-0f22-4a90-bc7a-7110b5d234a3-kube-api-access-lkhwt\") pod \"cinder-scheduler-0\" (UID: \"1e736e92-0f22-4a90-bc7a-7110b5d234a3\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.150975 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1e736e92-0f22-4a90-bc7a-7110b5d234a3-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"1e736e92-0f22-4a90-bc7a-7110b5d234a3\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.151039 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1e736e92-0f22-4a90-bc7a-7110b5d234a3-scripts\") pod \"cinder-scheduler-0\" (UID: \"1e736e92-0f22-4a90-bc7a-7110b5d234a3\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.252464 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1e736e92-0f22-4a90-bc7a-7110b5d234a3-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"1e736e92-0f22-4a90-bc7a-7110b5d234a3\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.252512 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e736e92-0f22-4a90-bc7a-7110b5d234a3-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"1e736e92-0f22-4a90-bc7a-7110b5d234a3\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.252590 4982 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e736e92-0f22-4a90-bc7a-7110b5d234a3-config-data\") pod \"cinder-scheduler-0\" (UID: \"1e736e92-0f22-4a90-bc7a-7110b5d234a3\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.252622 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkhwt\" (UniqueName: \"kubernetes.io/projected/1e736e92-0f22-4a90-bc7a-7110b5d234a3-kube-api-access-lkhwt\") pod \"cinder-scheduler-0\" (UID: \"1e736e92-0f22-4a90-bc7a-7110b5d234a3\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.252669 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1e736e92-0f22-4a90-bc7a-7110b5d234a3-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"1e736e92-0f22-4a90-bc7a-7110b5d234a3\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.252728 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1e736e92-0f22-4a90-bc7a-7110b5d234a3-scripts\") pod \"cinder-scheduler-0\" (UID: \"1e736e92-0f22-4a90-bc7a-7110b5d234a3\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.253008 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1e736e92-0f22-4a90-bc7a-7110b5d234a3-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"1e736e92-0f22-4a90-bc7a-7110b5d234a3\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.256267 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1e736e92-0f22-4a90-bc7a-7110b5d234a3-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"1e736e92-0f22-4a90-bc7a-7110b5d234a3\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.256451 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e736e92-0f22-4a90-bc7a-7110b5d234a3-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"1e736e92-0f22-4a90-bc7a-7110b5d234a3\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.256987 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1e736e92-0f22-4a90-bc7a-7110b5d234a3-scripts\") pod \"cinder-scheduler-0\" (UID: \"1e736e92-0f22-4a90-bc7a-7110b5d234a3\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.264256 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e736e92-0f22-4a90-bc7a-7110b5d234a3-config-data\") pod \"cinder-scheduler-0\" (UID: \"1e736e92-0f22-4a90-bc7a-7110b5d234a3\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.268942 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkhwt\" (UniqueName: \"kubernetes.io/projected/1e736e92-0f22-4a90-bc7a-7110b5d234a3-kube-api-access-lkhwt\") pod \"cinder-scheduler-0\" (UID: \"1e736e92-0f22-4a90-bc7a-7110b5d234a3\") " pod="openstack/cinder-scheduler-0" Jan 22 07:22:18 crc 
kubenswrapper[4982]: I0122 07:22:18.355589 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.500981 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.861776 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.972082 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"1e736e92-0f22-4a90-bc7a-7110b5d234a3","Type":"ContainerStarted","Data":"460479abc5c06d7aa390c34cb1f565c6ebc81f4600c207b717c34f9dd107fc8f"} Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.973603 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.973671 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.973729 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.974593 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3a502d842903abfcf3ed22217fd513f90f995fa6712f8a8d56a2c3f5ed5f5a08"} pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 07:22:18 crc kubenswrapper[4982]: I0122 07:22:18.974681 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" containerID="cri-o://3a502d842903abfcf3ed22217fd513f90f995fa6712f8a8d56a2c3f5ed5f5a08" gracePeriod=600 Jan 22 07:22:19 crc kubenswrapper[4982]: I0122 07:22:19.731545 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30f471a0-9258-4460-bd4e-f5aa45d817f9" path="/var/lib/kubelet/pods/30f471a0-9258-4460-bd4e-f5aa45d817f9/volumes" Jan 22 07:22:19 crc kubenswrapper[4982]: I0122 07:22:19.989226 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"1e736e92-0f22-4a90-bc7a-7110b5d234a3","Type":"ContainerStarted","Data":"68ea2113aab12485c040a5a7e05efc779910d41acd40da649875b2ce9ebd12c7"} Jan 22 07:22:19 crc kubenswrapper[4982]: I0122 07:22:19.992179 4982 generic.go:334] "Generic (PLEG): container finished" podID="2829369e-72ba-4637-853b-88f5cf242a0e" containerID="3a502d842903abfcf3ed22217fd513f90f995fa6712f8a8d56a2c3f5ed5f5a08" exitCode=0 Jan 22 07:22:19 crc kubenswrapper[4982]: I0122 07:22:19.992211 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerDied","Data":"3a502d842903abfcf3ed22217fd513f90f995fa6712f8a8d56a2c3f5ed5f5a08"} Jan 22 07:22:19 crc kubenswrapper[4982]: I0122 07:22:19.992235 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"e86d7de3d5541f26c30d5e3150518e2a21550ebe9adbca61610c9190dc552078"} Jan 22 07:22:19 crc kubenswrapper[4982]: I0122 07:22:19.992254 4982 scope.go:117] "RemoveContainer" containerID="dd1b1f68267d9d0fc8f31fc896f788451cccffbbd47d3265a31a9c82c393becc" Jan 22 07:22:21 crc kubenswrapper[4982]: I0122 07:22:21.033450 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"1e736e92-0f22-4a90-bc7a-7110b5d234a3","Type":"ContainerStarted","Data":"f66272dc7a2166381736c5e25a8102f04f8d8a296123e69a8a080c83fd56d4fb"} Jan 22 07:22:22 crc kubenswrapper[4982]: I0122 07:22:22.073436 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=5.073417663 podStartE2EDuration="5.073417663s" podCreationTimestamp="2026-01-22 07:22:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:22:22.066223829 +0000 UTC m=+5802.904861832" watchObservedRunningTime="2026-01-22 07:22:22.073417663 +0000 UTC m=+5802.912055666" Jan 22 07:22:23 crc kubenswrapper[4982]: I0122 07:22:23.356523 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Jan 22 07:22:23 crc kubenswrapper[4982]: I0122 07:22:23.363120 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Jan 22 07:22:28 crc kubenswrapper[4982]: I0122 07:22:28.568828 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Jan 22 07:23:48 crc kubenswrapper[4982]: I0122 07:23:48.058003 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-v9kjh"] Jan 22 07:23:48 crc kubenswrapper[4982]: I0122 07:23:48.072602 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-93eb-account-create-update-zhh7w"] Jan 22 07:23:48 crc kubenswrapper[4982]: I0122 07:23:48.082913 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-v9kjh"] Jan 22 07:23:48 crc kubenswrapper[4982]: I0122 07:23:48.092270 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-93eb-account-create-update-zhh7w"] Jan 22 07:23:49 crc kubenswrapper[4982]: I0122 07:23:49.730930 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="169e4b4a-523d-4bef-b648-a7275adac026" path="/var/lib/kubelet/pods/169e4b4a-523d-4bef-b648-a7275adac026/volumes" Jan 22 07:23:49 crc kubenswrapper[4982]: I0122 07:23:49.731508 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c46f7e2b-0906-4ac7-8ebd-7f0613495430" path="/var/lib/kubelet/pods/c46f7e2b-0906-4ac7-8ebd-7f0613495430/volumes" Jan 22 07:23:56 crc kubenswrapper[4982]: I0122 07:23:56.045634 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-cht8h"] Jan 22 07:23:56 crc kubenswrapper[4982]: I0122 07:23:56.070422 4982 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openstack/keystone-db-sync-cht8h"] Jan 22 07:23:57 crc kubenswrapper[4982]: I0122 07:23:57.741535 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa7ce16f-0a3f-4a8b-9684-271e228ad1e3" path="/var/lib/kubelet/pods/fa7ce16f-0a3f-4a8b-9684-271e228ad1e3/volumes" Jan 22 07:24:04 crc kubenswrapper[4982]: I0122 07:24:04.072825 4982 scope.go:117] "RemoveContainer" containerID="aa9ec77e471a75f91680f04cf86fd9df10e0a53d15bd1850cc5e513c9d7b51d2" Jan 22 07:24:04 crc kubenswrapper[4982]: I0122 07:24:04.095077 4982 scope.go:117] "RemoveContainer" containerID="bd6e4fb9a9bd45d2f71b866e0ce285be9106062b4a5d29e6d4e3df137c42f8a8" Jan 22 07:24:04 crc kubenswrapper[4982]: I0122 07:24:04.150893 4982 scope.go:117] "RemoveContainer" containerID="0134f1ba61dc82f7c6e6b6fdc6b427144bf07b978d7b1049fd7edf7ae7b627e7" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.047048 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-pd22k"] Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.059592 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-pd22k"] Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.478863 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-twjrm"] Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.480430 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-twjrm" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.482654 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-ll88x" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.482934 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.491624 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-qmkb2"] Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.494420 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-qmkb2" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.510654 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-twjrm"] Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.527799 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-qmkb2"] Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.609266 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4b1018e3-51c0-493b-b135-aac6ea8d246c-var-run\") pod \"ovn-controller-twjrm\" (UID: \"4b1018e3-51c0-493b-b135-aac6ea8d246c\") " pod="openstack/ovn-controller-twjrm" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.609348 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4qsc\" (UniqueName: \"kubernetes.io/projected/4b1018e3-51c0-493b-b135-aac6ea8d246c-kube-api-access-m4qsc\") pod \"ovn-controller-twjrm\" (UID: \"4b1018e3-51c0-493b-b135-aac6ea8d246c\") " pod="openstack/ovn-controller-twjrm" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.609396 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/da84874a-a3bf-4585-b0ab-08ac6143cd32-etc-ovs\") pod \"ovn-controller-ovs-qmkb2\" (UID: \"da84874a-a3bf-4585-b0ab-08ac6143cd32\") " pod="openstack/ovn-controller-ovs-qmkb2" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.609430 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/da84874a-a3bf-4585-b0ab-08ac6143cd32-var-run\") pod \"ovn-controller-ovs-qmkb2\" (UID: \"da84874a-a3bf-4585-b0ab-08ac6143cd32\") " pod="openstack/ovn-controller-ovs-qmkb2" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.610512 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/4b1018e3-51c0-493b-b135-aac6ea8d246c-var-run-ovn\") pod \"ovn-controller-twjrm\" (UID: \"4b1018e3-51c0-493b-b135-aac6ea8d246c\") " pod="openstack/ovn-controller-twjrm" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.610586 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqvpm\" (UniqueName: \"kubernetes.io/projected/da84874a-a3bf-4585-b0ab-08ac6143cd32-kube-api-access-jqvpm\") pod \"ovn-controller-ovs-qmkb2\" (UID: \"da84874a-a3bf-4585-b0ab-08ac6143cd32\") " pod="openstack/ovn-controller-ovs-qmkb2" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.610695 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/da84874a-a3bf-4585-b0ab-08ac6143cd32-var-lib\") pod \"ovn-controller-ovs-qmkb2\" (UID: \"da84874a-a3bf-4585-b0ab-08ac6143cd32\") " pod="openstack/ovn-controller-ovs-qmkb2" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.610755 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/da84874a-a3bf-4585-b0ab-08ac6143cd32-scripts\") pod \"ovn-controller-ovs-qmkb2\" (UID: \"da84874a-a3bf-4585-b0ab-08ac6143cd32\") " pod="openstack/ovn-controller-ovs-qmkb2" Jan 22 07:24:12 crc kubenswrapper[4982]: 
I0122 07:24:12.611253 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/da84874a-a3bf-4585-b0ab-08ac6143cd32-var-log\") pod \"ovn-controller-ovs-qmkb2\" (UID: \"da84874a-a3bf-4585-b0ab-08ac6143cd32\") " pod="openstack/ovn-controller-ovs-qmkb2" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.611548 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4b1018e3-51c0-493b-b135-aac6ea8d246c-scripts\") pod \"ovn-controller-twjrm\" (UID: \"4b1018e3-51c0-493b-b135-aac6ea8d246c\") " pod="openstack/ovn-controller-twjrm" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.611612 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/4b1018e3-51c0-493b-b135-aac6ea8d246c-var-log-ovn\") pod \"ovn-controller-twjrm\" (UID: \"4b1018e3-51c0-493b-b135-aac6ea8d246c\") " pod="openstack/ovn-controller-twjrm" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.713039 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/da84874a-a3bf-4585-b0ab-08ac6143cd32-var-lib\") pod \"ovn-controller-ovs-qmkb2\" (UID: \"da84874a-a3bf-4585-b0ab-08ac6143cd32\") " pod="openstack/ovn-controller-ovs-qmkb2" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.713088 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/da84874a-a3bf-4585-b0ab-08ac6143cd32-scripts\") pod \"ovn-controller-ovs-qmkb2\" (UID: \"da84874a-a3bf-4585-b0ab-08ac6143cd32\") " pod="openstack/ovn-controller-ovs-qmkb2" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.713112 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/da84874a-a3bf-4585-b0ab-08ac6143cd32-var-log\") pod \"ovn-controller-ovs-qmkb2\" (UID: \"da84874a-a3bf-4585-b0ab-08ac6143cd32\") " pod="openstack/ovn-controller-ovs-qmkb2" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.713187 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4b1018e3-51c0-493b-b135-aac6ea8d246c-scripts\") pod \"ovn-controller-twjrm\" (UID: \"4b1018e3-51c0-493b-b135-aac6ea8d246c\") " pod="openstack/ovn-controller-twjrm" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.713213 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/4b1018e3-51c0-493b-b135-aac6ea8d246c-var-log-ovn\") pod \"ovn-controller-twjrm\" (UID: \"4b1018e3-51c0-493b-b135-aac6ea8d246c\") " pod="openstack/ovn-controller-twjrm" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.713247 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4b1018e3-51c0-493b-b135-aac6ea8d246c-var-run\") pod \"ovn-controller-twjrm\" (UID: \"4b1018e3-51c0-493b-b135-aac6ea8d246c\") " pod="openstack/ovn-controller-twjrm" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.713264 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4qsc\" (UniqueName: 
\"kubernetes.io/projected/4b1018e3-51c0-493b-b135-aac6ea8d246c-kube-api-access-m4qsc\") pod \"ovn-controller-twjrm\" (UID: \"4b1018e3-51c0-493b-b135-aac6ea8d246c\") " pod="openstack/ovn-controller-twjrm" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.713295 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/da84874a-a3bf-4585-b0ab-08ac6143cd32-etc-ovs\") pod \"ovn-controller-ovs-qmkb2\" (UID: \"da84874a-a3bf-4585-b0ab-08ac6143cd32\") " pod="openstack/ovn-controller-ovs-qmkb2" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.713327 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/da84874a-a3bf-4585-b0ab-08ac6143cd32-var-run\") pod \"ovn-controller-ovs-qmkb2\" (UID: \"da84874a-a3bf-4585-b0ab-08ac6143cd32\") " pod="openstack/ovn-controller-ovs-qmkb2" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.713355 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/4b1018e3-51c0-493b-b135-aac6ea8d246c-var-run-ovn\") pod \"ovn-controller-twjrm\" (UID: \"4b1018e3-51c0-493b-b135-aac6ea8d246c\") " pod="openstack/ovn-controller-twjrm" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.713381 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqvpm\" (UniqueName: \"kubernetes.io/projected/da84874a-a3bf-4585-b0ab-08ac6143cd32-kube-api-access-jqvpm\") pod \"ovn-controller-ovs-qmkb2\" (UID: \"da84874a-a3bf-4585-b0ab-08ac6143cd32\") " pod="openstack/ovn-controller-ovs-qmkb2" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.713507 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/da84874a-a3bf-4585-b0ab-08ac6143cd32-var-log\") pod \"ovn-controller-ovs-qmkb2\" (UID: \"da84874a-a3bf-4585-b0ab-08ac6143cd32\") " pod="openstack/ovn-controller-ovs-qmkb2" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.713545 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/4b1018e3-51c0-493b-b135-aac6ea8d246c-var-run-ovn\") pod \"ovn-controller-twjrm\" (UID: \"4b1018e3-51c0-493b-b135-aac6ea8d246c\") " pod="openstack/ovn-controller-twjrm" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.713645 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/da84874a-a3bf-4585-b0ab-08ac6143cd32-var-run\") pod \"ovn-controller-ovs-qmkb2\" (UID: \"da84874a-a3bf-4585-b0ab-08ac6143cd32\") " pod="openstack/ovn-controller-ovs-qmkb2" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.713630 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/da84874a-a3bf-4585-b0ab-08ac6143cd32-etc-ovs\") pod \"ovn-controller-ovs-qmkb2\" (UID: \"da84874a-a3bf-4585-b0ab-08ac6143cd32\") " pod="openstack/ovn-controller-ovs-qmkb2" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.713646 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4b1018e3-51c0-493b-b135-aac6ea8d246c-var-run\") pod \"ovn-controller-twjrm\" (UID: \"4b1018e3-51c0-493b-b135-aac6ea8d246c\") " pod="openstack/ovn-controller-twjrm" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.713695 4982 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/da84874a-a3bf-4585-b0ab-08ac6143cd32-var-lib\") pod \"ovn-controller-ovs-qmkb2\" (UID: \"da84874a-a3bf-4585-b0ab-08ac6143cd32\") " pod="openstack/ovn-controller-ovs-qmkb2" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.713646 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/4b1018e3-51c0-493b-b135-aac6ea8d246c-var-log-ovn\") pod \"ovn-controller-twjrm\" (UID: \"4b1018e3-51c0-493b-b135-aac6ea8d246c\") " pod="openstack/ovn-controller-twjrm" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.715377 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/da84874a-a3bf-4585-b0ab-08ac6143cd32-scripts\") pod \"ovn-controller-ovs-qmkb2\" (UID: \"da84874a-a3bf-4585-b0ab-08ac6143cd32\") " pod="openstack/ovn-controller-ovs-qmkb2" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.716731 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4b1018e3-51c0-493b-b135-aac6ea8d246c-scripts\") pod \"ovn-controller-twjrm\" (UID: \"4b1018e3-51c0-493b-b135-aac6ea8d246c\") " pod="openstack/ovn-controller-twjrm" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.753107 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqvpm\" (UniqueName: \"kubernetes.io/projected/da84874a-a3bf-4585-b0ab-08ac6143cd32-kube-api-access-jqvpm\") pod \"ovn-controller-ovs-qmkb2\" (UID: \"da84874a-a3bf-4585-b0ab-08ac6143cd32\") " pod="openstack/ovn-controller-ovs-qmkb2" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.753167 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4qsc\" (UniqueName: \"kubernetes.io/projected/4b1018e3-51c0-493b-b135-aac6ea8d246c-kube-api-access-m4qsc\") pod \"ovn-controller-twjrm\" (UID: \"4b1018e3-51c0-493b-b135-aac6ea8d246c\") " pod="openstack/ovn-controller-twjrm" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.802574 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-twjrm" Jan 22 07:24:12 crc kubenswrapper[4982]: I0122 07:24:12.827193 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-qmkb2" Jan 22 07:24:13 crc kubenswrapper[4982]: I0122 07:24:13.434553 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-twjrm"] Jan 22 07:24:13 crc kubenswrapper[4982]: I0122 07:24:13.772235 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7875445a-de0e-442e-9e16-1933fda0823e" path="/var/lib/kubelet/pods/7875445a-de0e-442e-9e16-1933fda0823e/volumes" Jan 22 07:24:13 crc kubenswrapper[4982]: I0122 07:24:13.775828 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-qmkb2"] Jan 22 07:24:13 crc kubenswrapper[4982]: I0122 07:24:13.997127 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-tzznv"] Jan 22 07:24:14 crc kubenswrapper[4982]: I0122 07:24:14.003131 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-tzznv" Jan 22 07:24:14 crc kubenswrapper[4982]: I0122 07:24:14.006476 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Jan 22 07:24:14 crc kubenswrapper[4982]: I0122 07:24:14.025251 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-tzznv"] Jan 22 07:24:14 crc kubenswrapper[4982]: I0122 07:24:14.052077 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/060cb2fa-6324-4520-a00d-6345ef4126fb-ovs-rundir\") pod \"ovn-controller-metrics-tzznv\" (UID: \"060cb2fa-6324-4520-a00d-6345ef4126fb\") " pod="openstack/ovn-controller-metrics-tzznv" Jan 22 07:24:14 crc kubenswrapper[4982]: I0122 07:24:14.052140 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/060cb2fa-6324-4520-a00d-6345ef4126fb-ovn-rundir\") pod \"ovn-controller-metrics-tzznv\" (UID: \"060cb2fa-6324-4520-a00d-6345ef4126fb\") " pod="openstack/ovn-controller-metrics-tzznv" Jan 22 07:24:14 crc kubenswrapper[4982]: I0122 07:24:14.052164 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/060cb2fa-6324-4520-a00d-6345ef4126fb-config\") pod \"ovn-controller-metrics-tzznv\" (UID: \"060cb2fa-6324-4520-a00d-6345ef4126fb\") " pod="openstack/ovn-controller-metrics-tzznv" Jan 22 07:24:14 crc kubenswrapper[4982]: I0122 07:24:14.052302 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjqst\" (UniqueName: \"kubernetes.io/projected/060cb2fa-6324-4520-a00d-6345ef4126fb-kube-api-access-zjqst\") pod \"ovn-controller-metrics-tzznv\" (UID: \"060cb2fa-6324-4520-a00d-6345ef4126fb\") " pod="openstack/ovn-controller-metrics-tzznv" Jan 22 07:24:14 crc kubenswrapper[4982]: I0122 07:24:14.154768 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/060cb2fa-6324-4520-a00d-6345ef4126fb-ovs-rundir\") pod \"ovn-controller-metrics-tzznv\" (UID: \"060cb2fa-6324-4520-a00d-6345ef4126fb\") " pod="openstack/ovn-controller-metrics-tzznv" Jan 22 07:24:14 crc kubenswrapper[4982]: I0122 07:24:14.154871 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/060cb2fa-6324-4520-a00d-6345ef4126fb-ovn-rundir\") pod \"ovn-controller-metrics-tzznv\" (UID: \"060cb2fa-6324-4520-a00d-6345ef4126fb\") " pod="openstack/ovn-controller-metrics-tzznv" Jan 22 07:24:14 crc kubenswrapper[4982]: I0122 07:24:14.154899 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/060cb2fa-6324-4520-a00d-6345ef4126fb-config\") pod \"ovn-controller-metrics-tzznv\" (UID: \"060cb2fa-6324-4520-a00d-6345ef4126fb\") " pod="openstack/ovn-controller-metrics-tzznv" Jan 22 07:24:14 crc kubenswrapper[4982]: I0122 07:24:14.155069 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjqst\" (UniqueName: \"kubernetes.io/projected/060cb2fa-6324-4520-a00d-6345ef4126fb-kube-api-access-zjqst\") pod \"ovn-controller-metrics-tzznv\" (UID: \"060cb2fa-6324-4520-a00d-6345ef4126fb\") " 
pod="openstack/ovn-controller-metrics-tzznv" Jan 22 07:24:14 crc kubenswrapper[4982]: I0122 07:24:14.155682 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/060cb2fa-6324-4520-a00d-6345ef4126fb-ovs-rundir\") pod \"ovn-controller-metrics-tzznv\" (UID: \"060cb2fa-6324-4520-a00d-6345ef4126fb\") " pod="openstack/ovn-controller-metrics-tzznv" Jan 22 07:24:14 crc kubenswrapper[4982]: I0122 07:24:14.155747 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/060cb2fa-6324-4520-a00d-6345ef4126fb-ovn-rundir\") pod \"ovn-controller-metrics-tzznv\" (UID: \"060cb2fa-6324-4520-a00d-6345ef4126fb\") " pod="openstack/ovn-controller-metrics-tzznv" Jan 22 07:24:14 crc kubenswrapper[4982]: I0122 07:24:14.156385 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/060cb2fa-6324-4520-a00d-6345ef4126fb-config\") pod \"ovn-controller-metrics-tzznv\" (UID: \"060cb2fa-6324-4520-a00d-6345ef4126fb\") " pod="openstack/ovn-controller-metrics-tzznv" Jan 22 07:24:14 crc kubenswrapper[4982]: I0122 07:24:14.196329 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjqst\" (UniqueName: \"kubernetes.io/projected/060cb2fa-6324-4520-a00d-6345ef4126fb-kube-api-access-zjqst\") pod \"ovn-controller-metrics-tzznv\" (UID: \"060cb2fa-6324-4520-a00d-6345ef4126fb\") " pod="openstack/ovn-controller-metrics-tzznv" Jan 22 07:24:14 crc kubenswrapper[4982]: I0122 07:24:14.205394 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-qmkb2" event={"ID":"da84874a-a3bf-4585-b0ab-08ac6143cd32","Type":"ContainerStarted","Data":"208be2a20bbad3f5ca55b8030c3dde704116dbd829d4f03b3b1c18c879f78be9"} Jan 22 07:24:14 crc kubenswrapper[4982]: I0122 07:24:14.205459 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-qmkb2" event={"ID":"da84874a-a3bf-4585-b0ab-08ac6143cd32","Type":"ContainerStarted","Data":"ec464bc4977e896e27766f420e52ffcf594dfed269d7743dcb1a7213ed4c0477"} Jan 22 07:24:14 crc kubenswrapper[4982]: I0122 07:24:14.209799 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-twjrm" event={"ID":"4b1018e3-51c0-493b-b135-aac6ea8d246c","Type":"ContainerStarted","Data":"f37c26d569246cfb21c573945bc73606824a080307824dcf1fea3e54395f18d0"} Jan 22 07:24:14 crc kubenswrapper[4982]: I0122 07:24:14.209886 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-twjrm" event={"ID":"4b1018e3-51c0-493b-b135-aac6ea8d246c","Type":"ContainerStarted","Data":"c60037da47395760adc66c568ec450f6613135ef4a88609c7293ac3420f161f8"} Jan 22 07:24:14 crc kubenswrapper[4982]: I0122 07:24:14.210658 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-twjrm" Jan 22 07:24:14 crc kubenswrapper[4982]: I0122 07:24:14.266639 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-twjrm" podStartSLOduration=2.266616049 podStartE2EDuration="2.266616049s" podCreationTimestamp="2026-01-22 07:24:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:24:14.250089113 +0000 UTC m=+5915.088727116" watchObservedRunningTime="2026-01-22 07:24:14.266616049 +0000 UTC m=+5915.105254072" Jan 22 07:24:14 crc 
kubenswrapper[4982]: I0122 07:24:14.336554 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-tzznv" Jan 22 07:24:14 crc kubenswrapper[4982]: I0122 07:24:14.410955 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-db-create-l5lqp"] Jan 22 07:24:14 crc kubenswrapper[4982]: I0122 07:24:14.412377 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-l5lqp" Jan 22 07:24:14 crc kubenswrapper[4982]: I0122 07:24:14.438960 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-create-l5lqp"] Jan 22 07:24:14 crc kubenswrapper[4982]: I0122 07:24:14.460703 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/68979bb0-36e1-4068-9700-f3eba7180048-operator-scripts\") pod \"octavia-db-create-l5lqp\" (UID: \"68979bb0-36e1-4068-9700-f3eba7180048\") " pod="openstack/octavia-db-create-l5lqp" Jan 22 07:24:14 crc kubenswrapper[4982]: I0122 07:24:14.460821 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wl4xb\" (UniqueName: \"kubernetes.io/projected/68979bb0-36e1-4068-9700-f3eba7180048-kube-api-access-wl4xb\") pod \"octavia-db-create-l5lqp\" (UID: \"68979bb0-36e1-4068-9700-f3eba7180048\") " pod="openstack/octavia-db-create-l5lqp" Jan 22 07:24:14 crc kubenswrapper[4982]: I0122 07:24:14.563012 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wl4xb\" (UniqueName: \"kubernetes.io/projected/68979bb0-36e1-4068-9700-f3eba7180048-kube-api-access-wl4xb\") pod \"octavia-db-create-l5lqp\" (UID: \"68979bb0-36e1-4068-9700-f3eba7180048\") " pod="openstack/octavia-db-create-l5lqp" Jan 22 07:24:14 crc kubenswrapper[4982]: I0122 07:24:14.563506 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/68979bb0-36e1-4068-9700-f3eba7180048-operator-scripts\") pod \"octavia-db-create-l5lqp\" (UID: \"68979bb0-36e1-4068-9700-f3eba7180048\") " pod="openstack/octavia-db-create-l5lqp" Jan 22 07:24:14 crc kubenswrapper[4982]: I0122 07:24:14.564409 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/68979bb0-36e1-4068-9700-f3eba7180048-operator-scripts\") pod \"octavia-db-create-l5lqp\" (UID: \"68979bb0-36e1-4068-9700-f3eba7180048\") " pod="openstack/octavia-db-create-l5lqp" Jan 22 07:24:14 crc kubenswrapper[4982]: I0122 07:24:14.584189 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wl4xb\" (UniqueName: \"kubernetes.io/projected/68979bb0-36e1-4068-9700-f3eba7180048-kube-api-access-wl4xb\") pod \"octavia-db-create-l5lqp\" (UID: \"68979bb0-36e1-4068-9700-f3eba7180048\") " pod="openstack/octavia-db-create-l5lqp" Jan 22 07:24:14 crc kubenswrapper[4982]: I0122 07:24:14.736549 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-create-l5lqp" Jan 22 07:24:14 crc kubenswrapper[4982]: I0122 07:24:14.908582 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-tzznv"] Jan 22 07:24:15 crc kubenswrapper[4982]: I0122 07:24:15.220017 4982 generic.go:334] "Generic (PLEG): container finished" podID="da84874a-a3bf-4585-b0ab-08ac6143cd32" containerID="208be2a20bbad3f5ca55b8030c3dde704116dbd829d4f03b3b1c18c879f78be9" exitCode=0 Jan 22 07:24:15 crc kubenswrapper[4982]: I0122 07:24:15.220104 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-qmkb2" event={"ID":"da84874a-a3bf-4585-b0ab-08ac6143cd32","Type":"ContainerDied","Data":"208be2a20bbad3f5ca55b8030c3dde704116dbd829d4f03b3b1c18c879f78be9"} Jan 22 07:24:15 crc kubenswrapper[4982]: I0122 07:24:15.224360 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-tzznv" event={"ID":"060cb2fa-6324-4520-a00d-6345ef4126fb","Type":"ContainerStarted","Data":"cf243c3a2aac40c65803be73cc9c8d93aa116ff74dfce7213f48b472068e14a1"} Jan 22 07:24:15 crc kubenswrapper[4982]: I0122 07:24:15.385596 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-create-l5lqp"] Jan 22 07:24:15 crc kubenswrapper[4982]: W0122 07:24:15.398734 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod68979bb0_36e1_4068_9700_f3eba7180048.slice/crio-0c7b52d15395687acfb5401cd22ac02ddf0f6cace0d92818015fad8ff144daeb WatchSource:0}: Error finding container 0c7b52d15395687acfb5401cd22ac02ddf0f6cace0d92818015fad8ff144daeb: Status 404 returned error can't find the container with id 0c7b52d15395687acfb5401cd22ac02ddf0f6cace0d92818015fad8ff144daeb Jan 22 07:24:16 crc kubenswrapper[4982]: E0122 07:24:16.037356 4982 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod68979bb0_36e1_4068_9700_f3eba7180048.slice/crio-df77a0301e1bd55f81631cddda760c76ee044991b3bf82befd39ad5962baa23e.scope\": RecentStats: unable to find data in memory cache]" Jan 22 07:24:16 crc kubenswrapper[4982]: I0122 07:24:16.078364 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-8c4e-account-create-update-kbxq5"] Jan 22 07:24:16 crc kubenswrapper[4982]: I0122 07:24:16.085820 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-8c4e-account-create-update-kbxq5" Jan 22 07:24:16 crc kubenswrapper[4982]: I0122 07:24:16.092256 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-db-secret" Jan 22 07:24:16 crc kubenswrapper[4982]: I0122 07:24:16.115377 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-8c4e-account-create-update-kbxq5"] Jan 22 07:24:16 crc kubenswrapper[4982]: I0122 07:24:16.121540 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/063d2222-26e8-4ffb-8210-e198da9b286b-operator-scripts\") pod \"octavia-8c4e-account-create-update-kbxq5\" (UID: \"063d2222-26e8-4ffb-8210-e198da9b286b\") " pod="openstack/octavia-8c4e-account-create-update-kbxq5" Jan 22 07:24:16 crc kubenswrapper[4982]: I0122 07:24:16.121609 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jg9dh\" (UniqueName: \"kubernetes.io/projected/063d2222-26e8-4ffb-8210-e198da9b286b-kube-api-access-jg9dh\") pod \"octavia-8c4e-account-create-update-kbxq5\" (UID: \"063d2222-26e8-4ffb-8210-e198da9b286b\") " pod="openstack/octavia-8c4e-account-create-update-kbxq5" Jan 22 07:24:16 crc kubenswrapper[4982]: I0122 07:24:16.223999 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/063d2222-26e8-4ffb-8210-e198da9b286b-operator-scripts\") pod \"octavia-8c4e-account-create-update-kbxq5\" (UID: \"063d2222-26e8-4ffb-8210-e198da9b286b\") " pod="openstack/octavia-8c4e-account-create-update-kbxq5" Jan 22 07:24:16 crc kubenswrapper[4982]: I0122 07:24:16.224067 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jg9dh\" (UniqueName: \"kubernetes.io/projected/063d2222-26e8-4ffb-8210-e198da9b286b-kube-api-access-jg9dh\") pod \"octavia-8c4e-account-create-update-kbxq5\" (UID: \"063d2222-26e8-4ffb-8210-e198da9b286b\") " pod="openstack/octavia-8c4e-account-create-update-kbxq5" Jan 22 07:24:16 crc kubenswrapper[4982]: I0122 07:24:16.225341 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/063d2222-26e8-4ffb-8210-e198da9b286b-operator-scripts\") pod \"octavia-8c4e-account-create-update-kbxq5\" (UID: \"063d2222-26e8-4ffb-8210-e198da9b286b\") " pod="openstack/octavia-8c4e-account-create-update-kbxq5" Jan 22 07:24:16 crc kubenswrapper[4982]: I0122 07:24:16.236935 4982 generic.go:334] "Generic (PLEG): container finished" podID="68979bb0-36e1-4068-9700-f3eba7180048" containerID="df77a0301e1bd55f81631cddda760c76ee044991b3bf82befd39ad5962baa23e" exitCode=0 Jan 22 07:24:16 crc kubenswrapper[4982]: I0122 07:24:16.236996 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-l5lqp" event={"ID":"68979bb0-36e1-4068-9700-f3eba7180048","Type":"ContainerDied","Data":"df77a0301e1bd55f81631cddda760c76ee044991b3bf82befd39ad5962baa23e"} Jan 22 07:24:16 crc kubenswrapper[4982]: I0122 07:24:16.237044 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-l5lqp" event={"ID":"68979bb0-36e1-4068-9700-f3eba7180048","Type":"ContainerStarted","Data":"0c7b52d15395687acfb5401cd22ac02ddf0f6cace0d92818015fad8ff144daeb"} Jan 22 07:24:16 crc kubenswrapper[4982]: I0122 07:24:16.242174 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovn-controller-ovs-qmkb2" event={"ID":"da84874a-a3bf-4585-b0ab-08ac6143cd32","Type":"ContainerStarted","Data":"846bfd8d7f920506cf5e19565c6a5624479173c1eb6e63ea091176d068c92541"} Jan 22 07:24:16 crc kubenswrapper[4982]: I0122 07:24:16.242224 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-qmkb2" event={"ID":"da84874a-a3bf-4585-b0ab-08ac6143cd32","Type":"ContainerStarted","Data":"53b67910fb5f35c1b02282bf4504eebc3d269970857f600111dc8bcdee137386"} Jan 22 07:24:16 crc kubenswrapper[4982]: I0122 07:24:16.243475 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-qmkb2" Jan 22 07:24:16 crc kubenswrapper[4982]: I0122 07:24:16.243518 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-qmkb2" Jan 22 07:24:16 crc kubenswrapper[4982]: I0122 07:24:16.246455 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-tzznv" event={"ID":"060cb2fa-6324-4520-a00d-6345ef4126fb","Type":"ContainerStarted","Data":"4e335094944d94ad1cf912dbb1a9a778cb744661e8e2e0848949166a33fbf6ec"} Jan 22 07:24:16 crc kubenswrapper[4982]: I0122 07:24:16.268938 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jg9dh\" (UniqueName: \"kubernetes.io/projected/063d2222-26e8-4ffb-8210-e198da9b286b-kube-api-access-jg9dh\") pod \"octavia-8c4e-account-create-update-kbxq5\" (UID: \"063d2222-26e8-4ffb-8210-e198da9b286b\") " pod="openstack/octavia-8c4e-account-create-update-kbxq5" Jan 22 07:24:16 crc kubenswrapper[4982]: I0122 07:24:16.288223 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-qmkb2" podStartSLOduration=4.288193238 podStartE2EDuration="4.288193238s" podCreationTimestamp="2026-01-22 07:24:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:24:16.281007294 +0000 UTC m=+5917.119645307" watchObservedRunningTime="2026-01-22 07:24:16.288193238 +0000 UTC m=+5917.126831241" Jan 22 07:24:16 crc kubenswrapper[4982]: I0122 07:24:16.302669 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-tzznv" podStartSLOduration=3.302647178 podStartE2EDuration="3.302647178s" podCreationTimestamp="2026-01-22 07:24:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:24:16.295254689 +0000 UTC m=+5917.133892692" watchObservedRunningTime="2026-01-22 07:24:16.302647178 +0000 UTC m=+5917.141285181" Jan 22 07:24:16 crc kubenswrapper[4982]: I0122 07:24:16.429584 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-8c4e-account-create-update-kbxq5" Jan 22 07:24:16 crc kubenswrapper[4982]: I0122 07:24:16.869309 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-8c4e-account-create-update-kbxq5"] Jan 22 07:24:16 crc kubenswrapper[4982]: W0122 07:24:16.877651 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod063d2222_26e8_4ffb_8210_e198da9b286b.slice/crio-a217477e7bc3ef13c618facf6002f949c0a27a0af9cc61bfeaa620c42a9972c4 WatchSource:0}: Error finding container a217477e7bc3ef13c618facf6002f949c0a27a0af9cc61bfeaa620c42a9972c4: Status 404 returned error can't find the container with id a217477e7bc3ef13c618facf6002f949c0a27a0af9cc61bfeaa620c42a9972c4 Jan 22 07:24:17 crc kubenswrapper[4982]: I0122 07:24:17.257425 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-8c4e-account-create-update-kbxq5" event={"ID":"063d2222-26e8-4ffb-8210-e198da9b286b","Type":"ContainerStarted","Data":"a217477e7bc3ef13c618facf6002f949c0a27a0af9cc61bfeaa620c42a9972c4"} Jan 22 07:24:17 crc kubenswrapper[4982]: I0122 07:24:17.624622 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-l5lqp" Jan 22 07:24:17 crc kubenswrapper[4982]: I0122 07:24:17.649266 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/68979bb0-36e1-4068-9700-f3eba7180048-operator-scripts\") pod \"68979bb0-36e1-4068-9700-f3eba7180048\" (UID: \"68979bb0-36e1-4068-9700-f3eba7180048\") " Jan 22 07:24:17 crc kubenswrapper[4982]: I0122 07:24:17.649510 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wl4xb\" (UniqueName: \"kubernetes.io/projected/68979bb0-36e1-4068-9700-f3eba7180048-kube-api-access-wl4xb\") pod \"68979bb0-36e1-4068-9700-f3eba7180048\" (UID: \"68979bb0-36e1-4068-9700-f3eba7180048\") " Jan 22 07:24:17 crc kubenswrapper[4982]: I0122 07:24:17.650207 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/68979bb0-36e1-4068-9700-f3eba7180048-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "68979bb0-36e1-4068-9700-f3eba7180048" (UID: "68979bb0-36e1-4068-9700-f3eba7180048"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:24:17 crc kubenswrapper[4982]: I0122 07:24:17.666188 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68979bb0-36e1-4068-9700-f3eba7180048-kube-api-access-wl4xb" (OuterVolumeSpecName: "kube-api-access-wl4xb") pod "68979bb0-36e1-4068-9700-f3eba7180048" (UID: "68979bb0-36e1-4068-9700-f3eba7180048"). InnerVolumeSpecName "kube-api-access-wl4xb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:24:17 crc kubenswrapper[4982]: I0122 07:24:17.752569 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wl4xb\" (UniqueName: \"kubernetes.io/projected/68979bb0-36e1-4068-9700-f3eba7180048-kube-api-access-wl4xb\") on node \"crc\" DevicePath \"\"" Jan 22 07:24:17 crc kubenswrapper[4982]: I0122 07:24:17.752610 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/68979bb0-36e1-4068-9700-f3eba7180048-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:24:18 crc kubenswrapper[4982]: I0122 07:24:18.268687 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-l5lqp" Jan 22 07:24:18 crc kubenswrapper[4982]: I0122 07:24:18.268684 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-l5lqp" event={"ID":"68979bb0-36e1-4068-9700-f3eba7180048","Type":"ContainerDied","Data":"0c7b52d15395687acfb5401cd22ac02ddf0f6cace0d92818015fad8ff144daeb"} Jan 22 07:24:18 crc kubenswrapper[4982]: I0122 07:24:18.269168 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0c7b52d15395687acfb5401cd22ac02ddf0f6cace0d92818015fad8ff144daeb" Jan 22 07:24:18 crc kubenswrapper[4982]: I0122 07:24:18.270369 4982 generic.go:334] "Generic (PLEG): container finished" podID="063d2222-26e8-4ffb-8210-e198da9b286b" containerID="e5832f192dd1ff64fc934156e7f28e9c048c261a0b1fd685deb2d242782c1dff" exitCode=0 Jan 22 07:24:18 crc kubenswrapper[4982]: I0122 07:24:18.270399 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-8c4e-account-create-update-kbxq5" event={"ID":"063d2222-26e8-4ffb-8210-e198da9b286b","Type":"ContainerDied","Data":"e5832f192dd1ff64fc934156e7f28e9c048c261a0b1fd685deb2d242782c1dff"} Jan 22 07:24:19 crc kubenswrapper[4982]: I0122 07:24:19.651262 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-8c4e-account-create-update-kbxq5" Jan 22 07:24:19 crc kubenswrapper[4982]: I0122 07:24:19.796934 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/063d2222-26e8-4ffb-8210-e198da9b286b-operator-scripts\") pod \"063d2222-26e8-4ffb-8210-e198da9b286b\" (UID: \"063d2222-26e8-4ffb-8210-e198da9b286b\") " Jan 22 07:24:19 crc kubenswrapper[4982]: I0122 07:24:19.797024 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jg9dh\" (UniqueName: \"kubernetes.io/projected/063d2222-26e8-4ffb-8210-e198da9b286b-kube-api-access-jg9dh\") pod \"063d2222-26e8-4ffb-8210-e198da9b286b\" (UID: \"063d2222-26e8-4ffb-8210-e198da9b286b\") " Jan 22 07:24:19 crc kubenswrapper[4982]: I0122 07:24:19.797576 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/063d2222-26e8-4ffb-8210-e198da9b286b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "063d2222-26e8-4ffb-8210-e198da9b286b" (UID: "063d2222-26e8-4ffb-8210-e198da9b286b"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:24:19 crc kubenswrapper[4982]: I0122 07:24:19.802659 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/063d2222-26e8-4ffb-8210-e198da9b286b-kube-api-access-jg9dh" (OuterVolumeSpecName: "kube-api-access-jg9dh") pod "063d2222-26e8-4ffb-8210-e198da9b286b" (UID: "063d2222-26e8-4ffb-8210-e198da9b286b"). InnerVolumeSpecName "kube-api-access-jg9dh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:24:19 crc kubenswrapper[4982]: I0122 07:24:19.898787 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jg9dh\" (UniqueName: \"kubernetes.io/projected/063d2222-26e8-4ffb-8210-e198da9b286b-kube-api-access-jg9dh\") on node \"crc\" DevicePath \"\"" Jan 22 07:24:19 crc kubenswrapper[4982]: I0122 07:24:19.898827 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/063d2222-26e8-4ffb-8210-e198da9b286b-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:24:20 crc kubenswrapper[4982]: I0122 07:24:20.297520 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-8c4e-account-create-update-kbxq5" event={"ID":"063d2222-26e8-4ffb-8210-e198da9b286b","Type":"ContainerDied","Data":"a217477e7bc3ef13c618facf6002f949c0a27a0af9cc61bfeaa620c42a9972c4"} Jan 22 07:24:20 crc kubenswrapper[4982]: I0122 07:24:20.297845 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a217477e7bc3ef13c618facf6002f949c0a27a0af9cc61bfeaa620c42a9972c4" Jan 22 07:24:20 crc kubenswrapper[4982]: I0122 07:24:20.297610 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-8c4e-account-create-update-kbxq5" Jan 22 07:24:22 crc kubenswrapper[4982]: I0122 07:24:22.047015 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-persistence-db-create-kxk2n"] Jan 22 07:24:22 crc kubenswrapper[4982]: E0122 07:24:22.047369 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68979bb0-36e1-4068-9700-f3eba7180048" containerName="mariadb-database-create" Jan 22 07:24:22 crc kubenswrapper[4982]: I0122 07:24:22.047381 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="68979bb0-36e1-4068-9700-f3eba7180048" containerName="mariadb-database-create" Jan 22 07:24:22 crc kubenswrapper[4982]: E0122 07:24:22.047391 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="063d2222-26e8-4ffb-8210-e198da9b286b" containerName="mariadb-account-create-update" Jan 22 07:24:22 crc kubenswrapper[4982]: I0122 07:24:22.047397 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="063d2222-26e8-4ffb-8210-e198da9b286b" containerName="mariadb-account-create-update" Jan 22 07:24:22 crc kubenswrapper[4982]: I0122 07:24:22.047574 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="063d2222-26e8-4ffb-8210-e198da9b286b" containerName="mariadb-account-create-update" Jan 22 07:24:22 crc kubenswrapper[4982]: I0122 07:24:22.047586 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="68979bb0-36e1-4068-9700-f3eba7180048" containerName="mariadb-database-create" Jan 22 07:24:22 crc kubenswrapper[4982]: I0122 07:24:22.048203 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-persistence-db-create-kxk2n" Jan 22 07:24:22 crc kubenswrapper[4982]: I0122 07:24:22.066574 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-persistence-db-create-kxk2n"] Jan 22 07:24:22 crc kubenswrapper[4982]: I0122 07:24:22.138780 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/198a37c2-63fa-4483-8874-af94a9f1f5de-operator-scripts\") pod \"octavia-persistence-db-create-kxk2n\" (UID: \"198a37c2-63fa-4483-8874-af94a9f1f5de\") " pod="openstack/octavia-persistence-db-create-kxk2n" Jan 22 07:24:22 crc kubenswrapper[4982]: I0122 07:24:22.139131 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmrdz\" (UniqueName: \"kubernetes.io/projected/198a37c2-63fa-4483-8874-af94a9f1f5de-kube-api-access-nmrdz\") pod \"octavia-persistence-db-create-kxk2n\" (UID: \"198a37c2-63fa-4483-8874-af94a9f1f5de\") " pod="openstack/octavia-persistence-db-create-kxk2n" Jan 22 07:24:22 crc kubenswrapper[4982]: I0122 07:24:22.242348 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/198a37c2-63fa-4483-8874-af94a9f1f5de-operator-scripts\") pod \"octavia-persistence-db-create-kxk2n\" (UID: \"198a37c2-63fa-4483-8874-af94a9f1f5de\") " pod="openstack/octavia-persistence-db-create-kxk2n" Jan 22 07:24:22 crc kubenswrapper[4982]: I0122 07:24:22.242453 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmrdz\" (UniqueName: \"kubernetes.io/projected/198a37c2-63fa-4483-8874-af94a9f1f5de-kube-api-access-nmrdz\") pod \"octavia-persistence-db-create-kxk2n\" (UID: \"198a37c2-63fa-4483-8874-af94a9f1f5de\") " pod="openstack/octavia-persistence-db-create-kxk2n" Jan 22 07:24:22 crc kubenswrapper[4982]: I0122 07:24:22.243279 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/198a37c2-63fa-4483-8874-af94a9f1f5de-operator-scripts\") pod \"octavia-persistence-db-create-kxk2n\" (UID: \"198a37c2-63fa-4483-8874-af94a9f1f5de\") " pod="openstack/octavia-persistence-db-create-kxk2n" Jan 22 07:24:22 crc kubenswrapper[4982]: I0122 07:24:22.264812 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmrdz\" (UniqueName: \"kubernetes.io/projected/198a37c2-63fa-4483-8874-af94a9f1f5de-kube-api-access-nmrdz\") pod \"octavia-persistence-db-create-kxk2n\" (UID: \"198a37c2-63fa-4483-8874-af94a9f1f5de\") " pod="openstack/octavia-persistence-db-create-kxk2n" Jan 22 07:24:22 crc kubenswrapper[4982]: I0122 07:24:22.365506 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-persistence-db-create-kxk2n" Jan 22 07:24:22 crc kubenswrapper[4982]: I0122 07:24:22.743338 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-fc79-account-create-update-s6n7r"] Jan 22 07:24:22 crc kubenswrapper[4982]: I0122 07:24:22.744663 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-fc79-account-create-update-s6n7r" Jan 22 07:24:22 crc kubenswrapper[4982]: I0122 07:24:22.746557 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-persistence-db-secret" Jan 22 07:24:22 crc kubenswrapper[4982]: I0122 07:24:22.753831 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-fc79-account-create-update-s6n7r"] Jan 22 07:24:22 crc kubenswrapper[4982]: I0122 07:24:22.754129 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72af17c0-2e69-455f-8418-bff72b08046b-operator-scripts\") pod \"octavia-fc79-account-create-update-s6n7r\" (UID: \"72af17c0-2e69-455f-8418-bff72b08046b\") " pod="openstack/octavia-fc79-account-create-update-s6n7r" Jan 22 07:24:22 crc kubenswrapper[4982]: I0122 07:24:22.754288 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n748c\" (UniqueName: \"kubernetes.io/projected/72af17c0-2e69-455f-8418-bff72b08046b-kube-api-access-n748c\") pod \"octavia-fc79-account-create-update-s6n7r\" (UID: \"72af17c0-2e69-455f-8418-bff72b08046b\") " pod="openstack/octavia-fc79-account-create-update-s6n7r" Jan 22 07:24:22 crc kubenswrapper[4982]: I0122 07:24:22.855312 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72af17c0-2e69-455f-8418-bff72b08046b-operator-scripts\") pod \"octavia-fc79-account-create-update-s6n7r\" (UID: \"72af17c0-2e69-455f-8418-bff72b08046b\") " pod="openstack/octavia-fc79-account-create-update-s6n7r" Jan 22 07:24:22 crc kubenswrapper[4982]: I0122 07:24:22.855409 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n748c\" (UniqueName: \"kubernetes.io/projected/72af17c0-2e69-455f-8418-bff72b08046b-kube-api-access-n748c\") pod \"octavia-fc79-account-create-update-s6n7r\" (UID: \"72af17c0-2e69-455f-8418-bff72b08046b\") " pod="openstack/octavia-fc79-account-create-update-s6n7r" Jan 22 07:24:22 crc kubenswrapper[4982]: I0122 07:24:22.856400 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72af17c0-2e69-455f-8418-bff72b08046b-operator-scripts\") pod \"octavia-fc79-account-create-update-s6n7r\" (UID: \"72af17c0-2e69-455f-8418-bff72b08046b\") " pod="openstack/octavia-fc79-account-create-update-s6n7r" Jan 22 07:24:22 crc kubenswrapper[4982]: I0122 07:24:22.856439 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-persistence-db-create-kxk2n"] Jan 22 07:24:22 crc kubenswrapper[4982]: I0122 07:24:22.874308 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n748c\" (UniqueName: \"kubernetes.io/projected/72af17c0-2e69-455f-8418-bff72b08046b-kube-api-access-n748c\") pod \"octavia-fc79-account-create-update-s6n7r\" (UID: \"72af17c0-2e69-455f-8418-bff72b08046b\") " pod="openstack/octavia-fc79-account-create-update-s6n7r" Jan 22 07:24:23 crc kubenswrapper[4982]: I0122 07:24:23.123703 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-fc79-account-create-update-s6n7r" Jan 22 07:24:23 crc kubenswrapper[4982]: I0122 07:24:23.323255 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-kxk2n" event={"ID":"198a37c2-63fa-4483-8874-af94a9f1f5de","Type":"ContainerStarted","Data":"98326b12ee68bb959a6916ffaba5f40ea6eb980e0c8a1e34f426a5e0ae051def"} Jan 22 07:24:23 crc kubenswrapper[4982]: I0122 07:24:23.323319 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-kxk2n" event={"ID":"198a37c2-63fa-4483-8874-af94a9f1f5de","Type":"ContainerStarted","Data":"6d9fab1d71a55d47455538c32d5f3eb5611ae5d09598e5043dc2bc940d411b85"} Jan 22 07:24:23 crc kubenswrapper[4982]: I0122 07:24:23.355417 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-persistence-db-create-kxk2n" podStartSLOduration=1.355392604 podStartE2EDuration="1.355392604s" podCreationTimestamp="2026-01-22 07:24:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:24:23.344431548 +0000 UTC m=+5924.183069551" watchObservedRunningTime="2026-01-22 07:24:23.355392604 +0000 UTC m=+5924.194030637" Jan 22 07:24:23 crc kubenswrapper[4982]: I0122 07:24:23.562034 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-fc79-account-create-update-s6n7r"] Jan 22 07:24:24 crc kubenswrapper[4982]: I0122 07:24:24.332420 4982 generic.go:334] "Generic (PLEG): container finished" podID="198a37c2-63fa-4483-8874-af94a9f1f5de" containerID="98326b12ee68bb959a6916ffaba5f40ea6eb980e0c8a1e34f426a5e0ae051def" exitCode=0 Jan 22 07:24:24 crc kubenswrapper[4982]: I0122 07:24:24.332698 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-kxk2n" event={"ID":"198a37c2-63fa-4483-8874-af94a9f1f5de","Type":"ContainerDied","Data":"98326b12ee68bb959a6916ffaba5f40ea6eb980e0c8a1e34f426a5e0ae051def"} Jan 22 07:24:24 crc kubenswrapper[4982]: I0122 07:24:24.333919 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-fc79-account-create-update-s6n7r" event={"ID":"72af17c0-2e69-455f-8418-bff72b08046b","Type":"ContainerStarted","Data":"2c38061cee1e806912955dd7af5909ad2b4d966df02f1c26d1c36cd41ba34912"} Jan 22 07:24:24 crc kubenswrapper[4982]: I0122 07:24:24.333972 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-fc79-account-create-update-s6n7r" event={"ID":"72af17c0-2e69-455f-8418-bff72b08046b","Type":"ContainerStarted","Data":"28d24f78a0609f216f3804a38bf03d9884affdcdbc8fd0b603b5815c67041a7d"} Jan 22 07:24:24 crc kubenswrapper[4982]: I0122 07:24:24.372261 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-fc79-account-create-update-s6n7r" podStartSLOduration=2.372243072 podStartE2EDuration="2.372243072s" podCreationTimestamp="2026-01-22 07:24:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:24:24.363451515 +0000 UTC m=+5925.202089518" watchObservedRunningTime="2026-01-22 07:24:24.372243072 +0000 UTC m=+5925.210881075" Jan 22 07:24:25 crc kubenswrapper[4982]: I0122 07:24:25.344968 4982 generic.go:334] "Generic (PLEG): container finished" podID="72af17c0-2e69-455f-8418-bff72b08046b" containerID="2c38061cee1e806912955dd7af5909ad2b4d966df02f1c26d1c36cd41ba34912" exitCode=0 Jan 22 
Jan 22 07:24:25 crc kubenswrapper[4982]: I0122 07:24:25.345064 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-fc79-account-create-update-s6n7r" event={"ID":"72af17c0-2e69-455f-8418-bff72b08046b","Type":"ContainerDied","Data":"2c38061cee1e806912955dd7af5909ad2b4d966df02f1c26d1c36cd41ba34912"}
Jan 22 07:24:25 crc kubenswrapper[4982]: I0122 07:24:25.744977 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-persistence-db-create-kxk2n"
Jan 22 07:24:25 crc kubenswrapper[4982]: I0122 07:24:25.912172 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/198a37c2-63fa-4483-8874-af94a9f1f5de-operator-scripts\") pod \"198a37c2-63fa-4483-8874-af94a9f1f5de\" (UID: \"198a37c2-63fa-4483-8874-af94a9f1f5de\") "
Jan 22 07:24:25 crc kubenswrapper[4982]: I0122 07:24:25.912923 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/198a37c2-63fa-4483-8874-af94a9f1f5de-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "198a37c2-63fa-4483-8874-af94a9f1f5de" (UID: "198a37c2-63fa-4483-8874-af94a9f1f5de"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 07:24:25 crc kubenswrapper[4982]: I0122 07:24:25.912995 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmrdz\" (UniqueName: \"kubernetes.io/projected/198a37c2-63fa-4483-8874-af94a9f1f5de-kube-api-access-nmrdz\") pod \"198a37c2-63fa-4483-8874-af94a9f1f5de\" (UID: \"198a37c2-63fa-4483-8874-af94a9f1f5de\") "
Jan 22 07:24:25 crc kubenswrapper[4982]: I0122 07:24:25.916958 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/198a37c2-63fa-4483-8874-af94a9f1f5de-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 07:24:25 crc kubenswrapper[4982]: I0122 07:24:25.922532 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/198a37c2-63fa-4483-8874-af94a9f1f5de-kube-api-access-nmrdz" (OuterVolumeSpecName: "kube-api-access-nmrdz") pod "198a37c2-63fa-4483-8874-af94a9f1f5de" (UID: "198a37c2-63fa-4483-8874-af94a9f1f5de"). InnerVolumeSpecName "kube-api-access-nmrdz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 07:24:26 crc kubenswrapper[4982]: I0122 07:24:26.018715 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmrdz\" (UniqueName: \"kubernetes.io/projected/198a37c2-63fa-4483-8874-af94a9f1f5de-kube-api-access-nmrdz\") on node \"crc\" DevicePath \"\""
Jan 22 07:24:26 crc kubenswrapper[4982]: I0122 07:24:26.354487 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-kxk2n" event={"ID":"198a37c2-63fa-4483-8874-af94a9f1f5de","Type":"ContainerDied","Data":"6d9fab1d71a55d47455538c32d5f3eb5611ae5d09598e5043dc2bc940d411b85"}
Jan 22 07:24:26 crc kubenswrapper[4982]: I0122 07:24:26.354722 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6d9fab1d71a55d47455538c32d5f3eb5611ae5d09598e5043dc2bc940d411b85"
Jan 22 07:24:26 crc kubenswrapper[4982]: I0122 07:24:26.354804 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-persistence-db-create-kxk2n"
Jan 22 07:24:26 crc kubenswrapper[4982]: I0122 07:24:26.748030 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-fc79-account-create-update-s6n7r"
Jan 22 07:24:26 crc kubenswrapper[4982]: I0122 07:24:26.839301 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72af17c0-2e69-455f-8418-bff72b08046b-operator-scripts\") pod \"72af17c0-2e69-455f-8418-bff72b08046b\" (UID: \"72af17c0-2e69-455f-8418-bff72b08046b\") "
Jan 22 07:24:26 crc kubenswrapper[4982]: I0122 07:24:26.839402 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n748c\" (UniqueName: \"kubernetes.io/projected/72af17c0-2e69-455f-8418-bff72b08046b-kube-api-access-n748c\") pod \"72af17c0-2e69-455f-8418-bff72b08046b\" (UID: \"72af17c0-2e69-455f-8418-bff72b08046b\") "
Jan 22 07:24:26 crc kubenswrapper[4982]: I0122 07:24:26.840377 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72af17c0-2e69-455f-8418-bff72b08046b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "72af17c0-2e69-455f-8418-bff72b08046b" (UID: "72af17c0-2e69-455f-8418-bff72b08046b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 07:24:26 crc kubenswrapper[4982]: I0122 07:24:26.844573 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72af17c0-2e69-455f-8418-bff72b08046b-kube-api-access-n748c" (OuterVolumeSpecName: "kube-api-access-n748c") pod "72af17c0-2e69-455f-8418-bff72b08046b" (UID: "72af17c0-2e69-455f-8418-bff72b08046b"). InnerVolumeSpecName "kube-api-access-n748c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 07:24:26 crc kubenswrapper[4982]: I0122 07:24:26.940788 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72af17c0-2e69-455f-8418-bff72b08046b-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 07:24:26 crc kubenswrapper[4982]: I0122 07:24:26.940821 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n748c\" (UniqueName: \"kubernetes.io/projected/72af17c0-2e69-455f-8418-bff72b08046b-kube-api-access-n748c\") on node \"crc\" DevicePath \"\""
Jan 22 07:24:27 crc kubenswrapper[4982]: I0122 07:24:27.363672 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-fc79-account-create-update-s6n7r" event={"ID":"72af17c0-2e69-455f-8418-bff72b08046b","Type":"ContainerDied","Data":"28d24f78a0609f216f3804a38bf03d9884affdcdbc8fd0b603b5815c67041a7d"}
Jan 22 07:24:27 crc kubenswrapper[4982]: I0122 07:24:27.364158 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="28d24f78a0609f216f3804a38bf03d9884affdcdbc8fd0b603b5815c67041a7d"
Jan 22 07:24:27 crc kubenswrapper[4982]: I0122 07:24:27.363709 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-fc79-account-create-update-s6n7r"
Need to start a new one" pod="openstack/octavia-fc79-account-create-update-s6n7r" Jan 22 07:24:34 crc kubenswrapper[4982]: I0122 07:24:34.385955 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-api-6747f58b9b-2xqnm"] Jan 22 07:24:34 crc kubenswrapper[4982]: E0122 07:24:34.386816 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72af17c0-2e69-455f-8418-bff72b08046b" containerName="mariadb-account-create-update" Jan 22 07:24:34 crc kubenswrapper[4982]: I0122 07:24:34.386830 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="72af17c0-2e69-455f-8418-bff72b08046b" containerName="mariadb-account-create-update" Jan 22 07:24:34 crc kubenswrapper[4982]: E0122 07:24:34.386877 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="198a37c2-63fa-4483-8874-af94a9f1f5de" containerName="mariadb-database-create" Jan 22 07:24:34 crc kubenswrapper[4982]: I0122 07:24:34.386884 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="198a37c2-63fa-4483-8874-af94a9f1f5de" containerName="mariadb-database-create" Jan 22 07:24:34 crc kubenswrapper[4982]: I0122 07:24:34.388154 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="72af17c0-2e69-455f-8418-bff72b08046b" containerName="mariadb-account-create-update" Jan 22 07:24:34 crc kubenswrapper[4982]: I0122 07:24:34.388200 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="198a37c2-63fa-4483-8874-af94a9f1f5de" containerName="mariadb-database-create" Jan 22 07:24:34 crc kubenswrapper[4982]: I0122 07:24:34.412168 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-api-6747f58b9b-2xqnm" Jan 22 07:24:34 crc kubenswrapper[4982]: I0122 07:24:34.414352 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-api-config-data" Jan 22 07:24:34 crc kubenswrapper[4982]: I0122 07:24:34.418633 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-octavia-dockercfg-5b244" Jan 22 07:24:34 crc kubenswrapper[4982]: I0122 07:24:34.419243 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-api-scripts" Jan 22 07:24:34 crc kubenswrapper[4982]: I0122 07:24:34.426276 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-api-6747f58b9b-2xqnm"] Jan 22 07:24:34 crc kubenswrapper[4982]: I0122 07:24:34.590535 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/4e1c0c86-a174-408f-bc49-d4cfca199c8a-octavia-run\") pod \"octavia-api-6747f58b9b-2xqnm\" (UID: \"4e1c0c86-a174-408f-bc49-d4cfca199c8a\") " pod="openstack/octavia-api-6747f58b9b-2xqnm" Jan 22 07:24:34 crc kubenswrapper[4982]: I0122 07:24:34.590612 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e1c0c86-a174-408f-bc49-d4cfca199c8a-scripts\") pod \"octavia-api-6747f58b9b-2xqnm\" (UID: \"4e1c0c86-a174-408f-bc49-d4cfca199c8a\") " pod="openstack/octavia-api-6747f58b9b-2xqnm" Jan 22 07:24:34 crc kubenswrapper[4982]: I0122 07:24:34.590874 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e1c0c86-a174-408f-bc49-d4cfca199c8a-config-data\") pod \"octavia-api-6747f58b9b-2xqnm\" (UID: \"4e1c0c86-a174-408f-bc49-d4cfca199c8a\") " 
pod="openstack/octavia-api-6747f58b9b-2xqnm" Jan 22 07:24:34 crc kubenswrapper[4982]: I0122 07:24:34.591049 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e1c0c86-a174-408f-bc49-d4cfca199c8a-combined-ca-bundle\") pod \"octavia-api-6747f58b9b-2xqnm\" (UID: \"4e1c0c86-a174-408f-bc49-d4cfca199c8a\") " pod="openstack/octavia-api-6747f58b9b-2xqnm" Jan 22 07:24:34 crc kubenswrapper[4982]: I0122 07:24:34.591681 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/4e1c0c86-a174-408f-bc49-d4cfca199c8a-config-data-merged\") pod \"octavia-api-6747f58b9b-2xqnm\" (UID: \"4e1c0c86-a174-408f-bc49-d4cfca199c8a\") " pod="openstack/octavia-api-6747f58b9b-2xqnm" Jan 22 07:24:34 crc kubenswrapper[4982]: I0122 07:24:34.692912 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e1c0c86-a174-408f-bc49-d4cfca199c8a-combined-ca-bundle\") pod \"octavia-api-6747f58b9b-2xqnm\" (UID: \"4e1c0c86-a174-408f-bc49-d4cfca199c8a\") " pod="openstack/octavia-api-6747f58b9b-2xqnm" Jan 22 07:24:34 crc kubenswrapper[4982]: I0122 07:24:34.693060 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/4e1c0c86-a174-408f-bc49-d4cfca199c8a-config-data-merged\") pod \"octavia-api-6747f58b9b-2xqnm\" (UID: \"4e1c0c86-a174-408f-bc49-d4cfca199c8a\") " pod="openstack/octavia-api-6747f58b9b-2xqnm" Jan 22 07:24:34 crc kubenswrapper[4982]: I0122 07:24:34.693102 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/4e1c0c86-a174-408f-bc49-d4cfca199c8a-octavia-run\") pod \"octavia-api-6747f58b9b-2xqnm\" (UID: \"4e1c0c86-a174-408f-bc49-d4cfca199c8a\") " pod="openstack/octavia-api-6747f58b9b-2xqnm" Jan 22 07:24:34 crc kubenswrapper[4982]: I0122 07:24:34.693145 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e1c0c86-a174-408f-bc49-d4cfca199c8a-scripts\") pod \"octavia-api-6747f58b9b-2xqnm\" (UID: \"4e1c0c86-a174-408f-bc49-d4cfca199c8a\") " pod="openstack/octavia-api-6747f58b9b-2xqnm" Jan 22 07:24:34 crc kubenswrapper[4982]: I0122 07:24:34.693200 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e1c0c86-a174-408f-bc49-d4cfca199c8a-config-data\") pod \"octavia-api-6747f58b9b-2xqnm\" (UID: \"4e1c0c86-a174-408f-bc49-d4cfca199c8a\") " pod="openstack/octavia-api-6747f58b9b-2xqnm" Jan 22 07:24:34 crc kubenswrapper[4982]: I0122 07:24:34.693594 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/4e1c0c86-a174-408f-bc49-d4cfca199c8a-config-data-merged\") pod \"octavia-api-6747f58b9b-2xqnm\" (UID: \"4e1c0c86-a174-408f-bc49-d4cfca199c8a\") " pod="openstack/octavia-api-6747f58b9b-2xqnm" Jan 22 07:24:34 crc kubenswrapper[4982]: I0122 07:24:34.693655 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/4e1c0c86-a174-408f-bc49-d4cfca199c8a-octavia-run\") pod \"octavia-api-6747f58b9b-2xqnm\" (UID: \"4e1c0c86-a174-408f-bc49-d4cfca199c8a\") " 
pod="openstack/octavia-api-6747f58b9b-2xqnm" Jan 22 07:24:34 crc kubenswrapper[4982]: I0122 07:24:34.707747 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e1c0c86-a174-408f-bc49-d4cfca199c8a-combined-ca-bundle\") pod \"octavia-api-6747f58b9b-2xqnm\" (UID: \"4e1c0c86-a174-408f-bc49-d4cfca199c8a\") " pod="openstack/octavia-api-6747f58b9b-2xqnm" Jan 22 07:24:34 crc kubenswrapper[4982]: I0122 07:24:34.709471 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e1c0c86-a174-408f-bc49-d4cfca199c8a-scripts\") pod \"octavia-api-6747f58b9b-2xqnm\" (UID: \"4e1c0c86-a174-408f-bc49-d4cfca199c8a\") " pod="openstack/octavia-api-6747f58b9b-2xqnm" Jan 22 07:24:34 crc kubenswrapper[4982]: I0122 07:24:34.709703 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e1c0c86-a174-408f-bc49-d4cfca199c8a-config-data\") pod \"octavia-api-6747f58b9b-2xqnm\" (UID: \"4e1c0c86-a174-408f-bc49-d4cfca199c8a\") " pod="openstack/octavia-api-6747f58b9b-2xqnm" Jan 22 07:24:34 crc kubenswrapper[4982]: I0122 07:24:34.737073 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-api-6747f58b9b-2xqnm" Jan 22 07:24:35 crc kubenswrapper[4982]: I0122 07:24:35.224343 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-api-6747f58b9b-2xqnm"] Jan 22 07:24:35 crc kubenswrapper[4982]: I0122 07:24:35.452819 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-6747f58b9b-2xqnm" event={"ID":"4e1c0c86-a174-408f-bc49-d4cfca199c8a","Type":"ContainerStarted","Data":"49d72af9cced0c08220ae524ebd4cdd987dfda114ae4d8637afe642d356a4e6a"} Jan 22 07:24:46 crc kubenswrapper[4982]: I0122 07:24:46.557402 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-6747f58b9b-2xqnm" event={"ID":"4e1c0c86-a174-408f-bc49-d4cfca199c8a","Type":"ContainerStarted","Data":"d9a881fc8c5c7674424ec3e7f011fb1ed437db829e41e468a44a2c6c8cbc7c27"} Jan 22 07:24:46 crc kubenswrapper[4982]: E0122 07:24:46.838539 4982 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4e1c0c86_a174_408f_bc49_d4cfca199c8a.slice/crio-conmon-d9a881fc8c5c7674424ec3e7f011fb1ed437db829e41e468a44a2c6c8cbc7c27.scope\": RecentStats: unable to find data in memory cache]" Jan 22 07:24:47 crc kubenswrapper[4982]: I0122 07:24:47.568469 4982 generic.go:334] "Generic (PLEG): container finished" podID="4e1c0c86-a174-408f-bc49-d4cfca199c8a" containerID="d9a881fc8c5c7674424ec3e7f011fb1ed437db829e41e468a44a2c6c8cbc7c27" exitCode=0 Jan 22 07:24:47 crc kubenswrapper[4982]: I0122 07:24:47.568659 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-6747f58b9b-2xqnm" event={"ID":"4e1c0c86-a174-408f-bc49-d4cfca199c8a","Type":"ContainerDied","Data":"d9a881fc8c5c7674424ec3e7f011fb1ed437db829e41e468a44a2c6c8cbc7c27"} Jan 22 07:24:47 crc kubenswrapper[4982]: I0122 07:24:47.841646 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-twjrm" podUID="4b1018e3-51c0-493b-b135-aac6ea8d246c" containerName="ovn-controller" probeResult="failure" output=< Jan 22 07:24:47 crc kubenswrapper[4982]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Jan 22 07:24:47 crc 
Jan 22 07:24:47 crc kubenswrapper[4982]: I0122 07:24:47.868385 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-qmkb2"
Jan 22 07:24:47 crc kubenswrapper[4982]: I0122 07:24:47.869012 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-qmkb2"
Jan 22 07:24:48 crc kubenswrapper[4982]: I0122 07:24:48.040643 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-twjrm-config-grndc"]
Jan 22 07:24:48 crc kubenswrapper[4982]: I0122 07:24:48.054876 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-twjrm-config-grndc"]
Jan 22 07:24:48 crc kubenswrapper[4982]: I0122 07:24:48.054974 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-twjrm-config-grndc"
Jan 22 07:24:48 crc kubenswrapper[4982]: I0122 07:24:48.066769 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts"
Jan 22 07:24:48 crc kubenswrapper[4982]: I0122 07:24:48.184062 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-scripts\") pod \"ovn-controller-twjrm-config-grndc\" (UID: \"1f588293-fa3f-4b9f-8c7f-aa3483e3d952\") " pod="openstack/ovn-controller-twjrm-config-grndc"
Jan 22 07:24:48 crc kubenswrapper[4982]: I0122 07:24:48.184141 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-var-log-ovn\") pod \"ovn-controller-twjrm-config-grndc\" (UID: \"1f588293-fa3f-4b9f-8c7f-aa3483e3d952\") " pod="openstack/ovn-controller-twjrm-config-grndc"
Jan 22 07:24:48 crc kubenswrapper[4982]: I0122 07:24:48.184217 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-var-run\") pod \"ovn-controller-twjrm-config-grndc\" (UID: \"1f588293-fa3f-4b9f-8c7f-aa3483e3d952\") " pod="openstack/ovn-controller-twjrm-config-grndc"
Jan 22 07:24:48 crc kubenswrapper[4982]: I0122 07:24:48.184269 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpqpr\" (UniqueName: \"kubernetes.io/projected/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-kube-api-access-dpqpr\") pod \"ovn-controller-twjrm-config-grndc\" (UID: \"1f588293-fa3f-4b9f-8c7f-aa3483e3d952\") " pod="openstack/ovn-controller-twjrm-config-grndc"
Jan 22 07:24:48 crc kubenswrapper[4982]: I0122 07:24:48.184327 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-additional-scripts\") pod \"ovn-controller-twjrm-config-grndc\" (UID: \"1f588293-fa3f-4b9f-8c7f-aa3483e3d952\") " pod="openstack/ovn-controller-twjrm-config-grndc"
Jan 22 07:24:48 crc kubenswrapper[4982]: I0122 07:24:48.184363 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-var-run-ovn\") pod \"ovn-controller-twjrm-config-grndc\" (UID: \"1f588293-fa3f-4b9f-8c7f-aa3483e3d952\") " pod="openstack/ovn-controller-twjrm-config-grndc"
Jan 22 07:24:48 crc kubenswrapper[4982]: I0122 07:24:48.286099 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-additional-scripts\") pod \"ovn-controller-twjrm-config-grndc\" (UID: \"1f588293-fa3f-4b9f-8c7f-aa3483e3d952\") " pod="openstack/ovn-controller-twjrm-config-grndc"
Jan 22 07:24:48 crc kubenswrapper[4982]: I0122 07:24:48.287028 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-additional-scripts\") pod \"ovn-controller-twjrm-config-grndc\" (UID: \"1f588293-fa3f-4b9f-8c7f-aa3483e3d952\") " pod="openstack/ovn-controller-twjrm-config-grndc"
Jan 22 07:24:48 crc kubenswrapper[4982]: I0122 07:24:48.287112 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-var-run-ovn\") pod \"ovn-controller-twjrm-config-grndc\" (UID: \"1f588293-fa3f-4b9f-8c7f-aa3483e3d952\") " pod="openstack/ovn-controller-twjrm-config-grndc"
Jan 22 07:24:48 crc kubenswrapper[4982]: I0122 07:24:48.287321 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-scripts\") pod \"ovn-controller-twjrm-config-grndc\" (UID: \"1f588293-fa3f-4b9f-8c7f-aa3483e3d952\") " pod="openstack/ovn-controller-twjrm-config-grndc"
Jan 22 07:24:48 crc kubenswrapper[4982]: I0122 07:24:48.287740 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-var-run-ovn\") pod \"ovn-controller-twjrm-config-grndc\" (UID: \"1f588293-fa3f-4b9f-8c7f-aa3483e3d952\") " pod="openstack/ovn-controller-twjrm-config-grndc"
Jan 22 07:24:48 crc kubenswrapper[4982]: I0122 07:24:48.289431 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-scripts\") pod \"ovn-controller-twjrm-config-grndc\" (UID: \"1f588293-fa3f-4b9f-8c7f-aa3483e3d952\") " pod="openstack/ovn-controller-twjrm-config-grndc"
Jan 22 07:24:48 crc kubenswrapper[4982]: I0122 07:24:48.289504 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-var-log-ovn\") pod \"ovn-controller-twjrm-config-grndc\" (UID: \"1f588293-fa3f-4b9f-8c7f-aa3483e3d952\") " pod="openstack/ovn-controller-twjrm-config-grndc"
Jan 22 07:24:48 crc kubenswrapper[4982]: I0122 07:24:48.289613 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-var-log-ovn\") pod \"ovn-controller-twjrm-config-grndc\" (UID: \"1f588293-fa3f-4b9f-8c7f-aa3483e3d952\") " pod="openstack/ovn-controller-twjrm-config-grndc"
Jan 22 07:24:48 crc kubenswrapper[4982]: I0122 07:24:48.289720 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-var-run\") pod \"ovn-controller-twjrm-config-grndc\" (UID: \"1f588293-fa3f-4b9f-8c7f-aa3483e3d952\") " pod="openstack/ovn-controller-twjrm-config-grndc"
Jan 22 07:24:48 crc kubenswrapper[4982]: I0122 07:24:48.289755 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpqpr\" (UniqueName: \"kubernetes.io/projected/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-kube-api-access-dpqpr\") pod \"ovn-controller-twjrm-config-grndc\" (UID: \"1f588293-fa3f-4b9f-8c7f-aa3483e3d952\") " pod="openstack/ovn-controller-twjrm-config-grndc"
Jan 22 07:24:48 crc kubenswrapper[4982]: I0122 07:24:48.289860 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-var-run\") pod \"ovn-controller-twjrm-config-grndc\" (UID: \"1f588293-fa3f-4b9f-8c7f-aa3483e3d952\") " pod="openstack/ovn-controller-twjrm-config-grndc"
Jan 22 07:24:48 crc kubenswrapper[4982]: I0122 07:24:48.326716 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dpqpr\" (UniqueName: \"kubernetes.io/projected/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-kube-api-access-dpqpr\") pod \"ovn-controller-twjrm-config-grndc\" (UID: \"1f588293-fa3f-4b9f-8c7f-aa3483e3d952\") " pod="openstack/ovn-controller-twjrm-config-grndc"
Jan 22 07:24:48 crc kubenswrapper[4982]: I0122 07:24:48.400309 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-twjrm-config-grndc"
Jan 22 07:24:48 crc kubenswrapper[4982]: I0122 07:24:48.609963 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-6747f58b9b-2xqnm" event={"ID":"4e1c0c86-a174-408f-bc49-d4cfca199c8a","Type":"ContainerStarted","Data":"f82b34adf8b8f72d43c917e6c88422dd2f0a9fa403094a47e5db7db1d384c37d"}
Jan 22 07:24:48 crc kubenswrapper[4982]: I0122 07:24:48.974441 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 07:24:48 crc kubenswrapper[4982]: I0122 07:24:48.974845 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 07:24:48 crc kubenswrapper[4982]: I0122 07:24:48.990886 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-twjrm-config-grndc"]
Jan 22 07:24:49 crc kubenswrapper[4982]: W0122 07:24:49.013018 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1f588293_fa3f_4b9f_8c7f_aa3483e3d952.slice/crio-2199c617b5a200ee5bc1c2af41b916af6f5c17628cff6b6dbbb40f3f460c8257 WatchSource:0}: Error finding container 2199c617b5a200ee5bc1c2af41b916af6f5c17628cff6b6dbbb40f3f460c8257: Status 404 returned error can't find the container with id 2199c617b5a200ee5bc1c2af41b916af6f5c17628cff6b6dbbb40f3f460c8257
Jan 22 07:24:49 crc kubenswrapper[4982]: I0122 07:24:49.620616 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-twjrm-config-grndc" event={"ID":"1f588293-fa3f-4b9f-8c7f-aa3483e3d952","Type":"ContainerStarted","Data":"2199c617b5a200ee5bc1c2af41b916af6f5c17628cff6b6dbbb40f3f460c8257"}
Jan 22 07:24:49 crc kubenswrapper[4982]: I0122 07:24:49.624043 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-6747f58b9b-2xqnm" event={"ID":"4e1c0c86-a174-408f-bc49-d4cfca199c8a","Type":"ContainerStarted","Data":"d2f57a75cca16537cb1368d515ebe0577e9080c4448f24dde058347258d3c88d"}
Jan 22 07:24:49 crc kubenswrapper[4982]: I0122 07:24:49.624370 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-api-6747f58b9b-2xqnm"
Jan 22 07:24:49 crc kubenswrapper[4982]: I0122 07:24:49.624411 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-api-6747f58b9b-2xqnm"
Jan 22 07:24:49 crc kubenswrapper[4982]: I0122 07:24:49.645193 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-api-6747f58b9b-2xqnm" podStartSLOduration=4.778057322 podStartE2EDuration="15.645172307s" podCreationTimestamp="2026-01-22 07:24:34 +0000 UTC" firstStartedPulling="2026-01-22 07:24:35.230894148 +0000 UTC m=+5936.069532151" lastFinishedPulling="2026-01-22 07:24:46.098009133 +0000 UTC m=+5946.936647136" observedRunningTime="2026-01-22 07:24:49.642630148 +0000 UTC m=+5950.481268161" watchObservedRunningTime="2026-01-22 07:24:49.645172307 +0000 UTC m=+5950.483810310"
Jan 22 07:24:52 crc kubenswrapper[4982]: I0122 07:24:52.861673 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-twjrm" podUID="4b1018e3-51c0-493b-b135-aac6ea8d246c" containerName="ovn-controller" probeResult="failure" output=<
Jan 22 07:24:52 crc kubenswrapper[4982]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Jan 22 07:24:52 crc kubenswrapper[4982]: >
Jan 22 07:24:53 crc kubenswrapper[4982]: I0122 07:24:53.659659 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-twjrm-config-grndc" event={"ID":"1f588293-fa3f-4b9f-8c7f-aa3483e3d952","Type":"ContainerStarted","Data":"b35ddf4ca6a5a5256d6181255cb43f80c16cdc208c5f2fba1aa41b0724f2973f"}
Jan 22 07:24:53 crc kubenswrapper[4982]: I0122 07:24:53.680047 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-twjrm-config-grndc" podStartSLOduration=5.680027921 podStartE2EDuration="5.680027921s" podCreationTimestamp="2026-01-22 07:24:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:24:53.678173911 +0000 UTC m=+5954.516811944" watchObservedRunningTime="2026-01-22 07:24:53.680027921 +0000 UTC m=+5954.518665924"
Jan 22 07:24:54 crc kubenswrapper[4982]: I0122 07:24:54.670243 4982 generic.go:334] "Generic (PLEG): container finished" podID="1f588293-fa3f-4b9f-8c7f-aa3483e3d952" containerID="b35ddf4ca6a5a5256d6181255cb43f80c16cdc208c5f2fba1aa41b0724f2973f" exitCode=0
Jan 22 07:24:54 crc kubenswrapper[4982]: I0122 07:24:54.670298 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-twjrm-config-grndc" event={"ID":"1f588293-fa3f-4b9f-8c7f-aa3483e3d952","Type":"ContainerDied","Data":"b35ddf4ca6a5a5256d6181255cb43f80c16cdc208c5f2fba1aa41b0724f2973f"}
Jan 22 07:24:56 crc kubenswrapper[4982]: I0122 07:24:56.142553 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-twjrm-config-grndc"
Jan 22 07:24:56 crc kubenswrapper[4982]: I0122 07:24:56.259515 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-var-log-ovn\") pod \"1f588293-fa3f-4b9f-8c7f-aa3483e3d952\" (UID: \"1f588293-fa3f-4b9f-8c7f-aa3483e3d952\") "
Jan 22 07:24:56 crc kubenswrapper[4982]: I0122 07:24:56.259591 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-var-run-ovn\") pod \"1f588293-fa3f-4b9f-8c7f-aa3483e3d952\" (UID: \"1f588293-fa3f-4b9f-8c7f-aa3483e3d952\") "
Jan 22 07:24:56 crc kubenswrapper[4982]: I0122 07:24:56.259623 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "1f588293-fa3f-4b9f-8c7f-aa3483e3d952" (UID: "1f588293-fa3f-4b9f-8c7f-aa3483e3d952"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 22 07:24:56 crc kubenswrapper[4982]: I0122 07:24:56.259646 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-additional-scripts\") pod \"1f588293-fa3f-4b9f-8c7f-aa3483e3d952\" (UID: \"1f588293-fa3f-4b9f-8c7f-aa3483e3d952\") "
Jan 22 07:24:56 crc kubenswrapper[4982]: I0122 07:24:56.259681 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "1f588293-fa3f-4b9f-8c7f-aa3483e3d952" (UID: "1f588293-fa3f-4b9f-8c7f-aa3483e3d952"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 22 07:24:56 crc kubenswrapper[4982]: I0122 07:24:56.259738 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-scripts\") pod \"1f588293-fa3f-4b9f-8c7f-aa3483e3d952\" (UID: \"1f588293-fa3f-4b9f-8c7f-aa3483e3d952\") "
Jan 22 07:24:56 crc kubenswrapper[4982]: I0122 07:24:56.259957 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dpqpr\" (UniqueName: \"kubernetes.io/projected/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-kube-api-access-dpqpr\") pod \"1f588293-fa3f-4b9f-8c7f-aa3483e3d952\" (UID: \"1f588293-fa3f-4b9f-8c7f-aa3483e3d952\") "
Jan 22 07:24:56 crc kubenswrapper[4982]: I0122 07:24:56.260114 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-var-run\") pod \"1f588293-fa3f-4b9f-8c7f-aa3483e3d952\" (UID: \"1f588293-fa3f-4b9f-8c7f-aa3483e3d952\") "
Jan 22 07:24:56 crc kubenswrapper[4982]: I0122 07:24:56.260298 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-var-run" (OuterVolumeSpecName: "var-run") pod "1f588293-fa3f-4b9f-8c7f-aa3483e3d952" (UID: "1f588293-fa3f-4b9f-8c7f-aa3483e3d952"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 22 07:24:56 crc kubenswrapper[4982]: I0122 07:24:56.260559 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "1f588293-fa3f-4b9f-8c7f-aa3483e3d952" (UID: "1f588293-fa3f-4b9f-8c7f-aa3483e3d952"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 07:24:56 crc kubenswrapper[4982]: I0122 07:24:56.260813 4982 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-var-log-ovn\") on node \"crc\" DevicePath \"\""
Jan 22 07:24:56 crc kubenswrapper[4982]: I0122 07:24:56.260838 4982 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-var-run-ovn\") on node \"crc\" DevicePath \"\""
Jan 22 07:24:56 crc kubenswrapper[4982]: I0122 07:24:56.260874 4982 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-additional-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 07:24:56 crc kubenswrapper[4982]: I0122 07:24:56.260889 4982 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-var-run\") on node \"crc\" DevicePath \"\""
Jan 22 07:24:56 crc kubenswrapper[4982]: I0122 07:24:56.261190 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-scripts" (OuterVolumeSpecName: "scripts") pod "1f588293-fa3f-4b9f-8c7f-aa3483e3d952" (UID: "1f588293-fa3f-4b9f-8c7f-aa3483e3d952"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 07:24:56 crc kubenswrapper[4982]: I0122 07:24:56.274008 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-kube-api-access-dpqpr" (OuterVolumeSpecName: "kube-api-access-dpqpr") pod "1f588293-fa3f-4b9f-8c7f-aa3483e3d952" (UID: "1f588293-fa3f-4b9f-8c7f-aa3483e3d952"). InnerVolumeSpecName "kube-api-access-dpqpr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 07:24:56 crc kubenswrapper[4982]: I0122 07:24:56.362713 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 07:24:56 crc kubenswrapper[4982]: I0122 07:24:56.362749 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dpqpr\" (UniqueName: \"kubernetes.io/projected/1f588293-fa3f-4b9f-8c7f-aa3483e3d952-kube-api-access-dpqpr\") on node \"crc\" DevicePath \"\""
Jan 22 07:24:56 crc kubenswrapper[4982]: I0122 07:24:56.695247 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-twjrm-config-grndc" event={"ID":"1f588293-fa3f-4b9f-8c7f-aa3483e3d952","Type":"ContainerDied","Data":"2199c617b5a200ee5bc1c2af41b916af6f5c17628cff6b6dbbb40f3f460c8257"}
Jan 22 07:24:56 crc kubenswrapper[4982]: I0122 07:24:56.695293 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2199c617b5a200ee5bc1c2af41b916af6f5c17628cff6b6dbbb40f3f460c8257"
Jan 22 07:24:56 crc kubenswrapper[4982]: I0122 07:24:56.695361 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-twjrm-config-grndc"
Jan 22 07:24:56 crc kubenswrapper[4982]: I0122 07:24:56.773364 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-twjrm-config-grndc"]
Jan 22 07:24:56 crc kubenswrapper[4982]: I0122 07:24:56.784922 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-twjrm-config-grndc"]
Jan 22 07:24:57 crc kubenswrapper[4982]: I0122 07:24:57.739257 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f588293-fa3f-4b9f-8c7f-aa3483e3d952" path="/var/lib/kubelet/pods/1f588293-fa3f-4b9f-8c7f-aa3483e3d952/volumes"
Jan 22 07:24:57 crc kubenswrapper[4982]: I0122 07:24:57.860600 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-twjrm"
Jan 22 07:25:04 crc kubenswrapper[4982]: I0122 07:25:04.243047 4982 scope.go:117] "RemoveContainer" containerID="f5cd44ff5f870adef672c322952c2739c2eab112842978eb06f6db5aa5b2fe21"
Jan 22 07:25:09 crc kubenswrapper[4982]: I0122 07:25:09.583502 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-api-6747f58b9b-2xqnm"
Jan 22 07:25:09 crc kubenswrapper[4982]: I0122 07:25:09.651278 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-api-6747f58b9b-2xqnm"
Jan 22 07:25:18 crc kubenswrapper[4982]: I0122 07:25:18.974475 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 07:25:18 crc kubenswrapper[4982]: I0122 07:25:18.975090 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 07:25:25 crc kubenswrapper[4982]: I0122 07:25:25.677173 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-rsyslog-5jnkz"]
Jan 22 07:25:25 crc kubenswrapper[4982]: E0122 07:25:25.678529 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f588293-fa3f-4b9f-8c7f-aa3483e3d952" containerName="ovn-config"
Jan 22 07:25:25 crc kubenswrapper[4982]: I0122 07:25:25.678545 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f588293-fa3f-4b9f-8c7f-aa3483e3d952" containerName="ovn-config"
Jan 22 07:25:25 crc kubenswrapper[4982]: I0122 07:25:25.678716 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f588293-fa3f-4b9f-8c7f-aa3483e3d952" containerName="ovn-config"
Jan 22 07:25:25 crc kubenswrapper[4982]: I0122 07:25:25.679702 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-rsyslog-5jnkz"
Jan 22 07:25:25 crc kubenswrapper[4982]: I0122 07:25:25.682692 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-rsyslog-config-data"
Jan 22 07:25:25 crc kubenswrapper[4982]: I0122 07:25:25.682755 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"octavia-hmport-map"
Jan 22 07:25:25 crc kubenswrapper[4982]: I0122 07:25:25.683021 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-rsyslog-scripts"
Jan 22 07:25:25 crc kubenswrapper[4982]: I0122 07:25:25.701270 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-rsyslog-5jnkz"]
Jan 22 07:25:25 crc kubenswrapper[4982]: I0122 07:25:25.825575 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/4475df5c-51e9-47ec-9ff8-6030b5b7377e-config-data-merged\") pod \"octavia-rsyslog-5jnkz\" (UID: \"4475df5c-51e9-47ec-9ff8-6030b5b7377e\") " pod="openstack/octavia-rsyslog-5jnkz"
Jan 22 07:25:25 crc kubenswrapper[4982]: I0122 07:25:25.825650 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4475df5c-51e9-47ec-9ff8-6030b5b7377e-scripts\") pod \"octavia-rsyslog-5jnkz\" (UID: \"4475df5c-51e9-47ec-9ff8-6030b5b7377e\") " pod="openstack/octavia-rsyslog-5jnkz"
Jan 22 07:25:25 crc kubenswrapper[4982]: I0122 07:25:25.826019 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/4475df5c-51e9-47ec-9ff8-6030b5b7377e-hm-ports\") pod \"octavia-rsyslog-5jnkz\" (UID: \"4475df5c-51e9-47ec-9ff8-6030b5b7377e\") " pod="openstack/octavia-rsyslog-5jnkz"
Jan 22 07:25:25 crc kubenswrapper[4982]: I0122 07:25:25.826447 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4475df5c-51e9-47ec-9ff8-6030b5b7377e-config-data\") pod \"octavia-rsyslog-5jnkz\" (UID: \"4475df5c-51e9-47ec-9ff8-6030b5b7377e\") " pod="openstack/octavia-rsyslog-5jnkz"
Jan 22 07:25:25 crc kubenswrapper[4982]: I0122 07:25:25.928640 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4475df5c-51e9-47ec-9ff8-6030b5b7377e-config-data\") pod \"octavia-rsyslog-5jnkz\" (UID: \"4475df5c-51e9-47ec-9ff8-6030b5b7377e\") " pod="openstack/octavia-rsyslog-5jnkz"
Jan 22 07:25:25 crc kubenswrapper[4982]: I0122 07:25:25.928708 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/4475df5c-51e9-47ec-9ff8-6030b5b7377e-config-data-merged\") pod \"octavia-rsyslog-5jnkz\" (UID: \"4475df5c-51e9-47ec-9ff8-6030b5b7377e\") " pod="openstack/octavia-rsyslog-5jnkz"
Jan 22 07:25:25 crc kubenswrapper[4982]: I0122 07:25:25.928742 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4475df5c-51e9-47ec-9ff8-6030b5b7377e-scripts\") pod \"octavia-rsyslog-5jnkz\" (UID: \"4475df5c-51e9-47ec-9ff8-6030b5b7377e\") " pod="openstack/octavia-rsyslog-5jnkz"
Jan 22 07:25:25 crc kubenswrapper[4982]: I0122 07:25:25.928813 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/4475df5c-51e9-47ec-9ff8-6030b5b7377e-hm-ports\") pod \"octavia-rsyslog-5jnkz\" (UID: \"4475df5c-51e9-47ec-9ff8-6030b5b7377e\") " pod="openstack/octavia-rsyslog-5jnkz"
Jan 22 07:25:25 crc kubenswrapper[4982]: I0122 07:25:25.929221 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/4475df5c-51e9-47ec-9ff8-6030b5b7377e-config-data-merged\") pod \"octavia-rsyslog-5jnkz\" (UID: \"4475df5c-51e9-47ec-9ff8-6030b5b7377e\") " pod="openstack/octavia-rsyslog-5jnkz"
Jan 22 07:25:25 crc kubenswrapper[4982]: I0122 07:25:25.929642 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/4475df5c-51e9-47ec-9ff8-6030b5b7377e-hm-ports\") pod \"octavia-rsyslog-5jnkz\" (UID: \"4475df5c-51e9-47ec-9ff8-6030b5b7377e\") " pod="openstack/octavia-rsyslog-5jnkz"
Jan 22 07:25:25 crc kubenswrapper[4982]: I0122 07:25:25.936060 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4475df5c-51e9-47ec-9ff8-6030b5b7377e-scripts\") pod \"octavia-rsyslog-5jnkz\" (UID: \"4475df5c-51e9-47ec-9ff8-6030b5b7377e\") " pod="openstack/octavia-rsyslog-5jnkz"
Jan 22 07:25:25 crc kubenswrapper[4982]: I0122 07:25:25.937216 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4475df5c-51e9-47ec-9ff8-6030b5b7377e-config-data\") pod \"octavia-rsyslog-5jnkz\" (UID: \"4475df5c-51e9-47ec-9ff8-6030b5b7377e\") " pod="openstack/octavia-rsyslog-5jnkz"
Jan 22 07:25:26 crc kubenswrapper[4982]: I0122 07:25:26.012255 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-rsyslog-5jnkz"
Jan 22 07:25:26 crc kubenswrapper[4982]: I0122 07:25:26.346069 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-image-upload-7b97d6bc64-kkzhq"]
Jan 22 07:25:26 crc kubenswrapper[4982]: I0122 07:25:26.350373 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-7b97d6bc64-kkzhq"
Jan 22 07:25:26 crc kubenswrapper[4982]: I0122 07:25:26.354778 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-config-data"
Jan 22 07:25:26 crc kubenswrapper[4982]: I0122 07:25:26.371876 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-7b97d6bc64-kkzhq"]
Jan 22 07:25:26 crc kubenswrapper[4982]: I0122 07:25:26.441946 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b83000aa-7a72-4f5c-976d-4716c51c0070-httpd-config\") pod \"octavia-image-upload-7b97d6bc64-kkzhq\" (UID: \"b83000aa-7a72-4f5c-976d-4716c51c0070\") " pod="openstack/octavia-image-upload-7b97d6bc64-kkzhq"
Jan 22 07:25:26 crc kubenswrapper[4982]: I0122 07:25:26.441999 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/b83000aa-7a72-4f5c-976d-4716c51c0070-amphora-image\") pod \"octavia-image-upload-7b97d6bc64-kkzhq\" (UID: \"b83000aa-7a72-4f5c-976d-4716c51c0070\") " pod="openstack/octavia-image-upload-7b97d6bc64-kkzhq"
Jan 22 07:25:26 crc kubenswrapper[4982]: I0122 07:25:26.544404 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b83000aa-7a72-4f5c-976d-4716c51c0070-httpd-config\") pod \"octavia-image-upload-7b97d6bc64-kkzhq\" (UID: \"b83000aa-7a72-4f5c-976d-4716c51c0070\") " pod="openstack/octavia-image-upload-7b97d6bc64-kkzhq"
Jan 22 07:25:26 crc kubenswrapper[4982]: I0122 07:25:26.544460 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/b83000aa-7a72-4f5c-976d-4716c51c0070-amphora-image\") pod \"octavia-image-upload-7b97d6bc64-kkzhq\" (UID: \"b83000aa-7a72-4f5c-976d-4716c51c0070\") " pod="openstack/octavia-image-upload-7b97d6bc64-kkzhq"
Jan 22 07:25:26 crc kubenswrapper[4982]: I0122 07:25:26.545476 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/b83000aa-7a72-4f5c-976d-4716c51c0070-amphora-image\") pod \"octavia-image-upload-7b97d6bc64-kkzhq\" (UID: \"b83000aa-7a72-4f5c-976d-4716c51c0070\") " pod="openstack/octavia-image-upload-7b97d6bc64-kkzhq"
Jan 22 07:25:26 crc kubenswrapper[4982]: I0122 07:25:26.553689 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b83000aa-7a72-4f5c-976d-4716c51c0070-httpd-config\") pod \"octavia-image-upload-7b97d6bc64-kkzhq\" (UID: \"b83000aa-7a72-4f5c-976d-4716c51c0070\") " pod="openstack/octavia-image-upload-7b97d6bc64-kkzhq"
Jan 22 07:25:26 crc kubenswrapper[4982]: I0122 07:25:26.632255 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-rsyslog-5jnkz"]
Jan 22 07:25:26 crc kubenswrapper[4982]: I0122 07:25:26.635695 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 22 07:25:26 crc kubenswrapper[4982]: I0122 07:25:26.686957 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-7b97d6bc64-kkzhq"
Need to start a new one" pod="openstack/octavia-image-upload-7b97d6bc64-kkzhq" Jan 22 07:25:26 crc kubenswrapper[4982]: I0122 07:25:26.769346 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-rsyslog-5jnkz"] Jan 22 07:25:27 crc kubenswrapper[4982]: I0122 07:25:27.010918 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-5jnkz" event={"ID":"4475df5c-51e9-47ec-9ff8-6030b5b7377e","Type":"ContainerStarted","Data":"7970d92e68407789a3319291602571a42759e19a709ac1ba7645a5f50063c02b"} Jan 22 07:25:27 crc kubenswrapper[4982]: I0122 07:25:27.336347 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-7b97d6bc64-kkzhq"] Jan 22 07:25:27 crc kubenswrapper[4982]: W0122 07:25:27.352137 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb83000aa_7a72_4f5c_976d_4716c51c0070.slice/crio-fe9ac7993537148031348691a8d97ee7112ec960b6e74f6243815a5387173ecd WatchSource:0}: Error finding container fe9ac7993537148031348691a8d97ee7112ec960b6e74f6243815a5387173ecd: Status 404 returned error can't find the container with id fe9ac7993537148031348691a8d97ee7112ec960b6e74f6243815a5387173ecd Jan 22 07:25:27 crc kubenswrapper[4982]: I0122 07:25:27.503183 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-db-sync-9w225"] Jan 22 07:25:27 crc kubenswrapper[4982]: I0122 07:25:27.505609 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-sync-9w225" Jan 22 07:25:27 crc kubenswrapper[4982]: I0122 07:25:27.526338 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-scripts" Jan 22 07:25:27 crc kubenswrapper[4982]: I0122 07:25:27.553062 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-sync-9w225"] Jan 22 07:25:27 crc kubenswrapper[4982]: I0122 07:25:27.675289 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/71ed9098-7666-4407-9354-f68f4a17c806-config-data-merged\") pod \"octavia-db-sync-9w225\" (UID: \"71ed9098-7666-4407-9354-f68f4a17c806\") " pod="openstack/octavia-db-sync-9w225" Jan 22 07:25:27 crc kubenswrapper[4982]: I0122 07:25:27.675542 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/71ed9098-7666-4407-9354-f68f4a17c806-scripts\") pod \"octavia-db-sync-9w225\" (UID: \"71ed9098-7666-4407-9354-f68f4a17c806\") " pod="openstack/octavia-db-sync-9w225" Jan 22 07:25:27 crc kubenswrapper[4982]: I0122 07:25:27.675577 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71ed9098-7666-4407-9354-f68f4a17c806-combined-ca-bundle\") pod \"octavia-db-sync-9w225\" (UID: \"71ed9098-7666-4407-9354-f68f4a17c806\") " pod="openstack/octavia-db-sync-9w225" Jan 22 07:25:27 crc kubenswrapper[4982]: I0122 07:25:27.675604 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71ed9098-7666-4407-9354-f68f4a17c806-config-data\") pod \"octavia-db-sync-9w225\" (UID: \"71ed9098-7666-4407-9354-f68f4a17c806\") " pod="openstack/octavia-db-sync-9w225" Jan 22 07:25:27 crc kubenswrapper[4982]: I0122 07:25:27.801029 4982 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/71ed9098-7666-4407-9354-f68f4a17c806-config-data-merged\") pod \"octavia-db-sync-9w225\" (UID: \"71ed9098-7666-4407-9354-f68f4a17c806\") " pod="openstack/octavia-db-sync-9w225" Jan 22 07:25:27 crc kubenswrapper[4982]: I0122 07:25:27.801263 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/71ed9098-7666-4407-9354-f68f4a17c806-scripts\") pod \"octavia-db-sync-9w225\" (UID: \"71ed9098-7666-4407-9354-f68f4a17c806\") " pod="openstack/octavia-db-sync-9w225" Jan 22 07:25:27 crc kubenswrapper[4982]: I0122 07:25:27.801309 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71ed9098-7666-4407-9354-f68f4a17c806-combined-ca-bundle\") pod \"octavia-db-sync-9w225\" (UID: \"71ed9098-7666-4407-9354-f68f4a17c806\") " pod="openstack/octavia-db-sync-9w225" Jan 22 07:25:27 crc kubenswrapper[4982]: I0122 07:25:27.801341 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71ed9098-7666-4407-9354-f68f4a17c806-config-data\") pod \"octavia-db-sync-9w225\" (UID: \"71ed9098-7666-4407-9354-f68f4a17c806\") " pod="openstack/octavia-db-sync-9w225" Jan 22 07:25:27 crc kubenswrapper[4982]: I0122 07:25:27.803661 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/71ed9098-7666-4407-9354-f68f4a17c806-config-data-merged\") pod \"octavia-db-sync-9w225\" (UID: \"71ed9098-7666-4407-9354-f68f4a17c806\") " pod="openstack/octavia-db-sync-9w225" Jan 22 07:25:27 crc kubenswrapper[4982]: I0122 07:25:27.815311 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71ed9098-7666-4407-9354-f68f4a17c806-combined-ca-bundle\") pod \"octavia-db-sync-9w225\" (UID: \"71ed9098-7666-4407-9354-f68f4a17c806\") " pod="openstack/octavia-db-sync-9w225" Jan 22 07:25:27 crc kubenswrapper[4982]: I0122 07:25:27.817310 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/71ed9098-7666-4407-9354-f68f4a17c806-scripts\") pod \"octavia-db-sync-9w225\" (UID: \"71ed9098-7666-4407-9354-f68f4a17c806\") " pod="openstack/octavia-db-sync-9w225" Jan 22 07:25:27 crc kubenswrapper[4982]: I0122 07:25:27.836420 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71ed9098-7666-4407-9354-f68f4a17c806-config-data\") pod \"octavia-db-sync-9w225\" (UID: \"71ed9098-7666-4407-9354-f68f4a17c806\") " pod="openstack/octavia-db-sync-9w225" Jan 22 07:25:27 crc kubenswrapper[4982]: I0122 07:25:27.850410 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-sync-9w225" Jan 22 07:25:28 crc kubenswrapper[4982]: I0122 07:25:28.026222 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-7b97d6bc64-kkzhq" event={"ID":"b83000aa-7a72-4f5c-976d-4716c51c0070","Type":"ContainerStarted","Data":"fe9ac7993537148031348691a8d97ee7112ec960b6e74f6243815a5387173ecd"} Jan 22 07:25:28 crc kubenswrapper[4982]: I0122 07:25:28.550390 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-sync-9w225"] Jan 22 07:25:30 crc kubenswrapper[4982]: I0122 07:25:30.047791 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-9w225" event={"ID":"71ed9098-7666-4407-9354-f68f4a17c806","Type":"ContainerStarted","Data":"5af9e2ba17a58c69b7e9fd4b3ee1c3fb67d181e1104881e0c6d3c3ef07095a19"} Jan 22 07:25:31 crc kubenswrapper[4982]: I0122 07:25:31.961839 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-healthmanager-k7x9x"] Jan 22 07:25:31 crc kubenswrapper[4982]: I0122 07:25:31.965500 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-healthmanager-k7x9x" Jan 22 07:25:31 crc kubenswrapper[4982]: I0122 07:25:31.973269 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-healthmanager-config-data" Jan 22 07:25:31 crc kubenswrapper[4982]: I0122 07:25:31.973745 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-healthmanager-scripts" Jan 22 07:25:31 crc kubenswrapper[4982]: I0122 07:25:31.973685 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-certs-secret" Jan 22 07:25:31 crc kubenswrapper[4982]: I0122 07:25:31.977023 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-healthmanager-k7x9x"] Jan 22 07:25:32 crc kubenswrapper[4982]: I0122 07:25:32.139916 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a6ef98c3-bddb-469d-b06d-7457555f44b9-scripts\") pod \"octavia-healthmanager-k7x9x\" (UID: \"a6ef98c3-bddb-469d-b06d-7457555f44b9\") " pod="openstack/octavia-healthmanager-k7x9x" Jan 22 07:25:32 crc kubenswrapper[4982]: I0122 07:25:32.140079 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/a6ef98c3-bddb-469d-b06d-7457555f44b9-config-data-merged\") pod \"octavia-healthmanager-k7x9x\" (UID: \"a6ef98c3-bddb-469d-b06d-7457555f44b9\") " pod="openstack/octavia-healthmanager-k7x9x" Jan 22 07:25:32 crc kubenswrapper[4982]: I0122 07:25:32.140144 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/a6ef98c3-bddb-469d-b06d-7457555f44b9-amphora-certs\") pod \"octavia-healthmanager-k7x9x\" (UID: \"a6ef98c3-bddb-469d-b06d-7457555f44b9\") " pod="openstack/octavia-healthmanager-k7x9x" Jan 22 07:25:32 crc kubenswrapper[4982]: I0122 07:25:32.140229 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6ef98c3-bddb-469d-b06d-7457555f44b9-config-data\") pod \"octavia-healthmanager-k7x9x\" (UID: \"a6ef98c3-bddb-469d-b06d-7457555f44b9\") " pod="openstack/octavia-healthmanager-k7x9x" Jan 22 07:25:32 crc kubenswrapper[4982]: I0122 07:25:32.140356 4982 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/a6ef98c3-bddb-469d-b06d-7457555f44b9-hm-ports\") pod \"octavia-healthmanager-k7x9x\" (UID: \"a6ef98c3-bddb-469d-b06d-7457555f44b9\") " pod="openstack/octavia-healthmanager-k7x9x" Jan 22 07:25:32 crc kubenswrapper[4982]: I0122 07:25:32.140552 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6ef98c3-bddb-469d-b06d-7457555f44b9-combined-ca-bundle\") pod \"octavia-healthmanager-k7x9x\" (UID: \"a6ef98c3-bddb-469d-b06d-7457555f44b9\") " pod="openstack/octavia-healthmanager-k7x9x" Jan 22 07:25:32 crc kubenswrapper[4982]: I0122 07:25:32.244214 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/a6ef98c3-bddb-469d-b06d-7457555f44b9-amphora-certs\") pod \"octavia-healthmanager-k7x9x\" (UID: \"a6ef98c3-bddb-469d-b06d-7457555f44b9\") " pod="openstack/octavia-healthmanager-k7x9x" Jan 22 07:25:32 crc kubenswrapper[4982]: I0122 07:25:32.244312 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6ef98c3-bddb-469d-b06d-7457555f44b9-config-data\") pod \"octavia-healthmanager-k7x9x\" (UID: \"a6ef98c3-bddb-469d-b06d-7457555f44b9\") " pod="openstack/octavia-healthmanager-k7x9x" Jan 22 07:25:32 crc kubenswrapper[4982]: I0122 07:25:32.244634 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/a6ef98c3-bddb-469d-b06d-7457555f44b9-hm-ports\") pod \"octavia-healthmanager-k7x9x\" (UID: \"a6ef98c3-bddb-469d-b06d-7457555f44b9\") " pod="openstack/octavia-healthmanager-k7x9x" Jan 22 07:25:32 crc kubenswrapper[4982]: I0122 07:25:32.244706 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6ef98c3-bddb-469d-b06d-7457555f44b9-combined-ca-bundle\") pod \"octavia-healthmanager-k7x9x\" (UID: \"a6ef98c3-bddb-469d-b06d-7457555f44b9\") " pod="openstack/octavia-healthmanager-k7x9x" Jan 22 07:25:32 crc kubenswrapper[4982]: I0122 07:25:32.244799 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a6ef98c3-bddb-469d-b06d-7457555f44b9-scripts\") pod \"octavia-healthmanager-k7x9x\" (UID: \"a6ef98c3-bddb-469d-b06d-7457555f44b9\") " pod="openstack/octavia-healthmanager-k7x9x" Jan 22 07:25:32 crc kubenswrapper[4982]: I0122 07:25:32.244935 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/a6ef98c3-bddb-469d-b06d-7457555f44b9-config-data-merged\") pod \"octavia-healthmanager-k7x9x\" (UID: \"a6ef98c3-bddb-469d-b06d-7457555f44b9\") " pod="openstack/octavia-healthmanager-k7x9x" Jan 22 07:25:32 crc kubenswrapper[4982]: I0122 07:25:32.245427 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/a6ef98c3-bddb-469d-b06d-7457555f44b9-config-data-merged\") pod \"octavia-healthmanager-k7x9x\" (UID: \"a6ef98c3-bddb-469d-b06d-7457555f44b9\") " pod="openstack/octavia-healthmanager-k7x9x" Jan 22 07:25:32 crc kubenswrapper[4982]: I0122 07:25:32.245641 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"hm-ports\" (UniqueName: \"kubernetes.io/configmap/a6ef98c3-bddb-469d-b06d-7457555f44b9-hm-ports\") pod \"octavia-healthmanager-k7x9x\" (UID: \"a6ef98c3-bddb-469d-b06d-7457555f44b9\") " pod="openstack/octavia-healthmanager-k7x9x" Jan 22 07:25:32 crc kubenswrapper[4982]: I0122 07:25:32.250260 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/a6ef98c3-bddb-469d-b06d-7457555f44b9-amphora-certs\") pod \"octavia-healthmanager-k7x9x\" (UID: \"a6ef98c3-bddb-469d-b06d-7457555f44b9\") " pod="openstack/octavia-healthmanager-k7x9x" Jan 22 07:25:32 crc kubenswrapper[4982]: I0122 07:25:32.250572 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a6ef98c3-bddb-469d-b06d-7457555f44b9-scripts\") pod \"octavia-healthmanager-k7x9x\" (UID: \"a6ef98c3-bddb-469d-b06d-7457555f44b9\") " pod="openstack/octavia-healthmanager-k7x9x" Jan 22 07:25:32 crc kubenswrapper[4982]: I0122 07:25:32.252570 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6ef98c3-bddb-469d-b06d-7457555f44b9-config-data\") pod \"octavia-healthmanager-k7x9x\" (UID: \"a6ef98c3-bddb-469d-b06d-7457555f44b9\") " pod="openstack/octavia-healthmanager-k7x9x" Jan 22 07:25:32 crc kubenswrapper[4982]: I0122 07:25:32.259187 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6ef98c3-bddb-469d-b06d-7457555f44b9-combined-ca-bundle\") pod \"octavia-healthmanager-k7x9x\" (UID: \"a6ef98c3-bddb-469d-b06d-7457555f44b9\") " pod="openstack/octavia-healthmanager-k7x9x" Jan 22 07:25:32 crc kubenswrapper[4982]: I0122 07:25:32.327900 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-healthmanager-k7x9x" Jan 22 07:25:33 crc kubenswrapper[4982]: I0122 07:25:33.804073 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-housekeeping-zn9sp"] Jan 22 07:25:33 crc kubenswrapper[4982]: I0122 07:25:33.805836 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-housekeeping-zn9sp" Jan 22 07:25:33 crc kubenswrapper[4982]: I0122 07:25:33.813731 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-housekeeping-scripts" Jan 22 07:25:33 crc kubenswrapper[4982]: I0122 07:25:33.813944 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-housekeeping-config-data" Jan 22 07:25:33 crc kubenswrapper[4982]: I0122 07:25:33.821717 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-housekeeping-zn9sp"] Jan 22 07:25:33 crc kubenswrapper[4982]: I0122 07:25:33.977708 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e995cc53-c18b-4c88-b635-c5ea5798b026-combined-ca-bundle\") pod \"octavia-housekeeping-zn9sp\" (UID: \"e995cc53-c18b-4c88-b635-c5ea5798b026\") " pod="openstack/octavia-housekeeping-zn9sp" Jan 22 07:25:33 crc kubenswrapper[4982]: I0122 07:25:33.977790 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/e995cc53-c18b-4c88-b635-c5ea5798b026-hm-ports\") pod \"octavia-housekeeping-zn9sp\" (UID: \"e995cc53-c18b-4c88-b635-c5ea5798b026\") " pod="openstack/octavia-housekeeping-zn9sp" Jan 22 07:25:33 crc kubenswrapper[4982]: I0122 07:25:33.977915 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/e995cc53-c18b-4c88-b635-c5ea5798b026-amphora-certs\") pod \"octavia-housekeeping-zn9sp\" (UID: \"e995cc53-c18b-4c88-b635-c5ea5798b026\") " pod="openstack/octavia-housekeeping-zn9sp" Jan 22 07:25:33 crc kubenswrapper[4982]: I0122 07:25:33.977969 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/e995cc53-c18b-4c88-b635-c5ea5798b026-config-data-merged\") pod \"octavia-housekeeping-zn9sp\" (UID: \"e995cc53-c18b-4c88-b635-c5ea5798b026\") " pod="openstack/octavia-housekeeping-zn9sp" Jan 22 07:25:33 crc kubenswrapper[4982]: I0122 07:25:33.978078 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e995cc53-c18b-4c88-b635-c5ea5798b026-config-data\") pod \"octavia-housekeeping-zn9sp\" (UID: \"e995cc53-c18b-4c88-b635-c5ea5798b026\") " pod="openstack/octavia-housekeeping-zn9sp" Jan 22 07:25:33 crc kubenswrapper[4982]: I0122 07:25:33.978236 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e995cc53-c18b-4c88-b635-c5ea5798b026-scripts\") pod \"octavia-housekeeping-zn9sp\" (UID: \"e995cc53-c18b-4c88-b635-c5ea5798b026\") " pod="openstack/octavia-housekeeping-zn9sp" Jan 22 07:25:34 crc kubenswrapper[4982]: I0122 07:25:34.079581 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e995cc53-c18b-4c88-b635-c5ea5798b026-combined-ca-bundle\") pod \"octavia-housekeeping-zn9sp\" (UID: \"e995cc53-c18b-4c88-b635-c5ea5798b026\") " pod="openstack/octavia-housekeeping-zn9sp" Jan 22 07:25:34 crc kubenswrapper[4982]: I0122 07:25:34.079662 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: 
\"kubernetes.io/configmap/e995cc53-c18b-4c88-b635-c5ea5798b026-hm-ports\") pod \"octavia-housekeeping-zn9sp\" (UID: \"e995cc53-c18b-4c88-b635-c5ea5798b026\") " pod="openstack/octavia-housekeeping-zn9sp" Jan 22 07:25:34 crc kubenswrapper[4982]: I0122 07:25:34.079757 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/e995cc53-c18b-4c88-b635-c5ea5798b026-amphora-certs\") pod \"octavia-housekeeping-zn9sp\" (UID: \"e995cc53-c18b-4c88-b635-c5ea5798b026\") " pod="openstack/octavia-housekeeping-zn9sp" Jan 22 07:25:34 crc kubenswrapper[4982]: I0122 07:25:34.079787 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/e995cc53-c18b-4c88-b635-c5ea5798b026-config-data-merged\") pod \"octavia-housekeeping-zn9sp\" (UID: \"e995cc53-c18b-4c88-b635-c5ea5798b026\") " pod="openstack/octavia-housekeeping-zn9sp" Jan 22 07:25:34 crc kubenswrapper[4982]: I0122 07:25:34.079915 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e995cc53-c18b-4c88-b635-c5ea5798b026-config-data\") pod \"octavia-housekeeping-zn9sp\" (UID: \"e995cc53-c18b-4c88-b635-c5ea5798b026\") " pod="openstack/octavia-housekeeping-zn9sp" Jan 22 07:25:34 crc kubenswrapper[4982]: I0122 07:25:34.080069 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e995cc53-c18b-4c88-b635-c5ea5798b026-scripts\") pod \"octavia-housekeeping-zn9sp\" (UID: \"e995cc53-c18b-4c88-b635-c5ea5798b026\") " pod="openstack/octavia-housekeeping-zn9sp" Jan 22 07:25:34 crc kubenswrapper[4982]: I0122 07:25:34.081949 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/e995cc53-c18b-4c88-b635-c5ea5798b026-config-data-merged\") pod \"octavia-housekeeping-zn9sp\" (UID: \"e995cc53-c18b-4c88-b635-c5ea5798b026\") " pod="openstack/octavia-housekeeping-zn9sp" Jan 22 07:25:34 crc kubenswrapper[4982]: I0122 07:25:34.082685 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/e995cc53-c18b-4c88-b635-c5ea5798b026-hm-ports\") pod \"octavia-housekeeping-zn9sp\" (UID: \"e995cc53-c18b-4c88-b635-c5ea5798b026\") " pod="openstack/octavia-housekeeping-zn9sp" Jan 22 07:25:34 crc kubenswrapper[4982]: I0122 07:25:34.086530 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e995cc53-c18b-4c88-b635-c5ea5798b026-combined-ca-bundle\") pod \"octavia-housekeeping-zn9sp\" (UID: \"e995cc53-c18b-4c88-b635-c5ea5798b026\") " pod="openstack/octavia-housekeeping-zn9sp" Jan 22 07:25:34 crc kubenswrapper[4982]: I0122 07:25:34.086572 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e995cc53-c18b-4c88-b635-c5ea5798b026-config-data\") pod \"octavia-housekeeping-zn9sp\" (UID: \"e995cc53-c18b-4c88-b635-c5ea5798b026\") " pod="openstack/octavia-housekeeping-zn9sp" Jan 22 07:25:34 crc kubenswrapper[4982]: I0122 07:25:34.086866 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/e995cc53-c18b-4c88-b635-c5ea5798b026-amphora-certs\") pod \"octavia-housekeeping-zn9sp\" (UID: \"e995cc53-c18b-4c88-b635-c5ea5798b026\") " 
pod="openstack/octavia-housekeeping-zn9sp" Jan 22 07:25:34 crc kubenswrapper[4982]: I0122 07:25:34.087886 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e995cc53-c18b-4c88-b635-c5ea5798b026-scripts\") pod \"octavia-housekeeping-zn9sp\" (UID: \"e995cc53-c18b-4c88-b635-c5ea5798b026\") " pod="openstack/octavia-housekeeping-zn9sp" Jan 22 07:25:34 crc kubenswrapper[4982]: I0122 07:25:34.127099 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-housekeeping-zn9sp" Jan 22 07:25:34 crc kubenswrapper[4982]: I0122 07:25:34.814847 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-worker-9swd4"] Jan 22 07:25:34 crc kubenswrapper[4982]: I0122 07:25:34.816781 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-worker-9swd4" Jan 22 07:25:34 crc kubenswrapper[4982]: I0122 07:25:34.821554 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-worker-config-data" Jan 22 07:25:34 crc kubenswrapper[4982]: I0122 07:25:34.822049 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-worker-scripts" Jan 22 07:25:34 crc kubenswrapper[4982]: I0122 07:25:34.827055 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-worker-9swd4"] Jan 22 07:25:34 crc kubenswrapper[4982]: I0122 07:25:34.893069 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/ce14677e-7c96-4c5c-a9a8-76747e002f27-config-data-merged\") pod \"octavia-worker-9swd4\" (UID: \"ce14677e-7c96-4c5c-a9a8-76747e002f27\") " pod="openstack/octavia-worker-9swd4" Jan 22 07:25:34 crc kubenswrapper[4982]: I0122 07:25:34.893447 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce14677e-7c96-4c5c-a9a8-76747e002f27-combined-ca-bundle\") pod \"octavia-worker-9swd4\" (UID: \"ce14677e-7c96-4c5c-a9a8-76747e002f27\") " pod="openstack/octavia-worker-9swd4" Jan 22 07:25:34 crc kubenswrapper[4982]: I0122 07:25:34.893500 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/ce14677e-7c96-4c5c-a9a8-76747e002f27-hm-ports\") pod \"octavia-worker-9swd4\" (UID: \"ce14677e-7c96-4c5c-a9a8-76747e002f27\") " pod="openstack/octavia-worker-9swd4" Jan 22 07:25:34 crc kubenswrapper[4982]: I0122 07:25:34.893534 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/ce14677e-7c96-4c5c-a9a8-76747e002f27-amphora-certs\") pod \"octavia-worker-9swd4\" (UID: \"ce14677e-7c96-4c5c-a9a8-76747e002f27\") " pod="openstack/octavia-worker-9swd4" Jan 22 07:25:34 crc kubenswrapper[4982]: I0122 07:25:34.893610 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce14677e-7c96-4c5c-a9a8-76747e002f27-config-data\") pod \"octavia-worker-9swd4\" (UID: \"ce14677e-7c96-4c5c-a9a8-76747e002f27\") " pod="openstack/octavia-worker-9swd4" Jan 22 07:25:34 crc kubenswrapper[4982]: I0122 07:25:34.893705 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/ce14677e-7c96-4c5c-a9a8-76747e002f27-scripts\") pod \"octavia-worker-9swd4\" (UID: \"ce14677e-7c96-4c5c-a9a8-76747e002f27\") " pod="openstack/octavia-worker-9swd4" Jan 22 07:25:34 crc kubenswrapper[4982]: I0122 07:25:34.995345 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/ce14677e-7c96-4c5c-a9a8-76747e002f27-config-data-merged\") pod \"octavia-worker-9swd4\" (UID: \"ce14677e-7c96-4c5c-a9a8-76747e002f27\") " pod="openstack/octavia-worker-9swd4" Jan 22 07:25:34 crc kubenswrapper[4982]: I0122 07:25:34.995413 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce14677e-7c96-4c5c-a9a8-76747e002f27-combined-ca-bundle\") pod \"octavia-worker-9swd4\" (UID: \"ce14677e-7c96-4c5c-a9a8-76747e002f27\") " pod="openstack/octavia-worker-9swd4" Jan 22 07:25:34 crc kubenswrapper[4982]: I0122 07:25:34.995449 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/ce14677e-7c96-4c5c-a9a8-76747e002f27-hm-ports\") pod \"octavia-worker-9swd4\" (UID: \"ce14677e-7c96-4c5c-a9a8-76747e002f27\") " pod="openstack/octavia-worker-9swd4" Jan 22 07:25:34 crc kubenswrapper[4982]: I0122 07:25:34.995469 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/ce14677e-7c96-4c5c-a9a8-76747e002f27-amphora-certs\") pod \"octavia-worker-9swd4\" (UID: \"ce14677e-7c96-4c5c-a9a8-76747e002f27\") " pod="openstack/octavia-worker-9swd4" Jan 22 07:25:34 crc kubenswrapper[4982]: I0122 07:25:34.995518 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce14677e-7c96-4c5c-a9a8-76747e002f27-config-data\") pod \"octavia-worker-9swd4\" (UID: \"ce14677e-7c96-4c5c-a9a8-76747e002f27\") " pod="openstack/octavia-worker-9swd4" Jan 22 07:25:34 crc kubenswrapper[4982]: I0122 07:25:34.995565 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce14677e-7c96-4c5c-a9a8-76747e002f27-scripts\") pod \"octavia-worker-9swd4\" (UID: \"ce14677e-7c96-4c5c-a9a8-76747e002f27\") " pod="openstack/octavia-worker-9swd4" Jan 22 07:25:34 crc kubenswrapper[4982]: I0122 07:25:34.997260 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/ce14677e-7c96-4c5c-a9a8-76747e002f27-hm-ports\") pod \"octavia-worker-9swd4\" (UID: \"ce14677e-7c96-4c5c-a9a8-76747e002f27\") " pod="openstack/octavia-worker-9swd4" Jan 22 07:25:34 crc kubenswrapper[4982]: I0122 07:25:34.997517 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/ce14677e-7c96-4c5c-a9a8-76747e002f27-config-data-merged\") pod \"octavia-worker-9swd4\" (UID: \"ce14677e-7c96-4c5c-a9a8-76747e002f27\") " pod="openstack/octavia-worker-9swd4" Jan 22 07:25:35 crc kubenswrapper[4982]: I0122 07:25:35.001842 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/ce14677e-7c96-4c5c-a9a8-76747e002f27-amphora-certs\") pod \"octavia-worker-9swd4\" (UID: \"ce14677e-7c96-4c5c-a9a8-76747e002f27\") " pod="openstack/octavia-worker-9swd4" Jan 22 07:25:35 crc kubenswrapper[4982]: I0122 07:25:35.002662 4982 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce14677e-7c96-4c5c-a9a8-76747e002f27-scripts\") pod \"octavia-worker-9swd4\" (UID: \"ce14677e-7c96-4c5c-a9a8-76747e002f27\") " pod="openstack/octavia-worker-9swd4" Jan 22 07:25:35 crc kubenswrapper[4982]: I0122 07:25:35.004120 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce14677e-7c96-4c5c-a9a8-76747e002f27-config-data\") pod \"octavia-worker-9swd4\" (UID: \"ce14677e-7c96-4c5c-a9a8-76747e002f27\") " pod="openstack/octavia-worker-9swd4" Jan 22 07:25:35 crc kubenswrapper[4982]: I0122 07:25:35.009044 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce14677e-7c96-4c5c-a9a8-76747e002f27-combined-ca-bundle\") pod \"octavia-worker-9swd4\" (UID: \"ce14677e-7c96-4c5c-a9a8-76747e002f27\") " pod="openstack/octavia-worker-9swd4" Jan 22 07:25:35 crc kubenswrapper[4982]: I0122 07:25:35.136796 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-worker-9swd4" Jan 22 07:25:39 crc kubenswrapper[4982]: I0122 07:25:39.223356 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-worker-9swd4"] Jan 22 07:25:39 crc kubenswrapper[4982]: I0122 07:25:39.535118 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-healthmanager-k7x9x"] Jan 22 07:25:39 crc kubenswrapper[4982]: I0122 07:25:39.703484 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-healthmanager-k7x9x"] Jan 22 07:25:48 crc kubenswrapper[4982]: I0122 07:25:48.973838 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 07:25:48 crc kubenswrapper[4982]: I0122 07:25:48.974350 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 07:25:48 crc kubenswrapper[4982]: I0122 07:25:48.974395 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 07:25:48 crc kubenswrapper[4982]: I0122 07:25:48.975188 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e86d7de3d5541f26c30d5e3150518e2a21550ebe9adbca61610c9190dc552078"} pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 07:25:48 crc kubenswrapper[4982]: I0122 07:25:48.975253 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" containerID="cri-o://e86d7de3d5541f26c30d5e3150518e2a21550ebe9adbca61610c9190dc552078" gracePeriod=600 Jan 22 07:25:56 crc kubenswrapper[4982]: W0122 07:25:56.128315 4982 manager.go:1169] Failed to process watch 
event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podce14677e_7c96_4c5c_a9a8_76747e002f27.slice/crio-292655a2236d9296e6a66ea12db28aa96614f86b789fa42d2188039f81a6f4fe WatchSource:0}: Error finding container 292655a2236d9296e6a66ea12db28aa96614f86b789fa42d2188039f81a6f4fe: Status 404 returned error can't find the container with id 292655a2236d9296e6a66ea12db28aa96614f86b789fa42d2188039f81a6f4fe Jan 22 07:25:56 crc kubenswrapper[4982]: E0122 07:25:56.289204 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:25:56 crc kubenswrapper[4982]: I0122 07:25:56.324821 4982 generic.go:334] "Generic (PLEG): container finished" podID="2829369e-72ba-4637-853b-88f5cf242a0e" containerID="e86d7de3d5541f26c30d5e3150518e2a21550ebe9adbca61610c9190dc552078" exitCode=0 Jan 22 07:25:56 crc kubenswrapper[4982]: I0122 07:25:56.325776 4982 scope.go:117] "RemoveContainer" containerID="e86d7de3d5541f26c30d5e3150518e2a21550ebe9adbca61610c9190dc552078" Jan 22 07:25:56 crc kubenswrapper[4982]: E0122 07:25:56.326109 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:25:56 crc kubenswrapper[4982]: I0122 07:25:56.326289 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerDied","Data":"e86d7de3d5541f26c30d5e3150518e2a21550ebe9adbca61610c9190dc552078"} Jan 22 07:25:56 crc kubenswrapper[4982]: I0122 07:25:56.326325 4982 scope.go:117] "RemoveContainer" containerID="3a502d842903abfcf3ed22217fd513f90f995fa6712f8a8d56a2c3f5ed5f5a08" Jan 22 07:25:56 crc kubenswrapper[4982]: I0122 07:25:56.332309 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-9swd4" event={"ID":"ce14677e-7c96-4c5c-a9a8-76747e002f27","Type":"ContainerStarted","Data":"292655a2236d9296e6a66ea12db28aa96614f86b789fa42d2188039f81a6f4fe"} Jan 22 07:25:56 crc kubenswrapper[4982]: I0122 07:25:56.334090 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-k7x9x" event={"ID":"a6ef98c3-bddb-469d-b06d-7457555f44b9","Type":"ContainerStarted","Data":"d6c45dff75683810b5e2dfd2293133370e20107a1c9dffe155d168d1cbf70def"} Jan 22 07:25:56 crc kubenswrapper[4982]: I0122 07:25:56.786338 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-housekeeping-zn9sp"] Jan 22 07:25:56 crc kubenswrapper[4982]: E0122 07:25:56.959555 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/gthiemonge/octavia-amphora-image:latest" Jan 22 07:25:56 crc kubenswrapper[4982]: E0122 07:25:56.960117 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:init,Image:quay.io/gthiemonge/octavia-amphora-image,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:DEST_DIR,Value:/usr/local/apache2/htdocs,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:amphora-image,ReadOnly:false,MountPath:/usr/local/apache2/htdocs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-image-upload-7b97d6bc64-kkzhq_openstack(b83000aa-7a72-4f5c-976d-4716c51c0070): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 07:25:56 crc kubenswrapper[4982]: E0122 07:25:56.961339 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/octavia-image-upload-7b97d6bc64-kkzhq" podUID="b83000aa-7a72-4f5c-976d-4716c51c0070" Jan 22 07:25:57 crc kubenswrapper[4982]: I0122 07:25:57.357826 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-zn9sp" event={"ID":"e995cc53-c18b-4c88-b635-c5ea5798b026","Type":"ContainerStarted","Data":"921e41f63a232ebb8149de0ed8935dd8a441edb3eb4df076521121bb34e12bb5"} Jan 22 07:25:57 crc kubenswrapper[4982]: I0122 07:25:57.367046 4982 generic.go:334] "Generic (PLEG): container finished" podID="71ed9098-7666-4407-9354-f68f4a17c806" containerID="53be74d469bd1c665b5496d3cb1e5e4bd75fd376564b297ff340710fa9123148" exitCode=0 Jan 22 07:25:57 crc kubenswrapper[4982]: I0122 07:25:57.367116 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-9w225" event={"ID":"71ed9098-7666-4407-9354-f68f4a17c806","Type":"ContainerDied","Data":"53be74d469bd1c665b5496d3cb1e5e4bd75fd376564b297ff340710fa9123148"} Jan 22 07:25:57 crc kubenswrapper[4982]: I0122 07:25:57.373455 4982 scope.go:117] "RemoveContainer" containerID="e86d7de3d5541f26c30d5e3150518e2a21550ebe9adbca61610c9190dc552078" Jan 22 07:25:57 crc kubenswrapper[4982]: E0122 07:25:57.373793 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:25:57 crc kubenswrapper[4982]: E0122 07:25:57.375321 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/gthiemonge/octavia-amphora-image\\\"\"" pod="openstack/octavia-image-upload-7b97d6bc64-kkzhq" 
podUID="b83000aa-7a72-4f5c-976d-4716c51c0070" Jan 22 07:25:58 crc kubenswrapper[4982]: I0122 07:25:58.393170 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-5jnkz" event={"ID":"4475df5c-51e9-47ec-9ff8-6030b5b7377e","Type":"ContainerStarted","Data":"d1f3dc71ad7f3f841ee29cd0f610356ddb72d08a0e65f94003c8dab3870cec3c"} Jan 22 07:25:58 crc kubenswrapper[4982]: I0122 07:25:58.396165 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-9w225" event={"ID":"71ed9098-7666-4407-9354-f68f4a17c806","Type":"ContainerStarted","Data":"7134a9b0e4cb6aa9206ce780dd792f364b72e51246cee8f83586e641de5b75cb"} Jan 22 07:25:58 crc kubenswrapper[4982]: I0122 07:25:58.398681 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-9swd4" event={"ID":"ce14677e-7c96-4c5c-a9a8-76747e002f27","Type":"ContainerStarted","Data":"890fff68be930ce5f27b6b5b517a3b42c56e808bc537cb3884deaaf3f8d1ae91"} Jan 22 07:25:58 crc kubenswrapper[4982]: I0122 07:25:58.405603 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-k7x9x" event={"ID":"a6ef98c3-bddb-469d-b06d-7457555f44b9","Type":"ContainerStarted","Data":"d454b219c75025723fe48897783afbace581a6e3611f2d20bb90deb5a60b7683"} Jan 22 07:25:58 crc kubenswrapper[4982]: I0122 07:25:58.524089 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-db-sync-9w225" podStartSLOduration=31.524060919 podStartE2EDuration="31.524060919s" podCreationTimestamp="2026-01-22 07:25:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:25:58.503107092 +0000 UTC m=+6019.341745115" watchObservedRunningTime="2026-01-22 07:25:58.524060919 +0000 UTC m=+6019.362698922" Jan 22 07:25:59 crc kubenswrapper[4982]: I0122 07:25:59.417093 4982 generic.go:334] "Generic (PLEG): container finished" podID="ce14677e-7c96-4c5c-a9a8-76747e002f27" containerID="890fff68be930ce5f27b6b5b517a3b42c56e808bc537cb3884deaaf3f8d1ae91" exitCode=0 Jan 22 07:25:59 crc kubenswrapper[4982]: I0122 07:25:59.417449 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-9swd4" event={"ID":"ce14677e-7c96-4c5c-a9a8-76747e002f27","Type":"ContainerDied","Data":"890fff68be930ce5f27b6b5b517a3b42c56e808bc537cb3884deaaf3f8d1ae91"} Jan 22 07:25:59 crc kubenswrapper[4982]: I0122 07:25:59.423215 4982 generic.go:334] "Generic (PLEG): container finished" podID="a6ef98c3-bddb-469d-b06d-7457555f44b9" containerID="d454b219c75025723fe48897783afbace581a6e3611f2d20bb90deb5a60b7683" exitCode=0 Jan 22 07:25:59 crc kubenswrapper[4982]: I0122 07:25:59.423300 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-k7x9x" event={"ID":"a6ef98c3-bddb-469d-b06d-7457555f44b9","Type":"ContainerDied","Data":"d454b219c75025723fe48897783afbace581a6e3611f2d20bb90deb5a60b7683"} Jan 22 07:25:59 crc kubenswrapper[4982]: I0122 07:25:59.433220 4982 generic.go:334] "Generic (PLEG): container finished" podID="4475df5c-51e9-47ec-9ff8-6030b5b7377e" containerID="d1f3dc71ad7f3f841ee29cd0f610356ddb72d08a0e65f94003c8dab3870cec3c" exitCode=0 Jan 22 07:25:59 crc kubenswrapper[4982]: I0122 07:25:59.433600 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-5jnkz" event={"ID":"4475df5c-51e9-47ec-9ff8-6030b5b7377e","Type":"ContainerDied","Data":"d1f3dc71ad7f3f841ee29cd0f610356ddb72d08a0e65f94003c8dab3870cec3c"} Jan 22 07:26:00 
crc kubenswrapper[4982]: I0122 07:26:00.443497 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-9swd4" event={"ID":"ce14677e-7c96-4c5c-a9a8-76747e002f27","Type":"ContainerStarted","Data":"9d21970b4254d92290a13c2be7895e41b5a784c899ce1baed22e137e7408768d"} Jan 22 07:26:00 crc kubenswrapper[4982]: I0122 07:26:00.444644 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-worker-9swd4" Jan 22 07:26:00 crc kubenswrapper[4982]: I0122 07:26:00.445462 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-zn9sp" event={"ID":"e995cc53-c18b-4c88-b635-c5ea5798b026","Type":"ContainerStarted","Data":"b9814eacf12a5df9ef7c5f048b2507a83ac96f49050be15d6db00b015302c2a7"} Jan 22 07:26:00 crc kubenswrapper[4982]: I0122 07:26:00.447265 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-k7x9x" event={"ID":"a6ef98c3-bddb-469d-b06d-7457555f44b9","Type":"ContainerStarted","Data":"62bf273d707c173ea8325a112f6692ee4284f46977ca6a5856ef2242ffc15a81"} Jan 22 07:26:00 crc kubenswrapper[4982]: I0122 07:26:00.447428 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-healthmanager-k7x9x" Jan 22 07:26:00 crc kubenswrapper[4982]: I0122 07:26:00.462525 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-worker-9swd4" podStartSLOduration=24.84891892 podStartE2EDuration="26.462509702s" podCreationTimestamp="2026-01-22 07:25:34 +0000 UTC" firstStartedPulling="2026-01-22 07:25:56.130844154 +0000 UTC m=+6016.969482167" lastFinishedPulling="2026-01-22 07:25:57.744434946 +0000 UTC m=+6018.583072949" observedRunningTime="2026-01-22 07:26:00.462385659 +0000 UTC m=+6021.301023662" watchObservedRunningTime="2026-01-22 07:26:00.462509702 +0000 UTC m=+6021.301147705" Jan 22 07:26:00 crc kubenswrapper[4982]: I0122 07:26:00.509912 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-healthmanager-k7x9x" podStartSLOduration=29.509885592 podStartE2EDuration="29.509885592s" podCreationTimestamp="2026-01-22 07:25:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:26:00.50023137 +0000 UTC m=+6021.338869373" watchObservedRunningTime="2026-01-22 07:26:00.509885592 +0000 UTC m=+6021.348523595" Jan 22 07:26:03 crc kubenswrapper[4982]: I0122 07:26:03.495526 4982 generic.go:334] "Generic (PLEG): container finished" podID="e995cc53-c18b-4c88-b635-c5ea5798b026" containerID="b9814eacf12a5df9ef7c5f048b2507a83ac96f49050be15d6db00b015302c2a7" exitCode=0 Jan 22 07:26:03 crc kubenswrapper[4982]: I0122 07:26:03.495759 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-zn9sp" event={"ID":"e995cc53-c18b-4c88-b635-c5ea5798b026","Type":"ContainerDied","Data":"b9814eacf12a5df9ef7c5f048b2507a83ac96f49050be15d6db00b015302c2a7"} Jan 22 07:26:05 crc kubenswrapper[4982]: I0122 07:26:05.376610 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-worker-9swd4" Jan 22 07:26:07 crc kubenswrapper[4982]: I0122 07:26:07.538261 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-zn9sp" event={"ID":"e995cc53-c18b-4c88-b635-c5ea5798b026","Type":"ContainerStarted","Data":"3f67d258d051be285c845a3b95d384acf0427fbc5a823a5ff05d856462cf5352"} Jan 22 07:26:07 crc kubenswrapper[4982]: I0122 
07:26:07.538904 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-housekeeping-zn9sp" Jan 22 07:26:07 crc kubenswrapper[4982]: I0122 07:26:07.541075 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-5jnkz" event={"ID":"4475df5c-51e9-47ec-9ff8-6030b5b7377e","Type":"ContainerStarted","Data":"389b4c4e25b4ece252a2c7a0f68e6667784a197303494d3b4b3b5d48b134d8f8"} Jan 22 07:26:07 crc kubenswrapper[4982]: I0122 07:26:07.541220 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-rsyslog-5jnkz" Jan 22 07:26:07 crc kubenswrapper[4982]: I0122 07:26:07.543228 4982 generic.go:334] "Generic (PLEG): container finished" podID="71ed9098-7666-4407-9354-f68f4a17c806" containerID="7134a9b0e4cb6aa9206ce780dd792f364b72e51246cee8f83586e641de5b75cb" exitCode=0 Jan 22 07:26:07 crc kubenswrapper[4982]: I0122 07:26:07.543260 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-9w225" event={"ID":"71ed9098-7666-4407-9354-f68f4a17c806","Type":"ContainerDied","Data":"7134a9b0e4cb6aa9206ce780dd792f364b72e51246cee8f83586e641de5b75cb"} Jan 22 07:26:07 crc kubenswrapper[4982]: I0122 07:26:07.563265 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-housekeeping-zn9sp" podStartSLOduration=32.009479184 podStartE2EDuration="34.563244673s" podCreationTimestamp="2026-01-22 07:25:33 +0000 UTC" firstStartedPulling="2026-01-22 07:25:56.831373701 +0000 UTC m=+6017.670011704" lastFinishedPulling="2026-01-22 07:25:59.38513919 +0000 UTC m=+6020.223777193" observedRunningTime="2026-01-22 07:26:07.56127041 +0000 UTC m=+6028.399908413" watchObservedRunningTime="2026-01-22 07:26:07.563244673 +0000 UTC m=+6028.401882676" Jan 22 07:26:07 crc kubenswrapper[4982]: I0122 07:26:07.577340 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-rsyslog-5jnkz" podStartSLOduration=2.68424461 podStartE2EDuration="42.577319333s" podCreationTimestamp="2026-01-22 07:25:25 +0000 UTC" firstStartedPulling="2026-01-22 07:25:26.635405028 +0000 UTC m=+5987.474043031" lastFinishedPulling="2026-01-22 07:26:06.528479751 +0000 UTC m=+6027.367117754" observedRunningTime="2026-01-22 07:26:07.577168389 +0000 UTC m=+6028.415806392" watchObservedRunningTime="2026-01-22 07:26:07.577319333 +0000 UTC m=+6028.415957346" Jan 22 07:26:08 crc kubenswrapper[4982]: I0122 07:26:08.721760 4982 scope.go:117] "RemoveContainer" containerID="e86d7de3d5541f26c30d5e3150518e2a21550ebe9adbca61610c9190dc552078" Jan 22 07:26:08 crc kubenswrapper[4982]: E0122 07:26:08.722788 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:26:08 crc kubenswrapper[4982]: I0122 07:26:08.972187 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-sync-9w225" Jan 22 07:26:09 crc kubenswrapper[4982]: I0122 07:26:09.023826 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/71ed9098-7666-4407-9354-f68f4a17c806-scripts\") pod \"71ed9098-7666-4407-9354-f68f4a17c806\" (UID: \"71ed9098-7666-4407-9354-f68f4a17c806\") " Jan 22 07:26:09 crc kubenswrapper[4982]: I0122 07:26:09.024197 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71ed9098-7666-4407-9354-f68f4a17c806-config-data\") pod \"71ed9098-7666-4407-9354-f68f4a17c806\" (UID: \"71ed9098-7666-4407-9354-f68f4a17c806\") " Jan 22 07:26:09 crc kubenswrapper[4982]: I0122 07:26:09.024354 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/71ed9098-7666-4407-9354-f68f4a17c806-config-data-merged\") pod \"71ed9098-7666-4407-9354-f68f4a17c806\" (UID: \"71ed9098-7666-4407-9354-f68f4a17c806\") " Jan 22 07:26:09 crc kubenswrapper[4982]: I0122 07:26:09.024469 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71ed9098-7666-4407-9354-f68f4a17c806-combined-ca-bundle\") pod \"71ed9098-7666-4407-9354-f68f4a17c806\" (UID: \"71ed9098-7666-4407-9354-f68f4a17c806\") " Jan 22 07:26:09 crc kubenswrapper[4982]: I0122 07:26:09.031360 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71ed9098-7666-4407-9354-f68f4a17c806-scripts" (OuterVolumeSpecName: "scripts") pod "71ed9098-7666-4407-9354-f68f4a17c806" (UID: "71ed9098-7666-4407-9354-f68f4a17c806"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:26:09 crc kubenswrapper[4982]: I0122 07:26:09.035488 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71ed9098-7666-4407-9354-f68f4a17c806-config-data" (OuterVolumeSpecName: "config-data") pod "71ed9098-7666-4407-9354-f68f4a17c806" (UID: "71ed9098-7666-4407-9354-f68f4a17c806"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:26:09 crc kubenswrapper[4982]: I0122 07:26:09.061056 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/71ed9098-7666-4407-9354-f68f4a17c806-config-data-merged" (OuterVolumeSpecName: "config-data-merged") pod "71ed9098-7666-4407-9354-f68f4a17c806" (UID: "71ed9098-7666-4407-9354-f68f4a17c806"). InnerVolumeSpecName "config-data-merged". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:26:09 crc kubenswrapper[4982]: I0122 07:26:09.068794 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71ed9098-7666-4407-9354-f68f4a17c806-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "71ed9098-7666-4407-9354-f68f4a17c806" (UID: "71ed9098-7666-4407-9354-f68f4a17c806"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:26:09 crc kubenswrapper[4982]: I0122 07:26:09.125804 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/71ed9098-7666-4407-9354-f68f4a17c806-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:26:09 crc kubenswrapper[4982]: I0122 07:26:09.126161 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71ed9098-7666-4407-9354-f68f4a17c806-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:26:09 crc kubenswrapper[4982]: I0122 07:26:09.126172 4982 reconciler_common.go:293] "Volume detached for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/71ed9098-7666-4407-9354-f68f4a17c806-config-data-merged\") on node \"crc\" DevicePath \"\"" Jan 22 07:26:09 crc kubenswrapper[4982]: I0122 07:26:09.126182 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71ed9098-7666-4407-9354-f68f4a17c806-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:26:09 crc kubenswrapper[4982]: I0122 07:26:09.583810 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-9w225" event={"ID":"71ed9098-7666-4407-9354-f68f4a17c806","Type":"ContainerDied","Data":"5af9e2ba17a58c69b7e9fd4b3ee1c3fb67d181e1104881e0c6d3c3ef07095a19"} Jan 22 07:26:09 crc kubenswrapper[4982]: I0122 07:26:09.583896 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5af9e2ba17a58c69b7e9fd4b3ee1c3fb67d181e1104881e0c6d3c3ef07095a19" Jan 22 07:26:09 crc kubenswrapper[4982]: I0122 07:26:09.584095 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-sync-9w225" Jan 22 07:26:11 crc kubenswrapper[4982]: I0122 07:26:11.051451 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-rsyslog-5jnkz" Jan 22 07:26:12 crc kubenswrapper[4982]: I0122 07:26:12.611267 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-7b97d6bc64-kkzhq" event={"ID":"b83000aa-7a72-4f5c-976d-4716c51c0070","Type":"ContainerStarted","Data":"e4a714809762dbeaa173efe40fae31c8c7e92c853e12864692690008bd1ba99e"} Jan 22 07:26:14 crc kubenswrapper[4982]: I0122 07:26:14.060983 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-qnsmv"] Jan 22 07:26:14 crc kubenswrapper[4982]: I0122 07:26:14.072367 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-1e21-account-create-update-kbzw6"] Jan 22 07:26:14 crc kubenswrapper[4982]: I0122 07:26:14.082609 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-qnsmv"] Jan 22 07:26:14 crc kubenswrapper[4982]: I0122 07:26:14.093920 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-1e21-account-create-update-kbzw6"] Jan 22 07:26:15 crc kubenswrapper[4982]: I0122 07:26:15.732533 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d28c8926-6e80-4666-8051-6decfafa89f2" path="/var/lib/kubelet/pods/d28c8926-6e80-4666-8051-6decfafa89f2/volumes" Jan 22 07:26:15 crc kubenswrapper[4982]: I0122 07:26:15.733634 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d5943822-9460-4636-881e-1a37d605b4c6" path="/var/lib/kubelet/pods/d5943822-9460-4636-881e-1a37d605b4c6/volumes" Jan 22 07:26:17 crc kubenswrapper[4982]: I0122 07:26:17.383298 
4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-healthmanager-k7x9x" Jan 22 07:26:19 crc kubenswrapper[4982]: I0122 07:26:19.162903 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-housekeeping-zn9sp" Jan 22 07:26:20 crc kubenswrapper[4982]: I0122 07:26:20.047628 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-jqfl4"] Jan 22 07:26:20 crc kubenswrapper[4982]: I0122 07:26:20.059298 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-jqfl4"] Jan 22 07:26:20 crc kubenswrapper[4982]: I0122 07:26:20.700988 4982 generic.go:334] "Generic (PLEG): container finished" podID="b83000aa-7a72-4f5c-976d-4716c51c0070" containerID="e4a714809762dbeaa173efe40fae31c8c7e92c853e12864692690008bd1ba99e" exitCode=0 Jan 22 07:26:20 crc kubenswrapper[4982]: I0122 07:26:20.701088 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-7b97d6bc64-kkzhq" event={"ID":"b83000aa-7a72-4f5c-976d-4716c51c0070","Type":"ContainerDied","Data":"e4a714809762dbeaa173efe40fae31c8c7e92c853e12864692690008bd1ba99e"} Jan 22 07:26:21 crc kubenswrapper[4982]: I0122 07:26:21.713330 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-7b97d6bc64-kkzhq" event={"ID":"b83000aa-7a72-4f5c-976d-4716c51c0070","Type":"ContainerStarted","Data":"f0951b1df0d28eaa08b65a254efe685792f1af40fb14ba1d15329d86deb652ca"} Jan 22 07:26:21 crc kubenswrapper[4982]: I0122 07:26:21.720075 4982 scope.go:117] "RemoveContainer" containerID="e86d7de3d5541f26c30d5e3150518e2a21550ebe9adbca61610c9190dc552078" Jan 22 07:26:21 crc kubenswrapper[4982]: E0122 07:26:21.720477 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:26:21 crc kubenswrapper[4982]: I0122 07:26:21.735831 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc572594-caf5-409f-a25f-31d354137ba1" path="/var/lib/kubelet/pods/cc572594-caf5-409f-a25f-31d354137ba1/volumes" Jan 22 07:26:21 crc kubenswrapper[4982]: I0122 07:26:21.757425 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-image-upload-7b97d6bc64-kkzhq" podStartSLOduration=11.203161668 podStartE2EDuration="55.757403268s" podCreationTimestamp="2026-01-22 07:25:26 +0000 UTC" firstStartedPulling="2026-01-22 07:25:27.355379859 +0000 UTC m=+5988.194017862" lastFinishedPulling="2026-01-22 07:26:11.909621459 +0000 UTC m=+6032.748259462" observedRunningTime="2026-01-22 07:26:21.736677729 +0000 UTC m=+6042.575315742" watchObservedRunningTime="2026-01-22 07:26:21.757403268 +0000 UTC m=+6042.596041281" Jan 22 07:26:35 crc kubenswrapper[4982]: I0122 07:26:35.719572 4982 scope.go:117] "RemoveContainer" containerID="e86d7de3d5541f26c30d5e3150518e2a21550ebe9adbca61610c9190dc552078" Jan 22 07:26:35 crc kubenswrapper[4982]: E0122 07:26:35.720660 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
Jan 22 07:26:37 crc kubenswrapper[4982]: I0122 07:26:37.766950 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-nl6xj"]
Jan 22 07:26:37 crc kubenswrapper[4982]: E0122 07:26:37.767659 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71ed9098-7666-4407-9354-f68f4a17c806" containerName="octavia-db-sync"
Jan 22 07:26:37 crc kubenswrapper[4982]: I0122 07:26:37.767685 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="71ed9098-7666-4407-9354-f68f4a17c806" containerName="octavia-db-sync"
Jan 22 07:26:37 crc kubenswrapper[4982]: E0122 07:26:37.767701 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71ed9098-7666-4407-9354-f68f4a17c806" containerName="init"
Jan 22 07:26:37 crc kubenswrapper[4982]: I0122 07:26:37.767707 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="71ed9098-7666-4407-9354-f68f4a17c806" containerName="init"
Jan 22 07:26:37 crc kubenswrapper[4982]: I0122 07:26:37.767912 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="71ed9098-7666-4407-9354-f68f4a17c806" containerName="octavia-db-sync"
Jan 22 07:26:37 crc kubenswrapper[4982]: I0122 07:26:37.769341 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nl6xj"
Jan 22 07:26:37 crc kubenswrapper[4982]: I0122 07:26:37.783953 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nl6xj"]
Jan 22 07:26:37 crc kubenswrapper[4982]: I0122 07:26:37.870301 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66379d95-fec3-44c4-8885-604f23dc440f-utilities\") pod \"community-operators-nl6xj\" (UID: \"66379d95-fec3-44c4-8885-604f23dc440f\") " pod="openshift-marketplace/community-operators-nl6xj"
Jan 22 07:26:37 crc kubenswrapper[4982]: I0122 07:26:37.870807 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66379d95-fec3-44c4-8885-604f23dc440f-catalog-content\") pod \"community-operators-nl6xj\" (UID: \"66379d95-fec3-44c4-8885-604f23dc440f\") " pod="openshift-marketplace/community-operators-nl6xj"
Jan 22 07:26:37 crc kubenswrapper[4982]: I0122 07:26:37.870947 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92kcp\" (UniqueName: \"kubernetes.io/projected/66379d95-fec3-44c4-8885-604f23dc440f-kube-api-access-92kcp\") pod \"community-operators-nl6xj\" (UID: \"66379d95-fec3-44c4-8885-604f23dc440f\") " pod="openshift-marketplace/community-operators-nl6xj"
Jan 22 07:26:37 crc kubenswrapper[4982]: I0122 07:26:37.972718 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66379d95-fec3-44c4-8885-604f23dc440f-utilities\") pod \"community-operators-nl6xj\" (UID: \"66379d95-fec3-44c4-8885-604f23dc440f\") " pod="openshift-marketplace/community-operators-nl6xj"
Jan 22 07:26:37 crc kubenswrapper[4982]: I0122 07:26:37.972787 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66379d95-fec3-44c4-8885-604f23dc440f-catalog-content\") pod \"community-operators-nl6xj\" (UID: \"66379d95-fec3-44c4-8885-604f23dc440f\") " pod="openshift-marketplace/community-operators-nl6xj"
Jan 22 07:26:37 crc kubenswrapper[4982]: I0122 07:26:37.972826 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92kcp\" (UniqueName: \"kubernetes.io/projected/66379d95-fec3-44c4-8885-604f23dc440f-kube-api-access-92kcp\") pod \"community-operators-nl6xj\" (UID: \"66379d95-fec3-44c4-8885-604f23dc440f\") " pod="openshift-marketplace/community-operators-nl6xj"
Jan 22 07:26:37 crc kubenswrapper[4982]: I0122 07:26:37.973278 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66379d95-fec3-44c4-8885-604f23dc440f-utilities\") pod \"community-operators-nl6xj\" (UID: \"66379d95-fec3-44c4-8885-604f23dc440f\") " pod="openshift-marketplace/community-operators-nl6xj"
Jan 22 07:26:37 crc kubenswrapper[4982]: I0122 07:26:37.973349 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66379d95-fec3-44c4-8885-604f23dc440f-catalog-content\") pod \"community-operators-nl6xj\" (UID: \"66379d95-fec3-44c4-8885-604f23dc440f\") " pod="openshift-marketplace/community-operators-nl6xj"
Jan 22 07:26:37 crc kubenswrapper[4982]: I0122 07:26:37.996413 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92kcp\" (UniqueName: \"kubernetes.io/projected/66379d95-fec3-44c4-8885-604f23dc440f-kube-api-access-92kcp\") pod \"community-operators-nl6xj\" (UID: \"66379d95-fec3-44c4-8885-604f23dc440f\") " pod="openshift-marketplace/community-operators-nl6xj"
Jan 22 07:26:38 crc kubenswrapper[4982]: I0122 07:26:38.091721 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nl6xj"
Jan 22 07:26:43 crc kubenswrapper[4982]: I0122 07:26:43.865896 4982 trace.go:236] Trace[1554910671]: "iptables ChainExists" (22-Jan-2026 07:26:39.723) (total time: 4142ms):
Jan 22 07:26:43 crc kubenswrapper[4982]: Trace[1554910671]: [4.142235192s] [4.142235192s] END
Jan 22 07:26:43 crc kubenswrapper[4982]: I0122 07:26:43.872266 4982 trace.go:236] Trace[222920915]: "iptables ChainExists" (22-Jan-2026 07:26:39.726) (total time: 4146ms):
Jan 22 07:26:43 crc kubenswrapper[4982]: Trace[222920915]: [4.146164279s] [4.146164279s] END
Jan 22 07:26:44 crc kubenswrapper[4982]: I0122 07:26:44.306819 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nl6xj"]
Jan 22 07:26:45 crc kubenswrapper[4982]: I0122 07:26:45.077601 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nl6xj" event={"ID":"66379d95-fec3-44c4-8885-604f23dc440f","Type":"ContainerStarted","Data":"7cedfe6ff115b6751383cc68fb55d4b0a42467a433d96e1cf7fb975a54525505"}
Jan 22 07:26:46 crc kubenswrapper[4982]: I0122 07:26:46.087984 4982 generic.go:334] "Generic (PLEG): container finished" podID="66379d95-fec3-44c4-8885-604f23dc440f" containerID="0975e426cbd86fe04bbb4f5b5fc7ee81ff433f26b5f74440eac37165bc587e44" exitCode=0
Jan 22 07:26:46 crc kubenswrapper[4982]: I0122 07:26:46.088077 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nl6xj" event={"ID":"66379d95-fec3-44c4-8885-604f23dc440f","Type":"ContainerDied","Data":"0975e426cbd86fe04bbb4f5b5fc7ee81ff433f26b5f74440eac37165bc587e44"}
Jan 22 07:26:49 crc kubenswrapper[4982]: I0122 07:26:49.058320 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-13ab-account-create-update-bksl7"]
Jan 22 07:26:49 crc kubenswrapper[4982]: I0122 07:26:49.071866 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-ff6jz"]
Jan 22 07:26:49 crc kubenswrapper[4982]: I0122 07:26:49.082537 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-13ab-account-create-update-bksl7"]
Jan 22 07:26:49 crc kubenswrapper[4982]: I0122 07:26:49.092048 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-ff6jz"]
Jan 22 07:26:49 crc kubenswrapper[4982]: I0122 07:26:49.737210 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7dc8696d-82e4-4a14-a3c0-b265a7f39b62" path="/var/lib/kubelet/pods/7dc8696d-82e4-4a14-a3c0-b265a7f39b62/volumes"
Jan 22 07:26:49 crc kubenswrapper[4982]: I0122 07:26:49.738428 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d616c023-d46f-43c4-bd46-a854aa01403c" path="/var/lib/kubelet/pods/d616c023-d46f-43c4-bd46-a854aa01403c/volumes"
Jan 22 07:26:50 crc kubenswrapper[4982]: I0122 07:26:50.719114 4982 scope.go:117] "RemoveContainer" containerID="e86d7de3d5541f26c30d5e3150518e2a21550ebe9adbca61610c9190dc552078"
Jan 22 07:26:50 crc kubenswrapper[4982]: E0122 07:26:50.719592 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 07:26:52 crc kubenswrapper[4982]: I0122 07:26:52.157974 4982 generic.go:334] "Generic (PLEG): container finished" podID="66379d95-fec3-44c4-8885-604f23dc440f" containerID="ab3f9339fa3a4ce2d240ea47d46e320db10d1f47b609e766446ca4ac5edc4a2a" exitCode=0
Jan 22 07:26:52 crc kubenswrapper[4982]: I0122 07:26:52.158073 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nl6xj" event={"ID":"66379d95-fec3-44c4-8885-604f23dc440f","Type":"ContainerDied","Data":"ab3f9339fa3a4ce2d240ea47d46e320db10d1f47b609e766446ca4ac5edc4a2a"}
Jan 22 07:26:54 crc kubenswrapper[4982]: I0122 07:26:54.038843 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-image-upload-7b97d6bc64-kkzhq"]
Jan 22 07:26:54 crc kubenswrapper[4982]: I0122 07:26:54.039120 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/octavia-image-upload-7b97d6bc64-kkzhq" podUID="b83000aa-7a72-4f5c-976d-4716c51c0070" containerName="octavia-amphora-httpd" containerID="cri-o://f0951b1df0d28eaa08b65a254efe685792f1af40fb14ba1d15329d86deb652ca" gracePeriod=30
Jan 22 07:26:55 crc kubenswrapper[4982]: I0122 07:26:55.200022 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nl6xj" event={"ID":"66379d95-fec3-44c4-8885-604f23dc440f","Type":"ContainerStarted","Data":"ff67c9d1e2b58bd7b206e5c72e7cb8e42815df578a898421dc8bc44f5c8d480a"}
Jan 22 07:26:55 crc kubenswrapper[4982]: I0122 07:26:55.202830 4982 generic.go:334] "Generic (PLEG): container finished" podID="b83000aa-7a72-4f5c-976d-4716c51c0070" containerID="f0951b1df0d28eaa08b65a254efe685792f1af40fb14ba1d15329d86deb652ca" exitCode=0
Jan 22 07:26:55 crc kubenswrapper[4982]: I0122 07:26:55.202908 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-7b97d6bc64-kkzhq" event={"ID":"b83000aa-7a72-4f5c-976d-4716c51c0070","Type":"ContainerDied","Data":"f0951b1df0d28eaa08b65a254efe685792f1af40fb14ba1d15329d86deb652ca"}
Jan 22 07:26:55 crc kubenswrapper[4982]: I0122 07:26:55.438702 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-7b97d6bc64-kkzhq"
Jan 22 07:26:55 crc kubenswrapper[4982]: I0122 07:26:55.529543 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/b83000aa-7a72-4f5c-976d-4716c51c0070-amphora-image\") pod \"b83000aa-7a72-4f5c-976d-4716c51c0070\" (UID: \"b83000aa-7a72-4f5c-976d-4716c51c0070\") "
Jan 22 07:26:55 crc kubenswrapper[4982]: I0122 07:26:55.529626 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b83000aa-7a72-4f5c-976d-4716c51c0070-httpd-config\") pod \"b83000aa-7a72-4f5c-976d-4716c51c0070\" (UID: \"b83000aa-7a72-4f5c-976d-4716c51c0070\") "
Jan 22 07:26:55 crc kubenswrapper[4982]: I0122 07:26:55.565786 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b83000aa-7a72-4f5c-976d-4716c51c0070-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "b83000aa-7a72-4f5c-976d-4716c51c0070" (UID: "b83000aa-7a72-4f5c-976d-4716c51c0070"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 07:26:55 crc kubenswrapper[4982]: I0122 07:26:55.596007 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b83000aa-7a72-4f5c-976d-4716c51c0070-amphora-image" (OuterVolumeSpecName: "amphora-image") pod "b83000aa-7a72-4f5c-976d-4716c51c0070" (UID: "b83000aa-7a72-4f5c-976d-4716c51c0070"). InnerVolumeSpecName "amphora-image". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 07:26:55 crc kubenswrapper[4982]: I0122 07:26:55.632115 4982 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b83000aa-7a72-4f5c-976d-4716c51c0070-httpd-config\") on node \"crc\" DevicePath \"\""
Jan 22 07:26:55 crc kubenswrapper[4982]: I0122 07:26:55.632154 4982 reconciler_common.go:293] "Volume detached for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/b83000aa-7a72-4f5c-976d-4716c51c0070-amphora-image\") on node \"crc\" DevicePath \"\""
Jan 22 07:26:56 crc kubenswrapper[4982]: I0122 07:26:56.212181 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-7b97d6bc64-kkzhq"
Jan 22 07:26:56 crc kubenswrapper[4982]: I0122 07:26:56.212996 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-7b97d6bc64-kkzhq" event={"ID":"b83000aa-7a72-4f5c-976d-4716c51c0070","Type":"ContainerDied","Data":"fe9ac7993537148031348691a8d97ee7112ec960b6e74f6243815a5387173ecd"}
Jan 22 07:26:56 crc kubenswrapper[4982]: I0122 07:26:56.213027 4982 scope.go:117] "RemoveContainer" containerID="f0951b1df0d28eaa08b65a254efe685792f1af40fb14ba1d15329d86deb652ca"
Jan 22 07:26:56 crc kubenswrapper[4982]: I0122 07:26:56.243222 4982 scope.go:117] "RemoveContainer" containerID="e4a714809762dbeaa173efe40fae31c8c7e92c853e12864692690008bd1ba99e"
Jan 22 07:26:56 crc kubenswrapper[4982]: I0122 07:26:56.245224 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-nl6xj" podStartSLOduration=10.681840408 podStartE2EDuration="19.245204273s" podCreationTimestamp="2026-01-22 07:26:37 +0000 UTC" firstStartedPulling="2026-01-22 07:26:46.090844916 +0000 UTC m=+6066.929482929" lastFinishedPulling="2026-01-22 07:26:54.654208791 +0000 UTC m=+6075.492846794" observedRunningTime="2026-01-22 07:26:56.22840985 +0000 UTC m=+6077.067047853" watchObservedRunningTime="2026-01-22 07:26:56.245204273 +0000 UTC m=+6077.083842276"
Jan 22 07:26:56 crc kubenswrapper[4982]: I0122 07:26:56.260331 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-image-upload-7b97d6bc64-kkzhq"]
Jan 22 07:26:56 crc kubenswrapper[4982]: I0122 07:26:56.271958 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-image-upload-7b97d6bc64-kkzhq"]
Jan 22 07:26:57 crc kubenswrapper[4982]: I0122 07:26:57.740439 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b83000aa-7a72-4f5c-976d-4716c51c0070" path="/var/lib/kubelet/pods/b83000aa-7a72-4f5c-976d-4716c51c0070/volumes"
Jan 22 07:26:58 crc kubenswrapper[4982]: I0122 07:26:58.092333 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-nl6xj"
Jan 22 07:26:58 crc kubenswrapper[4982]: I0122 07:26:58.093919 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-nl6xj"
Jan 22 07:26:58 crc kubenswrapper[4982]: I0122 07:26:58.145775 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-nl6xj"
Jan 22 07:27:00 crc kubenswrapper[4982]: I0122 07:27:00.039616 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-ssz98"]
Jan 22 07:27:00 crc kubenswrapper[4982]: I0122 07:27:00.048658 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-ssz98"]
Jan 22 07:27:01 crc kubenswrapper[4982]: I0122 07:27:01.733516 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe56fa86-31ae-4c80-b1e5-ae439cd33588" path="/var/lib/kubelet/pods/fe56fa86-31ae-4c80-b1e5-ae439cd33588/volumes"
Jan 22 07:27:04 crc kubenswrapper[4982]: I0122 07:27:04.877675 4982 scope.go:117] "RemoveContainer" containerID="623a8e9c01aad45ebea0f753127bd0ad71cfd276ef4199ddefaeca0304df9e6f"
Jan 22 07:27:04 crc kubenswrapper[4982]: E0122 07:27:04.929448 4982 kubelet.go:2526] "Housekeeping took longer than expected" err="housekeeping took too long" expected="1s" actual="3.21s"
Jan 22 07:27:04 crc kubenswrapper[4982]: I0122 07:27:04.949816 4982 scope.go:117] "RemoveContainer" containerID="e86d7de3d5541f26c30d5e3150518e2a21550ebe9adbca61610c9190dc552078"
Jan 22 07:27:04 crc kubenswrapper[4982]: E0122 07:27:04.950394 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 07:27:04 crc kubenswrapper[4982]: I0122 07:27:04.978629 4982 scope.go:117] "RemoveContainer" containerID="81f6c7aced0303c529992bf97c04ef9a3ba3c8ea445cdb1aa7dd5a731632d16f"
Jan 22 07:27:05 crc kubenswrapper[4982]: I0122 07:27:05.025086 4982 scope.go:117] "RemoveContainer" containerID="8c0c5c8f5d0268219022e3a0d3a8996b4f72ba2d3516a2f360ce1e767696eebe"
Jan 22 07:27:05 crc kubenswrapper[4982]: I0122 07:27:05.065644 4982 scope.go:117] "RemoveContainer" containerID="e1ad03a89efc3ad91fd023e9f8816b319215301524b8089f74efe076dc0ad631"
Jan 22 07:27:05 crc kubenswrapper[4982]: I0122 07:27:05.104511 4982 scope.go:117] "RemoveContainer" containerID="cf075430cd109d549993ac7c9f8dab26b6eddc22f78e512d93c12e0470c0a501"
Jan 22 07:27:05 crc kubenswrapper[4982]: I0122 07:27:05.142773 4982 scope.go:117] "RemoveContainer" containerID="d1cda6062ceb7c495ef42bee5a9cc32e73da65d43a1664cdd8d25126479b1d35"
Jan 22 07:27:08 crc kubenswrapper[4982]: I0122 07:27:08.139202 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-nl6xj"
Jan 22 07:27:08 crc kubenswrapper[4982]: I0122 07:27:08.199161 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-nl6xj"]
Jan 22 07:27:08 crc kubenswrapper[4982]: I0122 07:27:08.925843 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-nl6xj" podUID="66379d95-fec3-44c4-8885-604f23dc440f" containerName="registry-server" containerID="cri-o://ff67c9d1e2b58bd7b206e5c72e7cb8e42815df578a898421dc8bc44f5c8d480a" gracePeriod=2
Jan 22 07:27:12 crc kubenswrapper[4982]: I0122 07:27:12.965244 4982 generic.go:334] "Generic (PLEG): container finished" podID="66379d95-fec3-44c4-8885-604f23dc440f" containerID="ff67c9d1e2b58bd7b206e5c72e7cb8e42815df578a898421dc8bc44f5c8d480a" exitCode=0
Jan 22 07:27:12 crc kubenswrapper[4982]: I0122 07:27:12.965312 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nl6xj" event={"ID":"66379d95-fec3-44c4-8885-604f23dc440f","Type":"ContainerDied","Data":"ff67c9d1e2b58bd7b206e5c72e7cb8e42815df578a898421dc8bc44f5c8d480a"}
Jan 22 07:27:13 crc kubenswrapper[4982]: I0122 07:27:13.764715 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nl6xj"
Jan 22 07:27:13 crc kubenswrapper[4982]: I0122 07:27:13.890328 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-92kcp\" (UniqueName: \"kubernetes.io/projected/66379d95-fec3-44c4-8885-604f23dc440f-kube-api-access-92kcp\") pod \"66379d95-fec3-44c4-8885-604f23dc440f\" (UID: \"66379d95-fec3-44c4-8885-604f23dc440f\") "
Jan 22 07:27:13 crc kubenswrapper[4982]: I0122 07:27:13.890696 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66379d95-fec3-44c4-8885-604f23dc440f-utilities\") pod \"66379d95-fec3-44c4-8885-604f23dc440f\" (UID: \"66379d95-fec3-44c4-8885-604f23dc440f\") "
Jan 22 07:27:13 crc kubenswrapper[4982]: I0122 07:27:13.891411 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66379d95-fec3-44c4-8885-604f23dc440f-utilities" (OuterVolumeSpecName: "utilities") pod "66379d95-fec3-44c4-8885-604f23dc440f" (UID: "66379d95-fec3-44c4-8885-604f23dc440f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 07:27:13 crc kubenswrapper[4982]: I0122 07:27:13.891487 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66379d95-fec3-44c4-8885-604f23dc440f-catalog-content\") pod \"66379d95-fec3-44c4-8885-604f23dc440f\" (UID: \"66379d95-fec3-44c4-8885-604f23dc440f\") "
Jan 22 07:27:13 crc kubenswrapper[4982]: I0122 07:27:13.892011 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66379d95-fec3-44c4-8885-604f23dc440f-utilities\") on node \"crc\" DevicePath \"\""
Jan 22 07:27:13 crc kubenswrapper[4982]: I0122 07:27:13.895642 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66379d95-fec3-44c4-8885-604f23dc440f-kube-api-access-92kcp" (OuterVolumeSpecName: "kube-api-access-92kcp") pod "66379d95-fec3-44c4-8885-604f23dc440f" (UID: "66379d95-fec3-44c4-8885-604f23dc440f"). InnerVolumeSpecName "kube-api-access-92kcp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 07:27:13 crc kubenswrapper[4982]: I0122 07:27:13.949155 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66379d95-fec3-44c4-8885-604f23dc440f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "66379d95-fec3-44c4-8885-604f23dc440f" (UID: "66379d95-fec3-44c4-8885-604f23dc440f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 07:27:13 crc kubenswrapper[4982]: I0122 07:27:13.981182 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nl6xj" event={"ID":"66379d95-fec3-44c4-8885-604f23dc440f","Type":"ContainerDied","Data":"7cedfe6ff115b6751383cc68fb55d4b0a42467a433d96e1cf7fb975a54525505"}
Jan 22 07:27:13 crc kubenswrapper[4982]: I0122 07:27:13.981232 4982 scope.go:117] "RemoveContainer" containerID="ff67c9d1e2b58bd7b206e5c72e7cb8e42815df578a898421dc8bc44f5c8d480a"
Jan 22 07:27:13 crc kubenswrapper[4982]: I0122 07:27:13.982021 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nl6xj"
Jan 22 07:27:13 crc kubenswrapper[4982]: I0122 07:27:13.996291 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66379d95-fec3-44c4-8885-604f23dc440f-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 22 07:27:13 crc kubenswrapper[4982]: I0122 07:27:13.996322 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-92kcp\" (UniqueName: \"kubernetes.io/projected/66379d95-fec3-44c4-8885-604f23dc440f-kube-api-access-92kcp\") on node \"crc\" DevicePath \"\""
Jan 22 07:27:14 crc kubenswrapper[4982]: I0122 07:27:14.006239 4982 scope.go:117] "RemoveContainer" containerID="ab3f9339fa3a4ce2d240ea47d46e320db10d1f47b609e766446ca4ac5edc4a2a"
Jan 22 07:27:14 crc kubenswrapper[4982]: I0122 07:27:14.034995 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-nl6xj"]
Jan 22 07:27:14 crc kubenswrapper[4982]: I0122 07:27:14.045069 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-nl6xj"]
Jan 22 07:27:14 crc kubenswrapper[4982]: I0122 07:27:14.047901 4982 scope.go:117] "RemoveContainer" containerID="0975e426cbd86fe04bbb4f5b5fc7ee81ff433f26b5f74440eac37165bc587e44"
Jan 22 07:27:15 crc kubenswrapper[4982]: I0122 07:27:15.733482 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66379d95-fec3-44c4-8885-604f23dc440f" path="/var/lib/kubelet/pods/66379d95-fec3-44c4-8885-604f23dc440f/volumes"
Jan 22 07:27:19 crc kubenswrapper[4982]: I0122 07:27:19.724807 4982 scope.go:117] "RemoveContainer" containerID="e86d7de3d5541f26c30d5e3150518e2a21550ebe9adbca61610c9190dc552078"
Jan 22 07:27:19 crc kubenswrapper[4982]: E0122 07:27:19.725570 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 07:27:32 crc kubenswrapper[4982]: I0122 07:27:32.719120 4982 scope.go:117] "RemoveContainer" containerID="e86d7de3d5541f26c30d5e3150518e2a21550ebe9adbca61610c9190dc552078"
Jan 22 07:27:32 crc kubenswrapper[4982]: E0122 07:27:32.719770 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:27:42 crc kubenswrapper[4982]: I0122 07:27:42.066316 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-60c8-account-create-update-tww9j"] Jan 22 07:27:42 crc kubenswrapper[4982]: I0122 07:27:42.075784 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-60c8-account-create-update-tww9j"] Jan 22 07:27:42 crc kubenswrapper[4982]: I0122 07:27:42.083927 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-rpjv8"] Jan 22 07:27:42 crc kubenswrapper[4982]: I0122 07:27:42.105964 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-rpjv8"] Jan 22 07:27:43 crc kubenswrapper[4982]: I0122 07:27:43.740923 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bd7c64b-dbde-48c4-9660-973f7d090527" path="/var/lib/kubelet/pods/7bd7c64b-dbde-48c4-9660-973f7d090527/volumes" Jan 22 07:27:43 crc kubenswrapper[4982]: I0122 07:27:43.742479 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f867a75e-cf76-4cd4-8358-cccc26e0eb1d" path="/var/lib/kubelet/pods/f867a75e-cf76-4cd4-8358-cccc26e0eb1d/volumes" Jan 22 07:27:44 crc kubenswrapper[4982]: I0122 07:27:44.778307 4982 scope.go:117] "RemoveContainer" containerID="e86d7de3d5541f26c30d5e3150518e2a21550ebe9adbca61610c9190dc552078" Jan 22 07:27:44 crc kubenswrapper[4982]: E0122 07:27:44.779120 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.565473 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-64d5fcfdbf-4w98n"] Jan 22 07:27:50 crc kubenswrapper[4982]: E0122 07:27:50.566400 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b83000aa-7a72-4f5c-976d-4716c51c0070" containerName="init" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.566415 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b83000aa-7a72-4f5c-976d-4716c51c0070" containerName="init" Jan 22 07:27:50 crc kubenswrapper[4982]: E0122 07:27:50.566436 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b83000aa-7a72-4f5c-976d-4716c51c0070" containerName="octavia-amphora-httpd" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.566444 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b83000aa-7a72-4f5c-976d-4716c51c0070" containerName="octavia-amphora-httpd" Jan 22 07:27:50 crc kubenswrapper[4982]: E0122 07:27:50.566457 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66379d95-fec3-44c4-8885-604f23dc440f" containerName="extract-content" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.566465 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="66379d95-fec3-44c4-8885-604f23dc440f" containerName="extract-content" Jan 22 07:27:50 crc kubenswrapper[4982]: E0122 07:27:50.566488 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66379d95-fec3-44c4-8885-604f23dc440f" containerName="extract-utilities" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 
07:27:50.566494 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="66379d95-fec3-44c4-8885-604f23dc440f" containerName="extract-utilities" Jan 22 07:27:50 crc kubenswrapper[4982]: E0122 07:27:50.566508 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66379d95-fec3-44c4-8885-604f23dc440f" containerName="registry-server" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.566513 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="66379d95-fec3-44c4-8885-604f23dc440f" containerName="registry-server" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.566677 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b83000aa-7a72-4f5c-976d-4716c51c0070" containerName="octavia-amphora-httpd" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.566689 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="66379d95-fec3-44c4-8885-604f23dc440f" containerName="registry-server" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.567636 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-64d5fcfdbf-4w98n" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.569190 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.570318 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.570474 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-cgwnn" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.572652 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.596576 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-64d5fcfdbf-4w98n"] Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.620057 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.620387 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="d54d454d-6a5e-44e2-bb24-ca5783182099" containerName="glance-log" containerID="cri-o://992a53867c4a533ddc7603f80e288115bbe56be21aa45483c21b4394c6ac9c6e" gracePeriod=30 Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.626484 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="d54d454d-6a5e-44e2-bb24-ca5783182099" containerName="glance-httpd" containerID="cri-o://fb1a5a2e9419aadbe51b3516c9aaa69f2b8583709e065ace2eb4583bdae9b71a" gracePeriod=30 Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.683487 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.683753 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="056a895d-8530-4180-a7fb-42e6bc47a2b5" containerName="glance-log" containerID="cri-o://107d3502545453e5c563d9412a282714fd9d07c1c857236ba0ca9fb27a76bb74" gracePeriod=30 Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.683923 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" 
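
The "SyncLoop DELETE" entries followed by "Killing container with a grace period ... gracePeriod=30" above are the node side of a graceful pod deletion: the API server marks the pod for deletion with a grace period, and the kubelet signals each container and waits up to that long before a hard kill. A minimal client-go sketch of the requesting side; the names are borrowed from the log and clientset construction is omitted:

package main

import (
	"context"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
)

// deleteGlance issues the graceful deletion whose node-side effects are logged above.
func deleteGlance(ctx context.Context, client kubernetes.Interface) error {
	grace := int64(30) // matches gracePeriod=30 in the kubelet entries
	return client.CoreV1().Pods("openstack").Delete(ctx, "glance-default-external-api-0",
		metav1.DeleteOptions{GracePeriodSeconds: &grace})
}

func main() {}
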
podUID="056a895d-8530-4180-a7fb-42e6bc47a2b5" containerName="glance-httpd" containerID="cri-o://f9262139b9c3e5cfcce5a204b83a9c760bb2a4290a1e2b4102a25d2e366247ac" gracePeriod=30 Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.699772 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-99b96d4f5-7d75d"] Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.703862 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-99b96d4f5-7d75d" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.717116 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-99b96d4f5-7d75d"] Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.746937 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/339a4c9e-4b9f-4806-9506-272ad5d7fa89-scripts\") pod \"horizon-64d5fcfdbf-4w98n\" (UID: \"339a4c9e-4b9f-4806-9506-272ad5d7fa89\") " pod="openstack/horizon-64d5fcfdbf-4w98n" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.746998 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/339a4c9e-4b9f-4806-9506-272ad5d7fa89-logs\") pod \"horizon-64d5fcfdbf-4w98n\" (UID: \"339a4c9e-4b9f-4806-9506-272ad5d7fa89\") " pod="openstack/horizon-64d5fcfdbf-4w98n" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.747021 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8d4bz\" (UniqueName: \"kubernetes.io/projected/339a4c9e-4b9f-4806-9506-272ad5d7fa89-kube-api-access-8d4bz\") pod \"horizon-64d5fcfdbf-4w98n\" (UID: \"339a4c9e-4b9f-4806-9506-272ad5d7fa89\") " pod="openstack/horizon-64d5fcfdbf-4w98n" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.747111 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/339a4c9e-4b9f-4806-9506-272ad5d7fa89-horizon-secret-key\") pod \"horizon-64d5fcfdbf-4w98n\" (UID: \"339a4c9e-4b9f-4806-9506-272ad5d7fa89\") " pod="openstack/horizon-64d5fcfdbf-4w98n" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.747164 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/339a4c9e-4b9f-4806-9506-272ad5d7fa89-config-data\") pod \"horizon-64d5fcfdbf-4w98n\" (UID: \"339a4c9e-4b9f-4806-9506-272ad5d7fa89\") " pod="openstack/horizon-64d5fcfdbf-4w98n" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.848959 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/339a4c9e-4b9f-4806-9506-272ad5d7fa89-horizon-secret-key\") pod \"horizon-64d5fcfdbf-4w98n\" (UID: \"339a4c9e-4b9f-4806-9506-272ad5d7fa89\") " pod="openstack/horizon-64d5fcfdbf-4w98n" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.849008 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d6de4e3a-13ce-476a-bff4-cdabdcc138c8-config-data\") pod \"horizon-99b96d4f5-7d75d\" (UID: \"d6de4e3a-13ce-476a-bff4-cdabdcc138c8\") " pod="openstack/horizon-99b96d4f5-7d75d" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.849034 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6de4e3a-13ce-476a-bff4-cdabdcc138c8-logs\") pod \"horizon-99b96d4f5-7d75d\" (UID: \"d6de4e3a-13ce-476a-bff4-cdabdcc138c8\") " pod="openstack/horizon-99b96d4f5-7d75d" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.849096 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/339a4c9e-4b9f-4806-9506-272ad5d7fa89-config-data\") pod \"horizon-64d5fcfdbf-4w98n\" (UID: \"339a4c9e-4b9f-4806-9506-272ad5d7fa89\") " pod="openstack/horizon-64d5fcfdbf-4w98n" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.849116 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d6de4e3a-13ce-476a-bff4-cdabdcc138c8-horizon-secret-key\") pod \"horizon-99b96d4f5-7d75d\" (UID: \"d6de4e3a-13ce-476a-bff4-cdabdcc138c8\") " pod="openstack/horizon-99b96d4f5-7d75d" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.849204 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d6de4e3a-13ce-476a-bff4-cdabdcc138c8-scripts\") pod \"horizon-99b96d4f5-7d75d\" (UID: \"d6de4e3a-13ce-476a-bff4-cdabdcc138c8\") " pod="openstack/horizon-99b96d4f5-7d75d" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.849222 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxhq7\" (UniqueName: \"kubernetes.io/projected/d6de4e3a-13ce-476a-bff4-cdabdcc138c8-kube-api-access-pxhq7\") pod \"horizon-99b96d4f5-7d75d\" (UID: \"d6de4e3a-13ce-476a-bff4-cdabdcc138c8\") " pod="openstack/horizon-99b96d4f5-7d75d" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.849261 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/339a4c9e-4b9f-4806-9506-272ad5d7fa89-scripts\") pod \"horizon-64d5fcfdbf-4w98n\" (UID: \"339a4c9e-4b9f-4806-9506-272ad5d7fa89\") " pod="openstack/horizon-64d5fcfdbf-4w98n" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.849285 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8d4bz\" (UniqueName: \"kubernetes.io/projected/339a4c9e-4b9f-4806-9506-272ad5d7fa89-kube-api-access-8d4bz\") pod \"horizon-64d5fcfdbf-4w98n\" (UID: \"339a4c9e-4b9f-4806-9506-272ad5d7fa89\") " pod="openstack/horizon-64d5fcfdbf-4w98n" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.849305 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/339a4c9e-4b9f-4806-9506-272ad5d7fa89-logs\") pod \"horizon-64d5fcfdbf-4w98n\" (UID: \"339a4c9e-4b9f-4806-9506-272ad5d7fa89\") " pod="openstack/horizon-64d5fcfdbf-4w98n" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.849654 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/339a4c9e-4b9f-4806-9506-272ad5d7fa89-logs\") pod \"horizon-64d5fcfdbf-4w98n\" (UID: \"339a4c9e-4b9f-4806-9506-272ad5d7fa89\") " pod="openstack/horizon-64d5fcfdbf-4w98n" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.850395 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/339a4c9e-4b9f-4806-9506-272ad5d7fa89-scripts\") pod \"horizon-64d5fcfdbf-4w98n\" (UID: \"339a4c9e-4b9f-4806-9506-272ad5d7fa89\") " pod="openstack/horizon-64d5fcfdbf-4w98n" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.850795 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/339a4c9e-4b9f-4806-9506-272ad5d7fa89-config-data\") pod \"horizon-64d5fcfdbf-4w98n\" (UID: \"339a4c9e-4b9f-4806-9506-272ad5d7fa89\") " pod="openstack/horizon-64d5fcfdbf-4w98n" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.856482 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/339a4c9e-4b9f-4806-9506-272ad5d7fa89-horizon-secret-key\") pod \"horizon-64d5fcfdbf-4w98n\" (UID: \"339a4c9e-4b9f-4806-9506-272ad5d7fa89\") " pod="openstack/horizon-64d5fcfdbf-4w98n" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.869995 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8d4bz\" (UniqueName: \"kubernetes.io/projected/339a4c9e-4b9f-4806-9506-272ad5d7fa89-kube-api-access-8d4bz\") pod \"horizon-64d5fcfdbf-4w98n\" (UID: \"339a4c9e-4b9f-4806-9506-272ad5d7fa89\") " pod="openstack/horizon-64d5fcfdbf-4w98n" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.902032 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-64d5fcfdbf-4w98n" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.950842 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d6de4e3a-13ce-476a-bff4-cdabdcc138c8-horizon-secret-key\") pod \"horizon-99b96d4f5-7d75d\" (UID: \"d6de4e3a-13ce-476a-bff4-cdabdcc138c8\") " pod="openstack/horizon-99b96d4f5-7d75d" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.950952 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d6de4e3a-13ce-476a-bff4-cdabdcc138c8-scripts\") pod \"horizon-99b96d4f5-7d75d\" (UID: \"d6de4e3a-13ce-476a-bff4-cdabdcc138c8\") " pod="openstack/horizon-99b96d4f5-7d75d" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.950974 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxhq7\" (UniqueName: \"kubernetes.io/projected/d6de4e3a-13ce-476a-bff4-cdabdcc138c8-kube-api-access-pxhq7\") pod \"horizon-99b96d4f5-7d75d\" (UID: \"d6de4e3a-13ce-476a-bff4-cdabdcc138c8\") " pod="openstack/horizon-99b96d4f5-7d75d" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.951074 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d6de4e3a-13ce-476a-bff4-cdabdcc138c8-config-data\") pod \"horizon-99b96d4f5-7d75d\" (UID: \"d6de4e3a-13ce-476a-bff4-cdabdcc138c8\") " pod="openstack/horizon-99b96d4f5-7d75d" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.951096 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6de4e3a-13ce-476a-bff4-cdabdcc138c8-logs\") pod \"horizon-99b96d4f5-7d75d\" (UID: \"d6de4e3a-13ce-476a-bff4-cdabdcc138c8\") " pod="openstack/horizon-99b96d4f5-7d75d" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.951544 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/d6de4e3a-13ce-476a-bff4-cdabdcc138c8-logs\") pod \"horizon-99b96d4f5-7d75d\" (UID: \"d6de4e3a-13ce-476a-bff4-cdabdcc138c8\") " pod="openstack/horizon-99b96d4f5-7d75d" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.952142 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d6de4e3a-13ce-476a-bff4-cdabdcc138c8-scripts\") pod \"horizon-99b96d4f5-7d75d\" (UID: \"d6de4e3a-13ce-476a-bff4-cdabdcc138c8\") " pod="openstack/horizon-99b96d4f5-7d75d" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.953054 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d6de4e3a-13ce-476a-bff4-cdabdcc138c8-config-data\") pod \"horizon-99b96d4f5-7d75d\" (UID: \"d6de4e3a-13ce-476a-bff4-cdabdcc138c8\") " pod="openstack/horizon-99b96d4f5-7d75d" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.954514 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d6de4e3a-13ce-476a-bff4-cdabdcc138c8-horizon-secret-key\") pod \"horizon-99b96d4f5-7d75d\" (UID: \"d6de4e3a-13ce-476a-bff4-cdabdcc138c8\") " pod="openstack/horizon-99b96d4f5-7d75d" Jan 22 07:27:50 crc kubenswrapper[4982]: I0122 07:27:50.971576 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxhq7\" (UniqueName: \"kubernetes.io/projected/d6de4e3a-13ce-476a-bff4-cdabdcc138c8-kube-api-access-pxhq7\") pod \"horizon-99b96d4f5-7d75d\" (UID: \"d6de4e3a-13ce-476a-bff4-cdabdcc138c8\") " pod="openstack/horizon-99b96d4f5-7d75d" Jan 22 07:27:51 crc kubenswrapper[4982]: I0122 07:27:51.064386 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-99b96d4f5-7d75d" Jan 22 07:27:51 crc kubenswrapper[4982]: I0122 07:27:51.250812 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-64d5fcfdbf-4w98n"] Jan 22 07:27:51 crc kubenswrapper[4982]: I0122 07:27:51.284087 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7b44bfdd57-k7hnf"] Jan 22 07:27:51 crc kubenswrapper[4982]: I0122 07:27:51.286887 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7b44bfdd57-k7hnf" Jan 22 07:27:51 crc kubenswrapper[4982]: I0122 07:27:51.304948 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7b44bfdd57-k7hnf"] Jan 22 07:27:51 crc kubenswrapper[4982]: I0122 07:27:51.334404 4982 generic.go:334] "Generic (PLEG): container finished" podID="056a895d-8530-4180-a7fb-42e6bc47a2b5" containerID="107d3502545453e5c563d9412a282714fd9d07c1c857236ba0ca9fb27a76bb74" exitCode=143 Jan 22 07:27:51 crc kubenswrapper[4982]: I0122 07:27:51.334475 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"056a895d-8530-4180-a7fb-42e6bc47a2b5","Type":"ContainerDied","Data":"107d3502545453e5c563d9412a282714fd9d07c1c857236ba0ca9fb27a76bb74"} Jan 22 07:27:51 crc kubenswrapper[4982]: I0122 07:27:51.336213 4982 generic.go:334] "Generic (PLEG): container finished" podID="d54d454d-6a5e-44e2-bb24-ca5783182099" containerID="992a53867c4a533ddc7603f80e288115bbe56be21aa45483c21b4394c6ac9c6e" exitCode=143 Jan 22 07:27:51 crc kubenswrapper[4982]: I0122 07:27:51.336234 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d54d454d-6a5e-44e2-bb24-ca5783182099","Type":"ContainerDied","Data":"992a53867c4a533ddc7603f80e288115bbe56be21aa45483c21b4394c6ac9c6e"} Jan 22 07:27:51 crc kubenswrapper[4982]: I0122 07:27:51.377728 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/db75fc6f-fad5-4212-be6b-e310f66ee471-config-data\") pod \"horizon-7b44bfdd57-k7hnf\" (UID: \"db75fc6f-fad5-4212-be6b-e310f66ee471\") " pod="openstack/horizon-7b44bfdd57-k7hnf" Jan 22 07:27:51 crc kubenswrapper[4982]: I0122 07:27:51.377787 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/db75fc6f-fad5-4212-be6b-e310f66ee471-horizon-secret-key\") pod \"horizon-7b44bfdd57-k7hnf\" (UID: \"db75fc6f-fad5-4212-be6b-e310f66ee471\") " pod="openstack/horizon-7b44bfdd57-k7hnf" Jan 22 07:27:51 crc kubenswrapper[4982]: I0122 07:27:51.377970 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db75fc6f-fad5-4212-be6b-e310f66ee471-scripts\") pod \"horizon-7b44bfdd57-k7hnf\" (UID: \"db75fc6f-fad5-4212-be6b-e310f66ee471\") " pod="openstack/horizon-7b44bfdd57-k7hnf" Jan 22 07:27:51 crc kubenswrapper[4982]: I0122 07:27:51.378227 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frqc2\" (UniqueName: \"kubernetes.io/projected/db75fc6f-fad5-4212-be6b-e310f66ee471-kube-api-access-frqc2\") pod \"horizon-7b44bfdd57-k7hnf\" (UID: \"db75fc6f-fad5-4212-be6b-e310f66ee471\") " pod="openstack/horizon-7b44bfdd57-k7hnf" Jan 22 07:27:51 crc kubenswrapper[4982]: I0122 07:27:51.378369 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db75fc6f-fad5-4212-be6b-e310f66ee471-logs\") pod \"horizon-7b44bfdd57-k7hnf\" (UID: \"db75fc6f-fad5-4212-be6b-e310f66ee471\") " pod="openstack/horizon-7b44bfdd57-k7hnf" Jan 22 07:27:51 crc kubenswrapper[4982]: I0122 07:27:51.475466 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-64d5fcfdbf-4w98n"] Jan 22 07:27:51 crc kubenswrapper[4982]: I0122 
07:27:51.479908 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/db75fc6f-fad5-4212-be6b-e310f66ee471-config-data\") pod \"horizon-7b44bfdd57-k7hnf\" (UID: \"db75fc6f-fad5-4212-be6b-e310f66ee471\") " pod="openstack/horizon-7b44bfdd57-k7hnf" Jan 22 07:27:51 crc kubenswrapper[4982]: I0122 07:27:51.479965 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/db75fc6f-fad5-4212-be6b-e310f66ee471-horizon-secret-key\") pod \"horizon-7b44bfdd57-k7hnf\" (UID: \"db75fc6f-fad5-4212-be6b-e310f66ee471\") " pod="openstack/horizon-7b44bfdd57-k7hnf" Jan 22 07:27:51 crc kubenswrapper[4982]: I0122 07:27:51.480010 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db75fc6f-fad5-4212-be6b-e310f66ee471-scripts\") pod \"horizon-7b44bfdd57-k7hnf\" (UID: \"db75fc6f-fad5-4212-be6b-e310f66ee471\") " pod="openstack/horizon-7b44bfdd57-k7hnf" Jan 22 07:27:51 crc kubenswrapper[4982]: I0122 07:27:51.480103 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frqc2\" (UniqueName: \"kubernetes.io/projected/db75fc6f-fad5-4212-be6b-e310f66ee471-kube-api-access-frqc2\") pod \"horizon-7b44bfdd57-k7hnf\" (UID: \"db75fc6f-fad5-4212-be6b-e310f66ee471\") " pod="openstack/horizon-7b44bfdd57-k7hnf" Jan 22 07:27:51 crc kubenswrapper[4982]: I0122 07:27:51.480161 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db75fc6f-fad5-4212-be6b-e310f66ee471-logs\") pod \"horizon-7b44bfdd57-k7hnf\" (UID: \"db75fc6f-fad5-4212-be6b-e310f66ee471\") " pod="openstack/horizon-7b44bfdd57-k7hnf" Jan 22 07:27:51 crc kubenswrapper[4982]: I0122 07:27:51.481093 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db75fc6f-fad5-4212-be6b-e310f66ee471-logs\") pod \"horizon-7b44bfdd57-k7hnf\" (UID: \"db75fc6f-fad5-4212-be6b-e310f66ee471\") " pod="openstack/horizon-7b44bfdd57-k7hnf" Jan 22 07:27:51 crc kubenswrapper[4982]: I0122 07:27:51.481780 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db75fc6f-fad5-4212-be6b-e310f66ee471-scripts\") pod \"horizon-7b44bfdd57-k7hnf\" (UID: \"db75fc6f-fad5-4212-be6b-e310f66ee471\") " pod="openstack/horizon-7b44bfdd57-k7hnf" Jan 22 07:27:51 crc kubenswrapper[4982]: I0122 07:27:51.482292 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/db75fc6f-fad5-4212-be6b-e310f66ee471-config-data\") pod \"horizon-7b44bfdd57-k7hnf\" (UID: \"db75fc6f-fad5-4212-be6b-e310f66ee471\") " pod="openstack/horizon-7b44bfdd57-k7hnf" Jan 22 07:27:51 crc kubenswrapper[4982]: I0122 07:27:51.488062 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/db75fc6f-fad5-4212-be6b-e310f66ee471-horizon-secret-key\") pod \"horizon-7b44bfdd57-k7hnf\" (UID: \"db75fc6f-fad5-4212-be6b-e310f66ee471\") " pod="openstack/horizon-7b44bfdd57-k7hnf" Jan 22 07:27:51 crc kubenswrapper[4982]: I0122 07:27:51.496966 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frqc2\" (UniqueName: 
\"kubernetes.io/projected/db75fc6f-fad5-4212-be6b-e310f66ee471-kube-api-access-frqc2\") pod \"horizon-7b44bfdd57-k7hnf\" (UID: \"db75fc6f-fad5-4212-be6b-e310f66ee471\") " pod="openstack/horizon-7b44bfdd57-k7hnf" Jan 22 07:27:51 crc kubenswrapper[4982]: I0122 07:27:51.616526 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7b44bfdd57-k7hnf" Jan 22 07:27:51 crc kubenswrapper[4982]: I0122 07:27:51.618513 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-99b96d4f5-7d75d"] Jan 22 07:27:52 crc kubenswrapper[4982]: I0122 07:27:52.073351 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7b44bfdd57-k7hnf"] Jan 22 07:27:52 crc kubenswrapper[4982]: I0122 07:27:52.348240 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-99b96d4f5-7d75d" event={"ID":"d6de4e3a-13ce-476a-bff4-cdabdcc138c8","Type":"ContainerStarted","Data":"e67e2798d1be111fb785d69ab542bea6a4a1a5165a6c4565cac7c0b117b27180"} Jan 22 07:27:52 crc kubenswrapper[4982]: I0122 07:27:52.349729 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-64d5fcfdbf-4w98n" event={"ID":"339a4c9e-4b9f-4806-9506-272ad5d7fa89","Type":"ContainerStarted","Data":"b54a51a19fe765413eead1b4036d34287acbf2ab9119586b0abb21357b95241b"} Jan 22 07:27:52 crc kubenswrapper[4982]: I0122 07:27:52.351059 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b44bfdd57-k7hnf" event={"ID":"db75fc6f-fad5-4212-be6b-e310f66ee471","Type":"ContainerStarted","Data":"671c066c1ae8a9f53bb2d5d8e91706d09a6980664638ff03ac1156488df5939f"} Jan 22 07:27:54 crc kubenswrapper[4982]: I0122 07:27:54.379375 4982 generic.go:334] "Generic (PLEG): container finished" podID="d54d454d-6a5e-44e2-bb24-ca5783182099" containerID="fb1a5a2e9419aadbe51b3516c9aaa69f2b8583709e065ace2eb4583bdae9b71a" exitCode=0 Jan 22 07:27:54 crc kubenswrapper[4982]: I0122 07:27:54.379449 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d54d454d-6a5e-44e2-bb24-ca5783182099","Type":"ContainerDied","Data":"fb1a5a2e9419aadbe51b3516c9aaa69f2b8583709e065ace2eb4583bdae9b71a"} Jan 22 07:27:54 crc kubenswrapper[4982]: I0122 07:27:54.382057 4982 generic.go:334] "Generic (PLEG): container finished" podID="056a895d-8530-4180-a7fb-42e6bc47a2b5" containerID="f9262139b9c3e5cfcce5a204b83a9c760bb2a4290a1e2b4102a25d2e366247ac" exitCode=0 Jan 22 07:27:54 crc kubenswrapper[4982]: I0122 07:27:54.382087 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"056a895d-8530-4180-a7fb-42e6bc47a2b5","Type":"ContainerDied","Data":"f9262139b9c3e5cfcce5a204b83a9c760bb2a4290a1e2b4102a25d2e366247ac"} Jan 22 07:27:54 crc kubenswrapper[4982]: I0122 07:27:54.630982 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 07:27:54 crc kubenswrapper[4982]: I0122 07:27:54.656497 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d54d454d-6a5e-44e2-bb24-ca5783182099-config-data\") pod \"d54d454d-6a5e-44e2-bb24-ca5783182099\" (UID: \"d54d454d-6a5e-44e2-bb24-ca5783182099\") " Jan 22 07:27:54 crc kubenswrapper[4982]: I0122 07:27:54.656554 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d54d454d-6a5e-44e2-bb24-ca5783182099-combined-ca-bundle\") pod \"d54d454d-6a5e-44e2-bb24-ca5783182099\" (UID: \"d54d454d-6a5e-44e2-bb24-ca5783182099\") " Jan 22 07:27:54 crc kubenswrapper[4982]: I0122 07:27:54.656583 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d54d454d-6a5e-44e2-bb24-ca5783182099-logs\") pod \"d54d454d-6a5e-44e2-bb24-ca5783182099\" (UID: \"d54d454d-6a5e-44e2-bb24-ca5783182099\") " Jan 22 07:27:54 crc kubenswrapper[4982]: I0122 07:27:54.656602 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2v8zh\" (UniqueName: \"kubernetes.io/projected/d54d454d-6a5e-44e2-bb24-ca5783182099-kube-api-access-2v8zh\") pod \"d54d454d-6a5e-44e2-bb24-ca5783182099\" (UID: \"d54d454d-6a5e-44e2-bb24-ca5783182099\") " Jan 22 07:27:54 crc kubenswrapper[4982]: I0122 07:27:54.656655 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d54d454d-6a5e-44e2-bb24-ca5783182099-scripts\") pod \"d54d454d-6a5e-44e2-bb24-ca5783182099\" (UID: \"d54d454d-6a5e-44e2-bb24-ca5783182099\") " Jan 22 07:27:54 crc kubenswrapper[4982]: I0122 07:27:54.656685 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d54d454d-6a5e-44e2-bb24-ca5783182099-ceph\") pod \"d54d454d-6a5e-44e2-bb24-ca5783182099\" (UID: \"d54d454d-6a5e-44e2-bb24-ca5783182099\") " Jan 22 07:27:54 crc kubenswrapper[4982]: I0122 07:27:54.656777 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d54d454d-6a5e-44e2-bb24-ca5783182099-httpd-run\") pod \"d54d454d-6a5e-44e2-bb24-ca5783182099\" (UID: \"d54d454d-6a5e-44e2-bb24-ca5783182099\") " Jan 22 07:27:54 crc kubenswrapper[4982]: I0122 07:27:54.657550 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d54d454d-6a5e-44e2-bb24-ca5783182099-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "d54d454d-6a5e-44e2-bb24-ca5783182099" (UID: "d54d454d-6a5e-44e2-bb24-ca5783182099"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:27:54 crc kubenswrapper[4982]: I0122 07:27:54.657882 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d54d454d-6a5e-44e2-bb24-ca5783182099-logs" (OuterVolumeSpecName: "logs") pod "d54d454d-6a5e-44e2-bb24-ca5783182099" (UID: "d54d454d-6a5e-44e2-bb24-ca5783182099"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:27:54 crc kubenswrapper[4982]: I0122 07:27:54.754392 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d54d454d-6a5e-44e2-bb24-ca5783182099-scripts" (OuterVolumeSpecName: "scripts") pod "d54d454d-6a5e-44e2-bb24-ca5783182099" (UID: "d54d454d-6a5e-44e2-bb24-ca5783182099"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:27:54 crc kubenswrapper[4982]: I0122 07:27:54.760257 4982 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d54d454d-6a5e-44e2-bb24-ca5783182099-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 22 07:27:54 crc kubenswrapper[4982]: I0122 07:27:54.760301 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d54d454d-6a5e-44e2-bb24-ca5783182099-logs\") on node \"crc\" DevicePath \"\"" Jan 22 07:27:54 crc kubenswrapper[4982]: I0122 07:27:54.760314 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d54d454d-6a5e-44e2-bb24-ca5783182099-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:27:54 crc kubenswrapper[4982]: I0122 07:27:54.772243 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d54d454d-6a5e-44e2-bb24-ca5783182099-ceph" (OuterVolumeSpecName: "ceph") pod "d54d454d-6a5e-44e2-bb24-ca5783182099" (UID: "d54d454d-6a5e-44e2-bb24-ca5783182099"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:27:54 crc kubenswrapper[4982]: I0122 07:27:54.772321 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d54d454d-6a5e-44e2-bb24-ca5783182099-kube-api-access-2v8zh" (OuterVolumeSpecName: "kube-api-access-2v8zh") pod "d54d454d-6a5e-44e2-bb24-ca5783182099" (UID: "d54d454d-6a5e-44e2-bb24-ca5783182099"). InnerVolumeSpecName "kube-api-access-2v8zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:27:54 crc kubenswrapper[4982]: I0122 07:27:54.803783 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d54d454d-6a5e-44e2-bb24-ca5783182099-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d54d454d-6a5e-44e2-bb24-ca5783182099" (UID: "d54d454d-6a5e-44e2-bb24-ca5783182099"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:27:54 crc kubenswrapper[4982]: I0122 07:27:54.862030 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d54d454d-6a5e-44e2-bb24-ca5783182099-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:27:54 crc kubenswrapper[4982]: I0122 07:27:54.862057 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2v8zh\" (UniqueName: \"kubernetes.io/projected/d54d454d-6a5e-44e2-bb24-ca5783182099-kube-api-access-2v8zh\") on node \"crc\" DevicePath \"\"" Jan 22 07:27:54 crc kubenswrapper[4982]: I0122 07:27:54.862068 4982 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d54d454d-6a5e-44e2-bb24-ca5783182099-ceph\") on node \"crc\" DevicePath \"\"" Jan 22 07:27:54 crc kubenswrapper[4982]: I0122 07:27:54.995460 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d54d454d-6a5e-44e2-bb24-ca5783182099-config-data" (OuterVolumeSpecName: "config-data") pod "d54d454d-6a5e-44e2-bb24-ca5783182099" (UID: "d54d454d-6a5e-44e2-bb24-ca5783182099"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.057730 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-rznxl"] Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.066473 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d54d454d-6a5e-44e2-bb24-ca5783182099-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.075262 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-rznxl"] Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.402876 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d54d454d-6a5e-44e2-bb24-ca5783182099","Type":"ContainerDied","Data":"4d16fd7a879a515a6d7ed5e663523c4603b7428d44dd0f1d888e9fdb9a97c22e"} Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.402928 4982 scope.go:117] "RemoveContainer" containerID="fb1a5a2e9419aadbe51b3516c9aaa69f2b8583709e065ace2eb4583bdae9b71a" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.402946 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.435278 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.445339 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.466878 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 07:27:55 crc kubenswrapper[4982]: E0122 07:27:55.467885 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d54d454d-6a5e-44e2-bb24-ca5783182099" containerName="glance-httpd" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.467918 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d54d454d-6a5e-44e2-bb24-ca5783182099" containerName="glance-httpd" Jan 22 07:27:55 crc kubenswrapper[4982]: E0122 07:27:55.467984 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d54d454d-6a5e-44e2-bb24-ca5783182099" containerName="glance-log" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.467994 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d54d454d-6a5e-44e2-bb24-ca5783182099" containerName="glance-log" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.468366 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d54d454d-6a5e-44e2-bb24-ca5783182099" containerName="glance-httpd" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.468403 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d54d454d-6a5e-44e2-bb24-ca5783182099" containerName="glance-log" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.470679 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.473249 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.486803 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.574272 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hcq8\" (UniqueName: \"kubernetes.io/projected/6b55cf19-d25f-4825-b61e-78123b248e23-kube-api-access-7hcq8\") pod \"glance-default-external-api-0\" (UID: \"6b55cf19-d25f-4825-b61e-78123b248e23\") " pod="openstack/glance-default-external-api-0" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.574529 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b55cf19-d25f-4825-b61e-78123b248e23-config-data\") pod \"glance-default-external-api-0\" (UID: \"6b55cf19-d25f-4825-b61e-78123b248e23\") " pod="openstack/glance-default-external-api-0" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.574614 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b55cf19-d25f-4825-b61e-78123b248e23-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"6b55cf19-d25f-4825-b61e-78123b248e23\") " pod="openstack/glance-default-external-api-0" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.574702 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b55cf19-d25f-4825-b61e-78123b248e23-scripts\") pod \"glance-default-external-api-0\" (UID: \"6b55cf19-d25f-4825-b61e-78123b248e23\") " pod="openstack/glance-default-external-api-0" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.574806 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/6b55cf19-d25f-4825-b61e-78123b248e23-ceph\") pod \"glance-default-external-api-0\" (UID: \"6b55cf19-d25f-4825-b61e-78123b248e23\") " pod="openstack/glance-default-external-api-0" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.574897 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6b55cf19-d25f-4825-b61e-78123b248e23-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"6b55cf19-d25f-4825-b61e-78123b248e23\") " pod="openstack/glance-default-external-api-0" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.574996 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b55cf19-d25f-4825-b61e-78123b248e23-logs\") pod \"glance-default-external-api-0\" (UID: \"6b55cf19-d25f-4825-b61e-78123b248e23\") " pod="openstack/glance-default-external-api-0" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.676842 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b55cf19-d25f-4825-b61e-78123b248e23-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: 
\"6b55cf19-d25f-4825-b61e-78123b248e23\") " pod="openstack/glance-default-external-api-0" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.677775 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b55cf19-d25f-4825-b61e-78123b248e23-scripts\") pod \"glance-default-external-api-0\" (UID: \"6b55cf19-d25f-4825-b61e-78123b248e23\") " pod="openstack/glance-default-external-api-0" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.677883 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/6b55cf19-d25f-4825-b61e-78123b248e23-ceph\") pod \"glance-default-external-api-0\" (UID: \"6b55cf19-d25f-4825-b61e-78123b248e23\") " pod="openstack/glance-default-external-api-0" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.677939 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6b55cf19-d25f-4825-b61e-78123b248e23-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"6b55cf19-d25f-4825-b61e-78123b248e23\") " pod="openstack/glance-default-external-api-0" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.677995 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b55cf19-d25f-4825-b61e-78123b248e23-logs\") pod \"glance-default-external-api-0\" (UID: \"6b55cf19-d25f-4825-b61e-78123b248e23\") " pod="openstack/glance-default-external-api-0" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.678034 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hcq8\" (UniqueName: \"kubernetes.io/projected/6b55cf19-d25f-4825-b61e-78123b248e23-kube-api-access-7hcq8\") pod \"glance-default-external-api-0\" (UID: \"6b55cf19-d25f-4825-b61e-78123b248e23\") " pod="openstack/glance-default-external-api-0" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.678119 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b55cf19-d25f-4825-b61e-78123b248e23-config-data\") pod \"glance-default-external-api-0\" (UID: \"6b55cf19-d25f-4825-b61e-78123b248e23\") " pod="openstack/glance-default-external-api-0" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.678894 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b55cf19-d25f-4825-b61e-78123b248e23-logs\") pod \"glance-default-external-api-0\" (UID: \"6b55cf19-d25f-4825-b61e-78123b248e23\") " pod="openstack/glance-default-external-api-0" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.678992 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6b55cf19-d25f-4825-b61e-78123b248e23-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"6b55cf19-d25f-4825-b61e-78123b248e23\") " pod="openstack/glance-default-external-api-0" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.681878 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/6b55cf19-d25f-4825-b61e-78123b248e23-ceph\") pod \"glance-default-external-api-0\" (UID: \"6b55cf19-d25f-4825-b61e-78123b248e23\") " pod="openstack/glance-default-external-api-0" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.681905 4982 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b55cf19-d25f-4825-b61e-78123b248e23-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"6b55cf19-d25f-4825-b61e-78123b248e23\") " pod="openstack/glance-default-external-api-0" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.682628 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b55cf19-d25f-4825-b61e-78123b248e23-scripts\") pod \"glance-default-external-api-0\" (UID: \"6b55cf19-d25f-4825-b61e-78123b248e23\") " pod="openstack/glance-default-external-api-0" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.684382 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b55cf19-d25f-4825-b61e-78123b248e23-config-data\") pod \"glance-default-external-api-0\" (UID: \"6b55cf19-d25f-4825-b61e-78123b248e23\") " pod="openstack/glance-default-external-api-0" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.706210 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7hcq8\" (UniqueName: \"kubernetes.io/projected/6b55cf19-d25f-4825-b61e-78123b248e23-kube-api-access-7hcq8\") pod \"glance-default-external-api-0\" (UID: \"6b55cf19-d25f-4825-b61e-78123b248e23\") " pod="openstack/glance-default-external-api-0" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.739587 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2118180-20f1-4fc4-ba92-ee7b23e3d082" path="/var/lib/kubelet/pods/b2118180-20f1-4fc4-ba92-ee7b23e3d082/volumes" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.740513 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d54d454d-6a5e-44e2-bb24-ca5783182099" path="/var/lib/kubelet/pods/d54d454d-6a5e-44e2-bb24-ca5783182099/volumes" Jan 22 07:27:55 crc kubenswrapper[4982]: I0122 07:27:55.808038 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 07:27:59 crc kubenswrapper[4982]: I0122 07:27:59.735547 4982 scope.go:117] "RemoveContainer" containerID="e86d7de3d5541f26c30d5e3150518e2a21550ebe9adbca61610c9190dc552078" Jan 22 07:27:59 crc kubenswrapper[4982]: E0122 07:27:59.736150 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:28:00 crc kubenswrapper[4982]: I0122 07:28:00.777279 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vmc98"] Jan 22 07:28:00 crc kubenswrapper[4982]: I0122 07:28:00.779958 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vmc98" Jan 22 07:28:00 crc kubenswrapper[4982]: I0122 07:28:00.784014 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zf4ln\" (UniqueName: \"kubernetes.io/projected/27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2-kube-api-access-zf4ln\") pod \"redhat-operators-vmc98\" (UID: \"27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2\") " pod="openshift-marketplace/redhat-operators-vmc98" Jan 22 07:28:00 crc kubenswrapper[4982]: I0122 07:28:00.784202 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2-catalog-content\") pod \"redhat-operators-vmc98\" (UID: \"27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2\") " pod="openshift-marketplace/redhat-operators-vmc98" Jan 22 07:28:00 crc kubenswrapper[4982]: I0122 07:28:00.784506 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2-utilities\") pod \"redhat-operators-vmc98\" (UID: \"27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2\") " pod="openshift-marketplace/redhat-operators-vmc98" Jan 22 07:28:00 crc kubenswrapper[4982]: I0122 07:28:00.800629 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vmc98"] Jan 22 07:28:00 crc kubenswrapper[4982]: I0122 07:28:00.885811 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zf4ln\" (UniqueName: \"kubernetes.io/projected/27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2-kube-api-access-zf4ln\") pod \"redhat-operators-vmc98\" (UID: \"27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2\") " pod="openshift-marketplace/redhat-operators-vmc98" Jan 22 07:28:00 crc kubenswrapper[4982]: I0122 07:28:00.885924 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2-catalog-content\") pod \"redhat-operators-vmc98\" (UID: \"27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2\") " pod="openshift-marketplace/redhat-operators-vmc98" Jan 22 07:28:00 crc kubenswrapper[4982]: I0122 07:28:00.886039 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2-utilities\") pod \"redhat-operators-vmc98\" (UID: \"27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2\") " pod="openshift-marketplace/redhat-operators-vmc98" Jan 22 07:28:00 crc kubenswrapper[4982]: I0122 07:28:00.886515 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2-catalog-content\") pod \"redhat-operators-vmc98\" (UID: \"27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2\") " pod="openshift-marketplace/redhat-operators-vmc98" Jan 22 07:28:00 crc kubenswrapper[4982]: I0122 07:28:00.886542 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2-utilities\") pod \"redhat-operators-vmc98\" (UID: \"27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2\") " pod="openshift-marketplace/redhat-operators-vmc98" Jan 22 07:28:00 crc kubenswrapper[4982]: I0122 07:28:00.907085 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-zf4ln\" (UniqueName: \"kubernetes.io/projected/27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2-kube-api-access-zf4ln\") pod \"redhat-operators-vmc98\" (UID: \"27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2\") " pod="openshift-marketplace/redhat-operators-vmc98" Jan 22 07:28:01 crc kubenswrapper[4982]: I0122 07:28:01.108222 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vmc98" Jan 22 07:28:02 crc kubenswrapper[4982]: I0122 07:28:02.759612 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 07:28:02 crc kubenswrapper[4982]: I0122 07:28:02.926848 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/056a895d-8530-4180-a7fb-42e6bc47a2b5-scripts\") pod \"056a895d-8530-4180-a7fb-42e6bc47a2b5\" (UID: \"056a895d-8530-4180-a7fb-42e6bc47a2b5\") " Jan 22 07:28:02 crc kubenswrapper[4982]: I0122 07:28:02.927130 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/056a895d-8530-4180-a7fb-42e6bc47a2b5-combined-ca-bundle\") pod \"056a895d-8530-4180-a7fb-42e6bc47a2b5\" (UID: \"056a895d-8530-4180-a7fb-42e6bc47a2b5\") " Jan 22 07:28:02 crc kubenswrapper[4982]: I0122 07:28:02.927200 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/056a895d-8530-4180-a7fb-42e6bc47a2b5-httpd-run\") pod \"056a895d-8530-4180-a7fb-42e6bc47a2b5\" (UID: \"056a895d-8530-4180-a7fb-42e6bc47a2b5\") " Jan 22 07:28:02 crc kubenswrapper[4982]: I0122 07:28:02.927241 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sxwpk\" (UniqueName: \"kubernetes.io/projected/056a895d-8530-4180-a7fb-42e6bc47a2b5-kube-api-access-sxwpk\") pod \"056a895d-8530-4180-a7fb-42e6bc47a2b5\" (UID: \"056a895d-8530-4180-a7fb-42e6bc47a2b5\") " Jan 22 07:28:02 crc kubenswrapper[4982]: I0122 07:28:02.927370 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/056a895d-8530-4180-a7fb-42e6bc47a2b5-ceph\") pod \"056a895d-8530-4180-a7fb-42e6bc47a2b5\" (UID: \"056a895d-8530-4180-a7fb-42e6bc47a2b5\") " Jan 22 07:28:02 crc kubenswrapper[4982]: I0122 07:28:02.927482 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/056a895d-8530-4180-a7fb-42e6bc47a2b5-logs\") pod \"056a895d-8530-4180-a7fb-42e6bc47a2b5\" (UID: \"056a895d-8530-4180-a7fb-42e6bc47a2b5\") " Jan 22 07:28:02 crc kubenswrapper[4982]: I0122 07:28:02.927522 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/056a895d-8530-4180-a7fb-42e6bc47a2b5-config-data\") pod \"056a895d-8530-4180-a7fb-42e6bc47a2b5\" (UID: \"056a895d-8530-4180-a7fb-42e6bc47a2b5\") " Jan 22 07:28:02 crc kubenswrapper[4982]: I0122 07:28:02.928183 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/056a895d-8530-4180-a7fb-42e6bc47a2b5-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "056a895d-8530-4180-a7fb-42e6bc47a2b5" (UID: "056a895d-8530-4180-a7fb-42e6bc47a2b5"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:28:02 crc kubenswrapper[4982]: I0122 07:28:02.928330 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/056a895d-8530-4180-a7fb-42e6bc47a2b5-logs" (OuterVolumeSpecName: "logs") pod "056a895d-8530-4180-a7fb-42e6bc47a2b5" (UID: "056a895d-8530-4180-a7fb-42e6bc47a2b5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:28:02 crc kubenswrapper[4982]: I0122 07:28:02.935623 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/056a895d-8530-4180-a7fb-42e6bc47a2b5-ceph" (OuterVolumeSpecName: "ceph") pod "056a895d-8530-4180-a7fb-42e6bc47a2b5" (UID: "056a895d-8530-4180-a7fb-42e6bc47a2b5"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:28:02 crc kubenswrapper[4982]: I0122 07:28:02.937789 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/056a895d-8530-4180-a7fb-42e6bc47a2b5-kube-api-access-sxwpk" (OuterVolumeSpecName: "kube-api-access-sxwpk") pod "056a895d-8530-4180-a7fb-42e6bc47a2b5" (UID: "056a895d-8530-4180-a7fb-42e6bc47a2b5"). InnerVolumeSpecName "kube-api-access-sxwpk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:28:02 crc kubenswrapper[4982]: I0122 07:28:02.944981 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/056a895d-8530-4180-a7fb-42e6bc47a2b5-scripts" (OuterVolumeSpecName: "scripts") pod "056a895d-8530-4180-a7fb-42e6bc47a2b5" (UID: "056a895d-8530-4180-a7fb-42e6bc47a2b5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:28:02 crc kubenswrapper[4982]: I0122 07:28:02.963635 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/056a895d-8530-4180-a7fb-42e6bc47a2b5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "056a895d-8530-4180-a7fb-42e6bc47a2b5" (UID: "056a895d-8530-4180-a7fb-42e6bc47a2b5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:28:02 crc kubenswrapper[4982]: I0122 07:28:02.981662 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/056a895d-8530-4180-a7fb-42e6bc47a2b5-config-data" (OuterVolumeSpecName: "config-data") pod "056a895d-8530-4180-a7fb-42e6bc47a2b5" (UID: "056a895d-8530-4180-a7fb-42e6bc47a2b5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.030655 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/056a895d-8530-4180-a7fb-42e6bc47a2b5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.030707 4982 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/056a895d-8530-4180-a7fb-42e6bc47a2b5-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.030719 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sxwpk\" (UniqueName: \"kubernetes.io/projected/056a895d-8530-4180-a7fb-42e6bc47a2b5-kube-api-access-sxwpk\") on node \"crc\" DevicePath \"\"" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.030731 4982 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/056a895d-8530-4180-a7fb-42e6bc47a2b5-ceph\") on node \"crc\" DevicePath \"\"" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.030743 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/056a895d-8530-4180-a7fb-42e6bc47a2b5-logs\") on node \"crc\" DevicePath \"\"" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.030753 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/056a895d-8530-4180-a7fb-42e6bc47a2b5-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.030763 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/056a895d-8530-4180-a7fb-42e6bc47a2b5-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.508994 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"056a895d-8530-4180-a7fb-42e6bc47a2b5","Type":"ContainerDied","Data":"465b3a90e22df6991bd1c24e890238f6de119eb6bf93be1c265100bce3376731"} Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.509110 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.581339 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.610604 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.629283 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 07:28:03 crc kubenswrapper[4982]: E0122 07:28:03.629759 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="056a895d-8530-4180-a7fb-42e6bc47a2b5" containerName="glance-log" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.629779 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="056a895d-8530-4180-a7fb-42e6bc47a2b5" containerName="glance-log" Jan 22 07:28:03 crc kubenswrapper[4982]: E0122 07:28:03.629811 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="056a895d-8530-4180-a7fb-42e6bc47a2b5" containerName="glance-httpd" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.629824 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="056a895d-8530-4180-a7fb-42e6bc47a2b5" containerName="glance-httpd" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.630104 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="056a895d-8530-4180-a7fb-42e6bc47a2b5" containerName="glance-log" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.630128 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="056a895d-8530-4180-a7fb-42e6bc47a2b5" containerName="glance-httpd" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.641495 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.641583 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.643671 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.645442 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/61476283-6be6-45db-b4dd-e4cb19492799-logs\") pod \"glance-default-internal-api-0\" (UID: \"61476283-6be6-45db-b4dd-e4cb19492799\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.645508 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/61476283-6be6-45db-b4dd-e4cb19492799-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"61476283-6be6-45db-b4dd-e4cb19492799\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.645575 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8tnh\" (UniqueName: \"kubernetes.io/projected/61476283-6be6-45db-b4dd-e4cb19492799-kube-api-access-x8tnh\") pod \"glance-default-internal-api-0\" (UID: \"61476283-6be6-45db-b4dd-e4cb19492799\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.645628 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/61476283-6be6-45db-b4dd-e4cb19492799-ceph\") pod \"glance-default-internal-api-0\" (UID: \"61476283-6be6-45db-b4dd-e4cb19492799\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.645729 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61476283-6be6-45db-b4dd-e4cb19492799-scripts\") pod \"glance-default-internal-api-0\" (UID: \"61476283-6be6-45db-b4dd-e4cb19492799\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.645780 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61476283-6be6-45db-b4dd-e4cb19492799-config-data\") pod \"glance-default-internal-api-0\" (UID: \"61476283-6be6-45db-b4dd-e4cb19492799\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.645810 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61476283-6be6-45db-b4dd-e4cb19492799-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"61476283-6be6-45db-b4dd-e4cb19492799\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.739245 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="056a895d-8530-4180-a7fb-42e6bc47a2b5" path="/var/lib/kubelet/pods/056a895d-8530-4180-a7fb-42e6bc47a2b5/volumes" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.750118 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/61476283-6be6-45db-b4dd-e4cb19492799-config-data\") pod \"glance-default-internal-api-0\" (UID: \"61476283-6be6-45db-b4dd-e4cb19492799\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.750174 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61476283-6be6-45db-b4dd-e4cb19492799-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"61476283-6be6-45db-b4dd-e4cb19492799\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.750252 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/61476283-6be6-45db-b4dd-e4cb19492799-logs\") pod \"glance-default-internal-api-0\" (UID: \"61476283-6be6-45db-b4dd-e4cb19492799\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.751000 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/61476283-6be6-45db-b4dd-e4cb19492799-logs\") pod \"glance-default-internal-api-0\" (UID: \"61476283-6be6-45db-b4dd-e4cb19492799\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.751707 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/61476283-6be6-45db-b4dd-e4cb19492799-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"61476283-6be6-45db-b4dd-e4cb19492799\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.751909 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8tnh\" (UniqueName: \"kubernetes.io/projected/61476283-6be6-45db-b4dd-e4cb19492799-kube-api-access-x8tnh\") pod \"glance-default-internal-api-0\" (UID: \"61476283-6be6-45db-b4dd-e4cb19492799\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.752070 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/61476283-6be6-45db-b4dd-e4cb19492799-ceph\") pod \"glance-default-internal-api-0\" (UID: \"61476283-6be6-45db-b4dd-e4cb19492799\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.752323 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61476283-6be6-45db-b4dd-e4cb19492799-scripts\") pod \"glance-default-internal-api-0\" (UID: \"61476283-6be6-45db-b4dd-e4cb19492799\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.753571 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/61476283-6be6-45db-b4dd-e4cb19492799-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"61476283-6be6-45db-b4dd-e4cb19492799\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.759316 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/61476283-6be6-45db-b4dd-e4cb19492799-ceph\") pod \"glance-default-internal-api-0\" (UID: 
\"61476283-6be6-45db-b4dd-e4cb19492799\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.759648 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61476283-6be6-45db-b4dd-e4cb19492799-config-data\") pod \"glance-default-internal-api-0\" (UID: \"61476283-6be6-45db-b4dd-e4cb19492799\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.760077 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61476283-6be6-45db-b4dd-e4cb19492799-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"61476283-6be6-45db-b4dd-e4cb19492799\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.760122 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61476283-6be6-45db-b4dd-e4cb19492799-scripts\") pod \"glance-default-internal-api-0\" (UID: \"61476283-6be6-45db-b4dd-e4cb19492799\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.774470 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8tnh\" (UniqueName: \"kubernetes.io/projected/61476283-6be6-45db-b4dd-e4cb19492799-kube-api-access-x8tnh\") pod \"glance-default-internal-api-0\" (UID: \"61476283-6be6-45db-b4dd-e4cb19492799\") " pod="openstack/glance-default-internal-api-0" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.929489 4982 scope.go:117] "RemoveContainer" containerID="992a53867c4a533ddc7603f80e288115bbe56be21aa45483c21b4394c6ac9c6e" Jan 22 07:28:03 crc kubenswrapper[4982]: I0122 07:28:03.959450 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 07:28:04 crc kubenswrapper[4982]: I0122 07:28:04.159711 4982 scope.go:117] "RemoveContainer" containerID="f9262139b9c3e5cfcce5a204b83a9c760bb2a4290a1e2b4102a25d2e366247ac" Jan 22 07:28:04 crc kubenswrapper[4982]: I0122 07:28:04.203945 4982 scope.go:117] "RemoveContainer" containerID="107d3502545453e5c563d9412a282714fd9d07c1c857236ba0ca9fb27a76bb74" Jan 22 07:28:04 crc kubenswrapper[4982]: I0122 07:28:04.445292 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vmc98"] Jan 22 07:28:04 crc kubenswrapper[4982]: W0122 07:28:04.446708 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod27e6c0cf_5e12_4a5b_b1ed_ec10dba19ca2.slice/crio-1d2ef909ce1494430e79f00cee14e074788b0bb299bfaaf9516882bc54524c7c WatchSource:0}: Error finding container 1d2ef909ce1494430e79f00cee14e074788b0bb299bfaaf9516882bc54524c7c: Status 404 returned error can't find the container with id 1d2ef909ce1494430e79f00cee14e074788b0bb299bfaaf9516882bc54524c7c Jan 22 07:28:04 crc kubenswrapper[4982]: W0122 07:28:04.496187 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b55cf19_d25f_4825_b61e_78123b248e23.slice/crio-2f13b55b46cdefbd907dc59936d580063c198c54ae02726070e1776428536746 WatchSource:0}: Error finding container 2f13b55b46cdefbd907dc59936d580063c198c54ae02726070e1776428536746: Status 404 returned error can't find the container with id 2f13b55b46cdefbd907dc59936d580063c198c54ae02726070e1776428536746 Jan 22 07:28:04 crc kubenswrapper[4982]: I0122 07:28:04.496531 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 07:28:04 crc kubenswrapper[4982]: I0122 07:28:04.523110 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6b55cf19-d25f-4825-b61e-78123b248e23","Type":"ContainerStarted","Data":"2f13b55b46cdefbd907dc59936d580063c198c54ae02726070e1776428536746"} Jan 22 07:28:04 crc kubenswrapper[4982]: I0122 07:28:04.525042 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vmc98" event={"ID":"27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2","Type":"ContainerStarted","Data":"1d2ef909ce1494430e79f00cee14e074788b0bb299bfaaf9516882bc54524c7c"} Jan 22 07:28:04 crc kubenswrapper[4982]: I0122 07:28:04.847530 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 07:28:05 crc kubenswrapper[4982]: I0122 07:28:05.541219 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"61476283-6be6-45db-b4dd-e4cb19492799","Type":"ContainerStarted","Data":"06eb9f94ac16bf41b79b9e23e7bef8e13dd1d26cb5b3fec7c99b5a04b5d7631f"} Jan 22 07:28:05 crc kubenswrapper[4982]: E0122 07:28:05.685567 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon@sha256:dd7600bc5278c663cfcfecafd3fb051a2cd2ddc3c1efb07738bf09512aa23ae7" Jan 22 07:28:05 crc kubenswrapper[4982]: E0122 07:28:05.685728 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon@sha256:dd7600bc5278c663cfcfecafd3fb051a2cd2ddc3c1efb07738bf09512aa23ae7,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n547h5c6h5d9h8dh65bh6ch8hbdh596hfdh5b9h5b9h647h659h66dh676h696h5d6h549h87h5c6h586h58bh56bhc5h597hc6h64dh55fh569hb7h698q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pxhq7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-99b96d4f5-7d75d_openstack(d6de4e3a-13ce-476a-bff4-cdabdcc138c8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 07:28:05 crc kubenswrapper[4982]: E0122 07:28:05.688145 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon@sha256:dd7600bc5278c663cfcfecafd3fb051a2cd2ddc3c1efb07738bf09512aa23ae7\\\"\"]" pod="openstack/horizon-99b96d4f5-7d75d" podUID="d6de4e3a-13ce-476a-bff4-cdabdcc138c8" Jan 22 07:28:05 crc kubenswrapper[4982]: E0122 07:28:05.701542 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon@sha256:dd7600bc5278c663cfcfecafd3fb051a2cd2ddc3c1efb07738bf09512aa23ae7" Jan 22 07:28:05 crc kubenswrapper[4982]: E0122 07:28:05.701712 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon@sha256:dd7600bc5278c663cfcfecafd3fb051a2cd2ddc3c1efb07738bf09512aa23ae7,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5f6hbbhbdh58h5fh59ch8h694h5c5hfdh684h59hbfh66fh57fh689h676h67fh9bh68h5ddhbh5h68h545hd9h659h57chf9h596h579h5dq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-frqc2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-7b44bfdd57-k7hnf_openstack(db75fc6f-fad5-4212-be6b-e310f66ee471): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 07:28:05 crc kubenswrapper[4982]: E0122 07:28:05.705040 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon@sha256:dd7600bc5278c663cfcfecafd3fb051a2cd2ddc3c1efb07738bf09512aa23ae7\\\"\"]" pod="openstack/horizon-7b44bfdd57-k7hnf" podUID="db75fc6f-fad5-4212-be6b-e310f66ee471" Jan 22 07:28:05 crc kubenswrapper[4982]: E0122 07:28:05.711340 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon@sha256:dd7600bc5278c663cfcfecafd3fb051a2cd2ddc3c1efb07738bf09512aa23ae7" Jan 22 07:28:05 crc kubenswrapper[4982]: E0122 07:28:05.711479 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon@sha256:dd7600bc5278c663cfcfecafd3fb051a2cd2ddc3c1efb07738bf09512aa23ae7,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5b5h5fbh58fh679h57h54bh5bdh64bh5d4h655h54dh97h5d4h67fh56fh66fhf9hbdh67h647h65ch675h564h596hd6hcch7hf7h75hcbh7fhbq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8d4bz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-64d5fcfdbf-4w98n_openstack(339a4c9e-4b9f-4806-9506-272ad5d7fa89): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 07:28:05 crc kubenswrapper[4982]: E0122 07:28:05.714027 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon@sha256:dd7600bc5278c663cfcfecafd3fb051a2cd2ddc3c1efb07738bf09512aa23ae7\\\"\"]" pod="openstack/horizon-64d5fcfdbf-4w98n" podUID="339a4c9e-4b9f-4806-9506-272ad5d7fa89" Jan 22 07:28:05 crc kubenswrapper[4982]: I0122 07:28:05.777142 4982 scope.go:117] "RemoveContainer" containerID="2a43c1eddb99759bb7dd764fb472489872dd2e099731e3117c3f5c2aa223d9ff" Jan 22 07:28:05 crc kubenswrapper[4982]: I0122 07:28:05.911803 4982 scope.go:117] "RemoveContainer" containerID="ef3061a05dc05af79a876f392559e59ab6dda6e222e3af92a09956d034be7137" Jan 22 07:28:05 crc kubenswrapper[4982]: I0122 07:28:05.966166 4982 scope.go:117] "RemoveContainer" containerID="7b92b28a084b034c412278e3958497b45e7762b3d89d63eb03b38924c58fbb03" Jan 22 07:28:06 crc kubenswrapper[4982]: I0122 07:28:06.557048 4982 generic.go:334] "Generic (PLEG): container finished" podID="27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2" containerID="009d5250d6065d20d7364b3f9a327ef5ac2d603528bbec3e2106a4894b530a70" exitCode=0 Jan 22 07:28:06 crc kubenswrapper[4982]: I0122 07:28:06.557386 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vmc98" 
event={"ID":"27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2","Type":"ContainerDied","Data":"009d5250d6065d20d7364b3f9a327ef5ac2d603528bbec3e2106a4894b530a70"} Jan 22 07:28:06 crc kubenswrapper[4982]: I0122 07:28:06.564594 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6b55cf19-d25f-4825-b61e-78123b248e23","Type":"ContainerStarted","Data":"a8f7e402d6f1b98197ac461bb795ca64d2c9a91c40cacd399e023b8f831254c4"} Jan 22 07:28:06 crc kubenswrapper[4982]: I0122 07:28:06.564648 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6b55cf19-d25f-4825-b61e-78123b248e23","Type":"ContainerStarted","Data":"b51bea2762808f29dfa41379a11c434a8cb89ab4395ffaf7806e309a48e1e62a"} Jan 22 07:28:06 crc kubenswrapper[4982]: I0122 07:28:06.573736 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"61476283-6be6-45db-b4dd-e4cb19492799","Type":"ContainerStarted","Data":"b665d37615e7a905c628c29179dd04cd49b698def356c45b5c7ab3d70ec20b2a"} Jan 22 07:28:06 crc kubenswrapper[4982]: I0122 07:28:06.573769 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"61476283-6be6-45db-b4dd-e4cb19492799","Type":"ContainerStarted","Data":"2469c3443171d22736c5fc03e622c439cb157c50540431830c35424a17ba6b62"} Jan 22 07:28:06 crc kubenswrapper[4982]: E0122 07:28:06.578785 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon@sha256:dd7600bc5278c663cfcfecafd3fb051a2cd2ddc3c1efb07738bf09512aa23ae7\\\"\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon@sha256:dd7600bc5278c663cfcfecafd3fb051a2cd2ddc3c1efb07738bf09512aa23ae7\\\"\"]" pod="openstack/horizon-7b44bfdd57-k7hnf" podUID="db75fc6f-fad5-4212-be6b-e310f66ee471" Jan 22 07:28:06 crc kubenswrapper[4982]: E0122 07:28:06.581656 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon@sha256:dd7600bc5278c663cfcfecafd3fb051a2cd2ddc3c1efb07738bf09512aa23ae7\\\"\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon@sha256:dd7600bc5278c663cfcfecafd3fb051a2cd2ddc3c1efb07738bf09512aa23ae7\\\"\"]" pod="openstack/horizon-99b96d4f5-7d75d" podUID="d6de4e3a-13ce-476a-bff4-cdabdcc138c8" Jan 22 07:28:06 crc kubenswrapper[4982]: I0122 07:28:06.642454 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.6424365930000002 podStartE2EDuration="3.642436593s" podCreationTimestamp="2026-01-22 07:28:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:28:06.639568756 +0000 UTC m=+6147.478206759" watchObservedRunningTime="2026-01-22 07:28:06.642436593 +0000 UTC m=+6147.481074606" Jan 22 07:28:06 crc kubenswrapper[4982]: I0122 07:28:06.697713 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=11.697685954 
podStartE2EDuration="11.697685954s" podCreationTimestamp="2026-01-22 07:27:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:28:06.688940198 +0000 UTC m=+6147.527578221" watchObservedRunningTime="2026-01-22 07:28:06.697685954 +0000 UTC m=+6147.536323967" Jan 22 07:28:06 crc kubenswrapper[4982]: I0122 07:28:06.967130 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-64d5fcfdbf-4w98n" Jan 22 07:28:07 crc kubenswrapper[4982]: I0122 07:28:07.071613 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/339a4c9e-4b9f-4806-9506-272ad5d7fa89-logs\") pod \"339a4c9e-4b9f-4806-9506-272ad5d7fa89\" (UID: \"339a4c9e-4b9f-4806-9506-272ad5d7fa89\") " Jan 22 07:28:07 crc kubenswrapper[4982]: I0122 07:28:07.071680 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/339a4c9e-4b9f-4806-9506-272ad5d7fa89-config-data\") pod \"339a4c9e-4b9f-4806-9506-272ad5d7fa89\" (UID: \"339a4c9e-4b9f-4806-9506-272ad5d7fa89\") " Jan 22 07:28:07 crc kubenswrapper[4982]: I0122 07:28:07.071730 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/339a4c9e-4b9f-4806-9506-272ad5d7fa89-horizon-secret-key\") pod \"339a4c9e-4b9f-4806-9506-272ad5d7fa89\" (UID: \"339a4c9e-4b9f-4806-9506-272ad5d7fa89\") " Jan 22 07:28:07 crc kubenswrapper[4982]: I0122 07:28:07.071816 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8d4bz\" (UniqueName: \"kubernetes.io/projected/339a4c9e-4b9f-4806-9506-272ad5d7fa89-kube-api-access-8d4bz\") pod \"339a4c9e-4b9f-4806-9506-272ad5d7fa89\" (UID: \"339a4c9e-4b9f-4806-9506-272ad5d7fa89\") " Jan 22 07:28:07 crc kubenswrapper[4982]: I0122 07:28:07.071841 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/339a4c9e-4b9f-4806-9506-272ad5d7fa89-scripts\") pod \"339a4c9e-4b9f-4806-9506-272ad5d7fa89\" (UID: \"339a4c9e-4b9f-4806-9506-272ad5d7fa89\") " Jan 22 07:28:07 crc kubenswrapper[4982]: I0122 07:28:07.072030 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/339a4c9e-4b9f-4806-9506-272ad5d7fa89-logs" (OuterVolumeSpecName: "logs") pod "339a4c9e-4b9f-4806-9506-272ad5d7fa89" (UID: "339a4c9e-4b9f-4806-9506-272ad5d7fa89"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:28:07 crc kubenswrapper[4982]: I0122 07:28:07.072387 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/339a4c9e-4b9f-4806-9506-272ad5d7fa89-config-data" (OuterVolumeSpecName: "config-data") pod "339a4c9e-4b9f-4806-9506-272ad5d7fa89" (UID: "339a4c9e-4b9f-4806-9506-272ad5d7fa89"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:28:07 crc kubenswrapper[4982]: I0122 07:28:07.072509 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/339a4c9e-4b9f-4806-9506-272ad5d7fa89-scripts" (OuterVolumeSpecName: "scripts") pod "339a4c9e-4b9f-4806-9506-272ad5d7fa89" (UID: "339a4c9e-4b9f-4806-9506-272ad5d7fa89"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:28:07 crc kubenswrapper[4982]: I0122 07:28:07.072821 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/339a4c9e-4b9f-4806-9506-272ad5d7fa89-logs\") on node \"crc\" DevicePath \"\"" Jan 22 07:28:07 crc kubenswrapper[4982]: I0122 07:28:07.072870 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/339a4c9e-4b9f-4806-9506-272ad5d7fa89-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:28:07 crc kubenswrapper[4982]: I0122 07:28:07.072884 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/339a4c9e-4b9f-4806-9506-272ad5d7fa89-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:28:07 crc kubenswrapper[4982]: I0122 07:28:07.076987 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/339a4c9e-4b9f-4806-9506-272ad5d7fa89-kube-api-access-8d4bz" (OuterVolumeSpecName: "kube-api-access-8d4bz") pod "339a4c9e-4b9f-4806-9506-272ad5d7fa89" (UID: "339a4c9e-4b9f-4806-9506-272ad5d7fa89"). InnerVolumeSpecName "kube-api-access-8d4bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:28:07 crc kubenswrapper[4982]: I0122 07:28:07.080009 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/339a4c9e-4b9f-4806-9506-272ad5d7fa89-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "339a4c9e-4b9f-4806-9506-272ad5d7fa89" (UID: "339a4c9e-4b9f-4806-9506-272ad5d7fa89"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:28:07 crc kubenswrapper[4982]: I0122 07:28:07.175166 4982 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/339a4c9e-4b9f-4806-9506-272ad5d7fa89-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Jan 22 07:28:07 crc kubenswrapper[4982]: I0122 07:28:07.175206 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8d4bz\" (UniqueName: \"kubernetes.io/projected/339a4c9e-4b9f-4806-9506-272ad5d7fa89-kube-api-access-8d4bz\") on node \"crc\" DevicePath \"\"" Jan 22 07:28:07 crc kubenswrapper[4982]: I0122 07:28:07.584809 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vmc98" event={"ID":"27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2","Type":"ContainerStarted","Data":"ee11598309f9fa49eaaaf98ab243340a0adc385eaa3b383a238c69b46e20d8e1"} Jan 22 07:28:07 crc kubenswrapper[4982]: I0122 07:28:07.586533 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-64d5fcfdbf-4w98n" event={"ID":"339a4c9e-4b9f-4806-9506-272ad5d7fa89","Type":"ContainerDied","Data":"b54a51a19fe765413eead1b4036d34287acbf2ab9119586b0abb21357b95241b"} Jan 22 07:28:07 crc kubenswrapper[4982]: I0122 07:28:07.586547 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-64d5fcfdbf-4w98n" Jan 22 07:28:07 crc kubenswrapper[4982]: I0122 07:28:07.658462 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-64d5fcfdbf-4w98n"] Jan 22 07:28:07 crc kubenswrapper[4982]: I0122 07:28:07.671727 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-64d5fcfdbf-4w98n"] Jan 22 07:28:07 crc kubenswrapper[4982]: I0122 07:28:07.730165 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="339a4c9e-4b9f-4806-9506-272ad5d7fa89" path="/var/lib/kubelet/pods/339a4c9e-4b9f-4806-9506-272ad5d7fa89/volumes" Jan 22 07:28:09 crc kubenswrapper[4982]: I0122 07:28:09.609778 4982 generic.go:334] "Generic (PLEG): container finished" podID="27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2" containerID="ee11598309f9fa49eaaaf98ab243340a0adc385eaa3b383a238c69b46e20d8e1" exitCode=0 Jan 22 07:28:09 crc kubenswrapper[4982]: I0122 07:28:09.609891 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vmc98" event={"ID":"27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2","Type":"ContainerDied","Data":"ee11598309f9fa49eaaaf98ab243340a0adc385eaa3b383a238c69b46e20d8e1"} Jan 22 07:28:12 crc kubenswrapper[4982]: I0122 07:28:12.653015 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vmc98" event={"ID":"27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2","Type":"ContainerStarted","Data":"5f4308bb3d097498f3c0b4fe3cba3860441acc7f0ba14993a65f1f67e065fcd0"} Jan 22 07:28:12 crc kubenswrapper[4982]: I0122 07:28:12.682866 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vmc98" podStartSLOduration=8.052124399 podStartE2EDuration="12.682820182s" podCreationTimestamp="2026-01-22 07:28:00 +0000 UTC" firstStartedPulling="2026-01-22 07:28:06.559220106 +0000 UTC m=+6147.397858109" lastFinishedPulling="2026-01-22 07:28:11.189915889 +0000 UTC m=+6152.028553892" observedRunningTime="2026-01-22 07:28:12.67460284 +0000 UTC m=+6153.513240843" watchObservedRunningTime="2026-01-22 07:28:12.682820182 +0000 UTC m=+6153.521458185" Jan 22 07:28:12 crc kubenswrapper[4982]: I0122 07:28:12.718920 4982 scope.go:117] "RemoveContainer" containerID="e86d7de3d5541f26c30d5e3150518e2a21550ebe9adbca61610c9190dc552078" Jan 22 07:28:12 crc kubenswrapper[4982]: E0122 07:28:12.719239 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:28:13 crc kubenswrapper[4982]: I0122 07:28:13.960180 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 22 07:28:13 crc kubenswrapper[4982]: I0122 07:28:13.961413 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 22 07:28:14 crc kubenswrapper[4982]: I0122 07:28:14.012609 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 22 07:28:14 crc kubenswrapper[4982]: I0122 07:28:14.063557 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack/glance-default-internal-api-0" Jan 22 07:28:14 crc kubenswrapper[4982]: I0122 07:28:14.687980 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 22 07:28:14 crc kubenswrapper[4982]: I0122 07:28:14.688323 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 22 07:28:15 crc kubenswrapper[4982]: I0122 07:28:15.810953 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 22 07:28:15 crc kubenswrapper[4982]: I0122 07:28:15.811009 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 22 07:28:15 crc kubenswrapper[4982]: I0122 07:28:15.842409 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 22 07:28:15 crc kubenswrapper[4982]: I0122 07:28:15.859713 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 22 07:28:16 crc kubenswrapper[4982]: I0122 07:28:16.706405 4982 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 07:28:16 crc kubenswrapper[4982]: I0122 07:28:16.706445 4982 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 07:28:16 crc kubenswrapper[4982]: I0122 07:28:16.707016 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 22 07:28:16 crc kubenswrapper[4982]: I0122 07:28:16.707058 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 22 07:28:17 crc kubenswrapper[4982]: I0122 07:28:17.548098 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 22 07:28:17 crc kubenswrapper[4982]: I0122 07:28:17.552709 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 22 07:28:17 crc kubenswrapper[4982]: I0122 07:28:17.831652 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rgg65"] Jan 22 07:28:17 crc kubenswrapper[4982]: I0122 07:28:17.840203 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rgg65" Jan 22 07:28:17 crc kubenswrapper[4982]: I0122 07:28:17.841081 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rgg65"] Jan 22 07:28:17 crc kubenswrapper[4982]: I0122 07:28:17.904277 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/480843e6-45e2-46fb-b2d8-58a907124635-catalog-content\") pod \"redhat-marketplace-rgg65\" (UID: \"480843e6-45e2-46fb-b2d8-58a907124635\") " pod="openshift-marketplace/redhat-marketplace-rgg65" Jan 22 07:28:17 crc kubenswrapper[4982]: I0122 07:28:17.904339 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/480843e6-45e2-46fb-b2d8-58a907124635-utilities\") pod \"redhat-marketplace-rgg65\" (UID: \"480843e6-45e2-46fb-b2d8-58a907124635\") " pod="openshift-marketplace/redhat-marketplace-rgg65" Jan 22 07:28:17 crc kubenswrapper[4982]: I0122 07:28:17.904484 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5dsg\" (UniqueName: \"kubernetes.io/projected/480843e6-45e2-46fb-b2d8-58a907124635-kube-api-access-n5dsg\") pod \"redhat-marketplace-rgg65\" (UID: \"480843e6-45e2-46fb-b2d8-58a907124635\") " pod="openshift-marketplace/redhat-marketplace-rgg65" Jan 22 07:28:18 crc kubenswrapper[4982]: I0122 07:28:18.006125 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5dsg\" (UniqueName: \"kubernetes.io/projected/480843e6-45e2-46fb-b2d8-58a907124635-kube-api-access-n5dsg\") pod \"redhat-marketplace-rgg65\" (UID: \"480843e6-45e2-46fb-b2d8-58a907124635\") " pod="openshift-marketplace/redhat-marketplace-rgg65" Jan 22 07:28:18 crc kubenswrapper[4982]: I0122 07:28:18.006247 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/480843e6-45e2-46fb-b2d8-58a907124635-catalog-content\") pod \"redhat-marketplace-rgg65\" (UID: \"480843e6-45e2-46fb-b2d8-58a907124635\") " pod="openshift-marketplace/redhat-marketplace-rgg65" Jan 22 07:28:18 crc kubenswrapper[4982]: I0122 07:28:18.006273 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/480843e6-45e2-46fb-b2d8-58a907124635-utilities\") pod \"redhat-marketplace-rgg65\" (UID: \"480843e6-45e2-46fb-b2d8-58a907124635\") " pod="openshift-marketplace/redhat-marketplace-rgg65" Jan 22 07:28:18 crc kubenswrapper[4982]: I0122 07:28:18.006781 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/480843e6-45e2-46fb-b2d8-58a907124635-catalog-content\") pod \"redhat-marketplace-rgg65\" (UID: \"480843e6-45e2-46fb-b2d8-58a907124635\") " pod="openshift-marketplace/redhat-marketplace-rgg65" Jan 22 07:28:18 crc kubenswrapper[4982]: I0122 07:28:18.006830 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/480843e6-45e2-46fb-b2d8-58a907124635-utilities\") pod \"redhat-marketplace-rgg65\" (UID: \"480843e6-45e2-46fb-b2d8-58a907124635\") " pod="openshift-marketplace/redhat-marketplace-rgg65" Jan 22 07:28:18 crc kubenswrapper[4982]: I0122 07:28:18.032776 4982 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-n5dsg\" (UniqueName: \"kubernetes.io/projected/480843e6-45e2-46fb-b2d8-58a907124635-kube-api-access-n5dsg\") pod \"redhat-marketplace-rgg65\" (UID: \"480843e6-45e2-46fb-b2d8-58a907124635\") " pod="openshift-marketplace/redhat-marketplace-rgg65" Jan 22 07:28:18 crc kubenswrapper[4982]: I0122 07:28:18.168352 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rgg65" Jan 22 07:28:18 crc kubenswrapper[4982]: I0122 07:28:18.663693 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rgg65"] Jan 22 07:28:18 crc kubenswrapper[4982]: I0122 07:28:18.747278 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rgg65" event={"ID":"480843e6-45e2-46fb-b2d8-58a907124635","Type":"ContainerStarted","Data":"8459cb8da17aae8f50a7d2233e89420fd2b7e5450d9688c124d7bc698e95493f"} Jan 22 07:28:19 crc kubenswrapper[4982]: I0122 07:28:19.215056 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 22 07:28:19 crc kubenswrapper[4982]: I0122 07:28:19.215462 4982 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 07:28:19 crc kubenswrapper[4982]: I0122 07:28:19.219270 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 22 07:28:20 crc kubenswrapper[4982]: I0122 07:28:20.767152 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rgg65" event={"ID":"480843e6-45e2-46fb-b2d8-58a907124635","Type":"ContainerStarted","Data":"b165dd8b10721c4dd8dd9bffa80e09bd1218ad676406bfa45660b2981fdcbb45"} Jan 22 07:28:21 crc kubenswrapper[4982]: I0122 07:28:21.108773 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vmc98" Jan 22 07:28:21 crc kubenswrapper[4982]: I0122 07:28:21.108827 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vmc98" Jan 22 07:28:21 crc kubenswrapper[4982]: I0122 07:28:21.168148 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vmc98" Jan 22 07:28:21 crc kubenswrapper[4982]: I0122 07:28:21.793628 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-99b96d4f5-7d75d" event={"ID":"d6de4e3a-13ce-476a-bff4-cdabdcc138c8","Type":"ContainerStarted","Data":"f66ba423e5388a3467b72f78b4b8628e57c3f8d5a2fa65e7cd01073a180011cc"} Jan 22 07:28:21 crc kubenswrapper[4982]: I0122 07:28:21.794030 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-99b96d4f5-7d75d" event={"ID":"d6de4e3a-13ce-476a-bff4-cdabdcc138c8","Type":"ContainerStarted","Data":"cef9c0b9e5f33e5e1998a211a59e1e86cf6e7d633445bc7eab5813f495745768"} Jan 22 07:28:21 crc kubenswrapper[4982]: I0122 07:28:21.798228 4982 generic.go:334] "Generic (PLEG): container finished" podID="480843e6-45e2-46fb-b2d8-58a907124635" containerID="b165dd8b10721c4dd8dd9bffa80e09bd1218ad676406bfa45660b2981fdcbb45" exitCode=0 Jan 22 07:28:21 crc kubenswrapper[4982]: I0122 07:28:21.798326 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rgg65" 
event={"ID":"480843e6-45e2-46fb-b2d8-58a907124635","Type":"ContainerDied","Data":"b165dd8b10721c4dd8dd9bffa80e09bd1218ad676406bfa45660b2981fdcbb45"} Jan 22 07:28:21 crc kubenswrapper[4982]: I0122 07:28:21.826780 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-99b96d4f5-7d75d" podStartSLOduration=2.270033905 podStartE2EDuration="31.826762696s" podCreationTimestamp="2026-01-22 07:27:50 +0000 UTC" firstStartedPulling="2026-01-22 07:27:51.630815664 +0000 UTC m=+6132.469453667" lastFinishedPulling="2026-01-22 07:28:21.187544455 +0000 UTC m=+6162.026182458" observedRunningTime="2026-01-22 07:28:21.820760794 +0000 UTC m=+6162.659398797" watchObservedRunningTime="2026-01-22 07:28:21.826762696 +0000 UTC m=+6162.665400699" Jan 22 07:28:21 crc kubenswrapper[4982]: I0122 07:28:21.856910 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vmc98" Jan 22 07:28:22 crc kubenswrapper[4982]: I0122 07:28:22.813957 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b44bfdd57-k7hnf" event={"ID":"db75fc6f-fad5-4212-be6b-e310f66ee471","Type":"ContainerStarted","Data":"8d017b3574f9aa120fe25a5c340b06c59625793b2ba25362425a54699db1d65a"} Jan 22 07:28:22 crc kubenswrapper[4982]: I0122 07:28:22.814340 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b44bfdd57-k7hnf" event={"ID":"db75fc6f-fad5-4212-be6b-e310f66ee471","Type":"ContainerStarted","Data":"2dc0d63afa106ab83e3d91c78e49bc6f2e76b08fd10599993c145d2acf78598c"} Jan 22 07:28:22 crc kubenswrapper[4982]: I0122 07:28:22.835441 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7b44bfdd57-k7hnf" podStartSLOduration=-9223372005.019352 podStartE2EDuration="31.835424663s" podCreationTimestamp="2026-01-22 07:27:51 +0000 UTC" firstStartedPulling="2026-01-22 07:27:52.083590561 +0000 UTC m=+6132.922228564" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:28:22.830676834 +0000 UTC m=+6163.669314837" watchObservedRunningTime="2026-01-22 07:28:22.835424663 +0000 UTC m=+6163.674062666" Jan 22 07:28:23 crc kubenswrapper[4982]: I0122 07:28:23.596048 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vmc98"] Jan 22 07:28:23 crc kubenswrapper[4982]: I0122 07:28:23.822480 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vmc98" podUID="27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2" containerName="registry-server" containerID="cri-o://5f4308bb3d097498f3c0b4fe3cba3860441acc7f0ba14993a65f1f67e065fcd0" gracePeriod=2 Jan 22 07:28:24 crc kubenswrapper[4982]: I0122 07:28:24.408431 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vmc98" Jan 22 07:28:24 crc kubenswrapper[4982]: I0122 07:28:24.538033 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2-catalog-content\") pod \"27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2\" (UID: \"27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2\") " Jan 22 07:28:24 crc kubenswrapper[4982]: I0122 07:28:24.538294 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2-utilities\") pod \"27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2\" (UID: \"27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2\") " Jan 22 07:28:24 crc kubenswrapper[4982]: I0122 07:28:24.538323 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zf4ln\" (UniqueName: \"kubernetes.io/projected/27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2-kube-api-access-zf4ln\") pod \"27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2\" (UID: \"27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2\") " Jan 22 07:28:24 crc kubenswrapper[4982]: I0122 07:28:24.539619 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2-utilities" (OuterVolumeSpecName: "utilities") pod "27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2" (UID: "27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:28:24 crc kubenswrapper[4982]: I0122 07:28:24.568431 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2-kube-api-access-zf4ln" (OuterVolumeSpecName: "kube-api-access-zf4ln") pod "27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2" (UID: "27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2"). InnerVolumeSpecName "kube-api-access-zf4ln". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:28:24 crc kubenswrapper[4982]: I0122 07:28:24.642366 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 07:28:24 crc kubenswrapper[4982]: I0122 07:28:24.642452 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zf4ln\" (UniqueName: \"kubernetes.io/projected/27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2-kube-api-access-zf4ln\") on node \"crc\" DevicePath \"\"" Jan 22 07:28:24 crc kubenswrapper[4982]: I0122 07:28:24.680830 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2" (UID: "27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:28:24 crc kubenswrapper[4982]: I0122 07:28:24.744668 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 07:28:24 crc kubenswrapper[4982]: I0122 07:28:24.837103 4982 generic.go:334] "Generic (PLEG): container finished" podID="27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2" containerID="5f4308bb3d097498f3c0b4fe3cba3860441acc7f0ba14993a65f1f67e065fcd0" exitCode=0 Jan 22 07:28:24 crc kubenswrapper[4982]: I0122 07:28:24.837184 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vmc98" Jan 22 07:28:24 crc kubenswrapper[4982]: I0122 07:28:24.837248 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vmc98" event={"ID":"27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2","Type":"ContainerDied","Data":"5f4308bb3d097498f3c0b4fe3cba3860441acc7f0ba14993a65f1f67e065fcd0"} Jan 22 07:28:24 crc kubenswrapper[4982]: I0122 07:28:24.837296 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vmc98" event={"ID":"27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2","Type":"ContainerDied","Data":"1d2ef909ce1494430e79f00cee14e074788b0bb299bfaaf9516882bc54524c7c"} Jan 22 07:28:24 crc kubenswrapper[4982]: I0122 07:28:24.837341 4982 scope.go:117] "RemoveContainer" containerID="5f4308bb3d097498f3c0b4fe3cba3860441acc7f0ba14993a65f1f67e065fcd0" Jan 22 07:28:24 crc kubenswrapper[4982]: I0122 07:28:24.843216 4982 generic.go:334] "Generic (PLEG): container finished" podID="480843e6-45e2-46fb-b2d8-58a907124635" containerID="1d243c90c6e9dc7a8f3aad1bb9ddc14e9a4f8e7f5242a5d2c2cef60bd40ab68e" exitCode=0 Jan 22 07:28:24 crc kubenswrapper[4982]: I0122 07:28:24.843279 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rgg65" event={"ID":"480843e6-45e2-46fb-b2d8-58a907124635","Type":"ContainerDied","Data":"1d243c90c6e9dc7a8f3aad1bb9ddc14e9a4f8e7f5242a5d2c2cef60bd40ab68e"} Jan 22 07:28:24 crc kubenswrapper[4982]: I0122 07:28:24.885308 4982 scope.go:117] "RemoveContainer" containerID="ee11598309f9fa49eaaaf98ab243340a0adc385eaa3b383a238c69b46e20d8e1" Jan 22 07:28:24 crc kubenswrapper[4982]: I0122 07:28:24.913601 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vmc98"] Jan 22 07:28:24 crc kubenswrapper[4982]: I0122 07:28:24.935065 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vmc98"] Jan 22 07:28:24 crc kubenswrapper[4982]: I0122 07:28:24.935527 4982 scope.go:117] "RemoveContainer" containerID="009d5250d6065d20d7364b3f9a327ef5ac2d603528bbec3e2106a4894b530a70" Jan 22 07:28:24 crc kubenswrapper[4982]: I0122 07:28:24.955837 4982 scope.go:117] "RemoveContainer" containerID="5f4308bb3d097498f3c0b4fe3cba3860441acc7f0ba14993a65f1f67e065fcd0" Jan 22 07:28:24 crc kubenswrapper[4982]: E0122 07:28:24.968079 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f4308bb3d097498f3c0b4fe3cba3860441acc7f0ba14993a65f1f67e065fcd0\": container with ID starting with 5f4308bb3d097498f3c0b4fe3cba3860441acc7f0ba14993a65f1f67e065fcd0 not found: ID does not exist" containerID="5f4308bb3d097498f3c0b4fe3cba3860441acc7f0ba14993a65f1f67e065fcd0" Jan 22 07:28:24 crc kubenswrapper[4982]: 
I0122 07:28:24.968154 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f4308bb3d097498f3c0b4fe3cba3860441acc7f0ba14993a65f1f67e065fcd0"} err="failed to get container status \"5f4308bb3d097498f3c0b4fe3cba3860441acc7f0ba14993a65f1f67e065fcd0\": rpc error: code = NotFound desc = could not find container \"5f4308bb3d097498f3c0b4fe3cba3860441acc7f0ba14993a65f1f67e065fcd0\": container with ID starting with 5f4308bb3d097498f3c0b4fe3cba3860441acc7f0ba14993a65f1f67e065fcd0 not found: ID does not exist" Jan 22 07:28:24 crc kubenswrapper[4982]: I0122 07:28:24.968183 4982 scope.go:117] "RemoveContainer" containerID="ee11598309f9fa49eaaaf98ab243340a0adc385eaa3b383a238c69b46e20d8e1" Jan 22 07:28:24 crc kubenswrapper[4982]: E0122 07:28:24.968608 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee11598309f9fa49eaaaf98ab243340a0adc385eaa3b383a238c69b46e20d8e1\": container with ID starting with ee11598309f9fa49eaaaf98ab243340a0adc385eaa3b383a238c69b46e20d8e1 not found: ID does not exist" containerID="ee11598309f9fa49eaaaf98ab243340a0adc385eaa3b383a238c69b46e20d8e1" Jan 22 07:28:24 crc kubenswrapper[4982]: I0122 07:28:24.968651 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee11598309f9fa49eaaaf98ab243340a0adc385eaa3b383a238c69b46e20d8e1"} err="failed to get container status \"ee11598309f9fa49eaaaf98ab243340a0adc385eaa3b383a238c69b46e20d8e1\": rpc error: code = NotFound desc = could not find container \"ee11598309f9fa49eaaaf98ab243340a0adc385eaa3b383a238c69b46e20d8e1\": container with ID starting with ee11598309f9fa49eaaaf98ab243340a0adc385eaa3b383a238c69b46e20d8e1 not found: ID does not exist" Jan 22 07:28:24 crc kubenswrapper[4982]: I0122 07:28:24.968678 4982 scope.go:117] "RemoveContainer" containerID="009d5250d6065d20d7364b3f9a327ef5ac2d603528bbec3e2106a4894b530a70" Jan 22 07:28:24 crc kubenswrapper[4982]: E0122 07:28:24.968910 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"009d5250d6065d20d7364b3f9a327ef5ac2d603528bbec3e2106a4894b530a70\": container with ID starting with 009d5250d6065d20d7364b3f9a327ef5ac2d603528bbec3e2106a4894b530a70 not found: ID does not exist" containerID="009d5250d6065d20d7364b3f9a327ef5ac2d603528bbec3e2106a4894b530a70" Jan 22 07:28:24 crc kubenswrapper[4982]: I0122 07:28:24.968933 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"009d5250d6065d20d7364b3f9a327ef5ac2d603528bbec3e2106a4894b530a70"} err="failed to get container status \"009d5250d6065d20d7364b3f9a327ef5ac2d603528bbec3e2106a4894b530a70\": rpc error: code = NotFound desc = could not find container \"009d5250d6065d20d7364b3f9a327ef5ac2d603528bbec3e2106a4894b530a70\": container with ID starting with 009d5250d6065d20d7364b3f9a327ef5ac2d603528bbec3e2106a4894b530a70 not found: ID does not exist" Jan 22 07:28:25 crc kubenswrapper[4982]: I0122 07:28:25.734444 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2" path="/var/lib/kubelet/pods/27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2/volumes" Jan 22 07:28:26 crc kubenswrapper[4982]: I0122 07:28:26.718923 4982 scope.go:117] "RemoveContainer" containerID="e86d7de3d5541f26c30d5e3150518e2a21550ebe9adbca61610c9190dc552078" Jan 22 07:28:26 crc kubenswrapper[4982]: E0122 07:28:26.719534 4982 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:28:26 crc kubenswrapper[4982]: I0122 07:28:26.865270 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rgg65" event={"ID":"480843e6-45e2-46fb-b2d8-58a907124635","Type":"ContainerStarted","Data":"54659bb5aef62bde46dbdf65dd1f6cfc14368c65fc3c04d178b609c0dbf1a617"} Jan 22 07:28:26 crc kubenswrapper[4982]: I0122 07:28:26.890991 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rgg65" podStartSLOduration=5.6277052340000004 podStartE2EDuration="9.890972815s" podCreationTimestamp="2026-01-22 07:28:17 +0000 UTC" firstStartedPulling="2026-01-22 07:28:21.801291568 +0000 UTC m=+6162.639929571" lastFinishedPulling="2026-01-22 07:28:26.064559149 +0000 UTC m=+6166.903197152" observedRunningTime="2026-01-22 07:28:26.885435495 +0000 UTC m=+6167.724073508" watchObservedRunningTime="2026-01-22 07:28:26.890972815 +0000 UTC m=+6167.729610818" Jan 22 07:28:27 crc kubenswrapper[4982]: I0122 07:28:27.053045 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-ab6e-account-create-update-df4b7"] Jan 22 07:28:27 crc kubenswrapper[4982]: I0122 07:28:27.075366 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-ft4hj"] Jan 22 07:28:27 crc kubenswrapper[4982]: I0122 07:28:27.094267 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-ab6e-account-create-update-df4b7"] Jan 22 07:28:27 crc kubenswrapper[4982]: I0122 07:28:27.107269 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-ft4hj"] Jan 22 07:28:27 crc kubenswrapper[4982]: I0122 07:28:27.734577 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d25ac88-1252-411d-97c7-a3516d6cf5a1" path="/var/lib/kubelet/pods/3d25ac88-1252-411d-97c7-a3516d6cf5a1/volumes" Jan 22 07:28:27 crc kubenswrapper[4982]: I0122 07:28:27.736781 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4863b84e-5bd6-49d3-9b48-33d8beaeccab" path="/var/lib/kubelet/pods/4863b84e-5bd6-49d3-9b48-33d8beaeccab/volumes" Jan 22 07:28:28 crc kubenswrapper[4982]: I0122 07:28:28.168562 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rgg65" Jan 22 07:28:28 crc kubenswrapper[4982]: I0122 07:28:28.168616 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rgg65" Jan 22 07:28:28 crc kubenswrapper[4982]: I0122 07:28:28.216444 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rgg65" Jan 22 07:28:31 crc kubenswrapper[4982]: I0122 07:28:31.065175 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-99b96d4f5-7d75d" Jan 22 07:28:31 crc kubenswrapper[4982]: I0122 07:28:31.066189 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-99b96d4f5-7d75d" Jan 22 07:28:31 crc kubenswrapper[4982]: I0122 07:28:31.068200 4982 prober.go:107] "Probe failed" 
probeType="Startup" pod="openstack/horizon-99b96d4f5-7d75d" podUID="d6de4e3a-13ce-476a-bff4-cdabdcc138c8" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.106:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.106:8080: connect: connection refused" Jan 22 07:28:31 crc kubenswrapper[4982]: I0122 07:28:31.617010 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7b44bfdd57-k7hnf" Jan 22 07:28:31 crc kubenswrapper[4982]: I0122 07:28:31.617938 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7b44bfdd57-k7hnf" Jan 22 07:28:36 crc kubenswrapper[4982]: I0122 07:28:36.045007 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-znmfv"] Jan 22 07:28:36 crc kubenswrapper[4982]: I0122 07:28:36.052481 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-znmfv"] Jan 22 07:28:37 crc kubenswrapper[4982]: I0122 07:28:37.726640 4982 scope.go:117] "RemoveContainer" containerID="e86d7de3d5541f26c30d5e3150518e2a21550ebe9adbca61610c9190dc552078" Jan 22 07:28:37 crc kubenswrapper[4982]: E0122 07:28:37.727384 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:28:37 crc kubenswrapper[4982]: I0122 07:28:37.735385 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6053280f-39c8-494c-8e47-b0aad9d7f58e" path="/var/lib/kubelet/pods/6053280f-39c8-494c-8e47-b0aad9d7f58e/volumes" Jan 22 07:28:38 crc kubenswrapper[4982]: I0122 07:28:38.226907 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rgg65" Jan 22 07:28:38 crc kubenswrapper[4982]: I0122 07:28:38.281929 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rgg65"] Jan 22 07:28:38 crc kubenswrapper[4982]: I0122 07:28:38.986491 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-rgg65" podUID="480843e6-45e2-46fb-b2d8-58a907124635" containerName="registry-server" containerID="cri-o://54659bb5aef62bde46dbdf65dd1f6cfc14368c65fc3c04d178b609c0dbf1a617" gracePeriod=2 Jan 22 07:28:39 crc kubenswrapper[4982]: I0122 07:28:39.433009 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rgg65" Jan 22 07:28:39 crc kubenswrapper[4982]: I0122 07:28:39.552691 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/480843e6-45e2-46fb-b2d8-58a907124635-utilities\") pod \"480843e6-45e2-46fb-b2d8-58a907124635\" (UID: \"480843e6-45e2-46fb-b2d8-58a907124635\") " Jan 22 07:28:39 crc kubenswrapper[4982]: I0122 07:28:39.552736 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/480843e6-45e2-46fb-b2d8-58a907124635-catalog-content\") pod \"480843e6-45e2-46fb-b2d8-58a907124635\" (UID: \"480843e6-45e2-46fb-b2d8-58a907124635\") " Jan 22 07:28:39 crc kubenswrapper[4982]: I0122 07:28:39.552810 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n5dsg\" (UniqueName: \"kubernetes.io/projected/480843e6-45e2-46fb-b2d8-58a907124635-kube-api-access-n5dsg\") pod \"480843e6-45e2-46fb-b2d8-58a907124635\" (UID: \"480843e6-45e2-46fb-b2d8-58a907124635\") " Jan 22 07:28:39 crc kubenswrapper[4982]: I0122 07:28:39.554755 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/480843e6-45e2-46fb-b2d8-58a907124635-utilities" (OuterVolumeSpecName: "utilities") pod "480843e6-45e2-46fb-b2d8-58a907124635" (UID: "480843e6-45e2-46fb-b2d8-58a907124635"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:28:39 crc kubenswrapper[4982]: I0122 07:28:39.561818 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/480843e6-45e2-46fb-b2d8-58a907124635-kube-api-access-n5dsg" (OuterVolumeSpecName: "kube-api-access-n5dsg") pod "480843e6-45e2-46fb-b2d8-58a907124635" (UID: "480843e6-45e2-46fb-b2d8-58a907124635"). InnerVolumeSpecName "kube-api-access-n5dsg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:28:39 crc kubenswrapper[4982]: I0122 07:28:39.578564 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/480843e6-45e2-46fb-b2d8-58a907124635-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "480843e6-45e2-46fb-b2d8-58a907124635" (UID: "480843e6-45e2-46fb-b2d8-58a907124635"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:28:39 crc kubenswrapper[4982]: I0122 07:28:39.655372 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/480843e6-45e2-46fb-b2d8-58a907124635-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 07:28:39 crc kubenswrapper[4982]: I0122 07:28:39.655404 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/480843e6-45e2-46fb-b2d8-58a907124635-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 07:28:39 crc kubenswrapper[4982]: I0122 07:28:39.655415 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n5dsg\" (UniqueName: \"kubernetes.io/projected/480843e6-45e2-46fb-b2d8-58a907124635-kube-api-access-n5dsg\") on node \"crc\" DevicePath \"\"" Jan 22 07:28:39 crc kubenswrapper[4982]: I0122 07:28:39.995520 4982 generic.go:334] "Generic (PLEG): container finished" podID="480843e6-45e2-46fb-b2d8-58a907124635" containerID="54659bb5aef62bde46dbdf65dd1f6cfc14368c65fc3c04d178b609c0dbf1a617" exitCode=0 Jan 22 07:28:39 crc kubenswrapper[4982]: I0122 07:28:39.995556 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rgg65" event={"ID":"480843e6-45e2-46fb-b2d8-58a907124635","Type":"ContainerDied","Data":"54659bb5aef62bde46dbdf65dd1f6cfc14368c65fc3c04d178b609c0dbf1a617"} Jan 22 07:28:39 crc kubenswrapper[4982]: I0122 07:28:39.995581 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rgg65" event={"ID":"480843e6-45e2-46fb-b2d8-58a907124635","Type":"ContainerDied","Data":"8459cb8da17aae8f50a7d2233e89420fd2b7e5450d9688c124d7bc698e95493f"} Jan 22 07:28:39 crc kubenswrapper[4982]: I0122 07:28:39.995596 4982 scope.go:117] "RemoveContainer" containerID="54659bb5aef62bde46dbdf65dd1f6cfc14368c65fc3c04d178b609c0dbf1a617" Jan 22 07:28:39 crc kubenswrapper[4982]: I0122 07:28:39.995698 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rgg65" Jan 22 07:28:40 crc kubenswrapper[4982]: I0122 07:28:40.022636 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rgg65"] Jan 22 07:28:40 crc kubenswrapper[4982]: I0122 07:28:40.031033 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-rgg65"] Jan 22 07:28:40 crc kubenswrapper[4982]: I0122 07:28:40.031241 4982 scope.go:117] "RemoveContainer" containerID="1d243c90c6e9dc7a8f3aad1bb9ddc14e9a4f8e7f5242a5d2c2cef60bd40ab68e" Jan 22 07:28:40 crc kubenswrapper[4982]: I0122 07:28:40.056314 4982 scope.go:117] "RemoveContainer" containerID="b165dd8b10721c4dd8dd9bffa80e09bd1218ad676406bfa45660b2981fdcbb45" Jan 22 07:28:40 crc kubenswrapper[4982]: I0122 07:28:40.097154 4982 scope.go:117] "RemoveContainer" containerID="54659bb5aef62bde46dbdf65dd1f6cfc14368c65fc3c04d178b609c0dbf1a617" Jan 22 07:28:40 crc kubenswrapper[4982]: E0122 07:28:40.097476 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54659bb5aef62bde46dbdf65dd1f6cfc14368c65fc3c04d178b609c0dbf1a617\": container with ID starting with 54659bb5aef62bde46dbdf65dd1f6cfc14368c65fc3c04d178b609c0dbf1a617 not found: ID does not exist" containerID="54659bb5aef62bde46dbdf65dd1f6cfc14368c65fc3c04d178b609c0dbf1a617" Jan 22 07:28:40 crc kubenswrapper[4982]: I0122 07:28:40.097506 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54659bb5aef62bde46dbdf65dd1f6cfc14368c65fc3c04d178b609c0dbf1a617"} err="failed to get container status \"54659bb5aef62bde46dbdf65dd1f6cfc14368c65fc3c04d178b609c0dbf1a617\": rpc error: code = NotFound desc = could not find container \"54659bb5aef62bde46dbdf65dd1f6cfc14368c65fc3c04d178b609c0dbf1a617\": container with ID starting with 54659bb5aef62bde46dbdf65dd1f6cfc14368c65fc3c04d178b609c0dbf1a617 not found: ID does not exist" Jan 22 07:28:40 crc kubenswrapper[4982]: I0122 07:28:40.097526 4982 scope.go:117] "RemoveContainer" containerID="1d243c90c6e9dc7a8f3aad1bb9ddc14e9a4f8e7f5242a5d2c2cef60bd40ab68e" Jan 22 07:28:40 crc kubenswrapper[4982]: E0122 07:28:40.097725 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d243c90c6e9dc7a8f3aad1bb9ddc14e9a4f8e7f5242a5d2c2cef60bd40ab68e\": container with ID starting with 1d243c90c6e9dc7a8f3aad1bb9ddc14e9a4f8e7f5242a5d2c2cef60bd40ab68e not found: ID does not exist" containerID="1d243c90c6e9dc7a8f3aad1bb9ddc14e9a4f8e7f5242a5d2c2cef60bd40ab68e" Jan 22 07:28:40 crc kubenswrapper[4982]: I0122 07:28:40.097742 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d243c90c6e9dc7a8f3aad1bb9ddc14e9a4f8e7f5242a5d2c2cef60bd40ab68e"} err="failed to get container status \"1d243c90c6e9dc7a8f3aad1bb9ddc14e9a4f8e7f5242a5d2c2cef60bd40ab68e\": rpc error: code = NotFound desc = could not find container \"1d243c90c6e9dc7a8f3aad1bb9ddc14e9a4f8e7f5242a5d2c2cef60bd40ab68e\": container with ID starting with 1d243c90c6e9dc7a8f3aad1bb9ddc14e9a4f8e7f5242a5d2c2cef60bd40ab68e not found: ID does not exist" Jan 22 07:28:40 crc kubenswrapper[4982]: I0122 07:28:40.097755 4982 scope.go:117] "RemoveContainer" containerID="b165dd8b10721c4dd8dd9bffa80e09bd1218ad676406bfa45660b2981fdcbb45" Jan 22 07:28:40 crc kubenswrapper[4982]: E0122 07:28:40.098232 4982 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"b165dd8b10721c4dd8dd9bffa80e09bd1218ad676406bfa45660b2981fdcbb45\": container with ID starting with b165dd8b10721c4dd8dd9bffa80e09bd1218ad676406bfa45660b2981fdcbb45 not found: ID does not exist" containerID="b165dd8b10721c4dd8dd9bffa80e09bd1218ad676406bfa45660b2981fdcbb45" Jan 22 07:28:40 crc kubenswrapper[4982]: I0122 07:28:40.098253 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b165dd8b10721c4dd8dd9bffa80e09bd1218ad676406bfa45660b2981fdcbb45"} err="failed to get container status \"b165dd8b10721c4dd8dd9bffa80e09bd1218ad676406bfa45660b2981fdcbb45\": rpc error: code = NotFound desc = could not find container \"b165dd8b10721c4dd8dd9bffa80e09bd1218ad676406bfa45660b2981fdcbb45\": container with ID starting with b165dd8b10721c4dd8dd9bffa80e09bd1218ad676406bfa45660b2981fdcbb45 not found: ID does not exist" Jan 22 07:28:41 crc kubenswrapper[4982]: I0122 07:28:41.065775 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-99b96d4f5-7d75d" podUID="d6de4e3a-13ce-476a-bff4-cdabdcc138c8" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.106:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.106:8080: connect: connection refused" Jan 22 07:28:41 crc kubenswrapper[4982]: I0122 07:28:41.619766 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7b44bfdd57-k7hnf" podUID="db75fc6f-fad5-4212-be6b-e310f66ee471" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.107:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.107:8080: connect: connection refused" Jan 22 07:28:41 crc kubenswrapper[4982]: I0122 07:28:41.747621 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="480843e6-45e2-46fb-b2d8-58a907124635" path="/var/lib/kubelet/pods/480843e6-45e2-46fb-b2d8-58a907124635/volumes" Jan 22 07:28:52 crc kubenswrapper[4982]: I0122 07:28:52.719828 4982 scope.go:117] "RemoveContainer" containerID="e86d7de3d5541f26c30d5e3150518e2a21550ebe9adbca61610c9190dc552078" Jan 22 07:28:52 crc kubenswrapper[4982]: E0122 07:28:52.720735 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:28:52 crc kubenswrapper[4982]: I0122 07:28:52.952082 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-99b96d4f5-7d75d" Jan 22 07:28:53 crc kubenswrapper[4982]: I0122 07:28:53.453208 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-7b44bfdd57-k7hnf" Jan 22 07:28:54 crc kubenswrapper[4982]: I0122 07:28:54.555253 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-99b96d4f5-7d75d" Jan 22 07:28:55 crc kubenswrapper[4982]: I0122 07:28:55.121465 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-7b44bfdd57-k7hnf" Jan 22 07:28:55 crc kubenswrapper[4982]: I0122 07:28:55.187494 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-99b96d4f5-7d75d"] Jan 22 07:28:55 crc kubenswrapper[4982]: 
I0122 07:28:55.187710 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-99b96d4f5-7d75d" podUID="d6de4e3a-13ce-476a-bff4-cdabdcc138c8" containerName="horizon-log" containerID="cri-o://cef9c0b9e5f33e5e1998a211a59e1e86cf6e7d633445bc7eab5813f495745768" gracePeriod=30 Jan 22 07:28:55 crc kubenswrapper[4982]: I0122 07:28:55.187873 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-99b96d4f5-7d75d" podUID="d6de4e3a-13ce-476a-bff4-cdabdcc138c8" containerName="horizon" containerID="cri-o://f66ba423e5388a3467b72f78b4b8628e57c3f8d5a2fa65e7cd01073a180011cc" gracePeriod=30 Jan 22 07:28:59 crc kubenswrapper[4982]: I0122 07:28:59.211734 4982 generic.go:334] "Generic (PLEG): container finished" podID="d6de4e3a-13ce-476a-bff4-cdabdcc138c8" containerID="f66ba423e5388a3467b72f78b4b8628e57c3f8d5a2fa65e7cd01073a180011cc" exitCode=0 Jan 22 07:28:59 crc kubenswrapper[4982]: I0122 07:28:59.211941 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-99b96d4f5-7d75d" event={"ID":"d6de4e3a-13ce-476a-bff4-cdabdcc138c8","Type":"ContainerDied","Data":"f66ba423e5388a3467b72f78b4b8628e57c3f8d5a2fa65e7cd01073a180011cc"} Jan 22 07:29:01 crc kubenswrapper[4982]: I0122 07:29:01.066078 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-99b96d4f5-7d75d" podUID="d6de4e3a-13ce-476a-bff4-cdabdcc138c8" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.106:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.106:8080: connect: connection refused" Jan 22 07:29:04 crc kubenswrapper[4982]: I0122 07:29:04.670892 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5cb8668ff7-jmj7z"] Jan 22 07:29:04 crc kubenswrapper[4982]: E0122 07:29:04.671555 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2" containerName="registry-server" Jan 22 07:29:04 crc kubenswrapper[4982]: I0122 07:29:04.671566 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2" containerName="registry-server" Jan 22 07:29:04 crc kubenswrapper[4982]: E0122 07:29:04.671598 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="480843e6-45e2-46fb-b2d8-58a907124635" containerName="registry-server" Jan 22 07:29:04 crc kubenswrapper[4982]: I0122 07:29:04.671604 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="480843e6-45e2-46fb-b2d8-58a907124635" containerName="registry-server" Jan 22 07:29:04 crc kubenswrapper[4982]: E0122 07:29:04.671613 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="480843e6-45e2-46fb-b2d8-58a907124635" containerName="extract-utilities" Jan 22 07:29:04 crc kubenswrapper[4982]: I0122 07:29:04.671619 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="480843e6-45e2-46fb-b2d8-58a907124635" containerName="extract-utilities" Jan 22 07:29:04 crc kubenswrapper[4982]: E0122 07:29:04.671632 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2" containerName="extract-content" Jan 22 07:29:04 crc kubenswrapper[4982]: I0122 07:29:04.671637 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2" containerName="extract-content" Jan 22 07:29:04 crc kubenswrapper[4982]: E0122 07:29:04.671649 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2" 
containerName="extract-utilities" Jan 22 07:29:04 crc kubenswrapper[4982]: I0122 07:29:04.671655 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2" containerName="extract-utilities" Jan 22 07:29:04 crc kubenswrapper[4982]: E0122 07:29:04.671669 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="480843e6-45e2-46fb-b2d8-58a907124635" containerName="extract-content" Jan 22 07:29:04 crc kubenswrapper[4982]: I0122 07:29:04.671674 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="480843e6-45e2-46fb-b2d8-58a907124635" containerName="extract-content" Jan 22 07:29:04 crc kubenswrapper[4982]: I0122 07:29:04.671839 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="480843e6-45e2-46fb-b2d8-58a907124635" containerName="registry-server" Jan 22 07:29:04 crc kubenswrapper[4982]: I0122 07:29:04.671858 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="27e6c0cf-5e12-4a5b-b1ed-ec10dba19ca2" containerName="registry-server" Jan 22 07:29:04 crc kubenswrapper[4982]: I0122 07:29:04.672778 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5cb8668ff7-jmj7z" Jan 22 07:29:04 crc kubenswrapper[4982]: I0122 07:29:04.695653 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5cb8668ff7-jmj7z"] Jan 22 07:29:04 crc kubenswrapper[4982]: I0122 07:29:04.721042 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e687c53-388f-46e5-ae9e-b42960a66206-scripts\") pod \"horizon-5cb8668ff7-jmj7z\" (UID: \"3e687c53-388f-46e5-ae9e-b42960a66206\") " pod="openstack/horizon-5cb8668ff7-jmj7z" Jan 22 07:29:04 crc kubenswrapper[4982]: I0122 07:29:04.721094 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e687c53-388f-46e5-ae9e-b42960a66206-logs\") pod \"horizon-5cb8668ff7-jmj7z\" (UID: \"3e687c53-388f-46e5-ae9e-b42960a66206\") " pod="openstack/horizon-5cb8668ff7-jmj7z" Jan 22 07:29:04 crc kubenswrapper[4982]: I0122 07:29:04.721186 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmbxp\" (UniqueName: \"kubernetes.io/projected/3e687c53-388f-46e5-ae9e-b42960a66206-kube-api-access-kmbxp\") pod \"horizon-5cb8668ff7-jmj7z\" (UID: \"3e687c53-388f-46e5-ae9e-b42960a66206\") " pod="openstack/horizon-5cb8668ff7-jmj7z" Jan 22 07:29:04 crc kubenswrapper[4982]: I0122 07:29:04.721334 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3e687c53-388f-46e5-ae9e-b42960a66206-config-data\") pod \"horizon-5cb8668ff7-jmj7z\" (UID: \"3e687c53-388f-46e5-ae9e-b42960a66206\") " pod="openstack/horizon-5cb8668ff7-jmj7z" Jan 22 07:29:04 crc kubenswrapper[4982]: I0122 07:29:04.722297 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3e687c53-388f-46e5-ae9e-b42960a66206-horizon-secret-key\") pod \"horizon-5cb8668ff7-jmj7z\" (UID: \"3e687c53-388f-46e5-ae9e-b42960a66206\") " pod="openstack/horizon-5cb8668ff7-jmj7z" Jan 22 07:29:04 crc kubenswrapper[4982]: I0122 07:29:04.824435 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmbxp\" (UniqueName: 
\"kubernetes.io/projected/3e687c53-388f-46e5-ae9e-b42960a66206-kube-api-access-kmbxp\") pod \"horizon-5cb8668ff7-jmj7z\" (UID: \"3e687c53-388f-46e5-ae9e-b42960a66206\") " pod="openstack/horizon-5cb8668ff7-jmj7z" Jan 22 07:29:04 crc kubenswrapper[4982]: I0122 07:29:04.824606 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3e687c53-388f-46e5-ae9e-b42960a66206-config-data\") pod \"horizon-5cb8668ff7-jmj7z\" (UID: \"3e687c53-388f-46e5-ae9e-b42960a66206\") " pod="openstack/horizon-5cb8668ff7-jmj7z" Jan 22 07:29:04 crc kubenswrapper[4982]: I0122 07:29:04.824744 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3e687c53-388f-46e5-ae9e-b42960a66206-horizon-secret-key\") pod \"horizon-5cb8668ff7-jmj7z\" (UID: \"3e687c53-388f-46e5-ae9e-b42960a66206\") " pod="openstack/horizon-5cb8668ff7-jmj7z" Jan 22 07:29:04 crc kubenswrapper[4982]: I0122 07:29:04.825004 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e687c53-388f-46e5-ae9e-b42960a66206-scripts\") pod \"horizon-5cb8668ff7-jmj7z\" (UID: \"3e687c53-388f-46e5-ae9e-b42960a66206\") " pod="openstack/horizon-5cb8668ff7-jmj7z" Jan 22 07:29:04 crc kubenswrapper[4982]: I0122 07:29:04.825076 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e687c53-388f-46e5-ae9e-b42960a66206-logs\") pod \"horizon-5cb8668ff7-jmj7z\" (UID: \"3e687c53-388f-46e5-ae9e-b42960a66206\") " pod="openstack/horizon-5cb8668ff7-jmj7z" Jan 22 07:29:04 crc kubenswrapper[4982]: I0122 07:29:04.826381 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e687c53-388f-46e5-ae9e-b42960a66206-logs\") pod \"horizon-5cb8668ff7-jmj7z\" (UID: \"3e687c53-388f-46e5-ae9e-b42960a66206\") " pod="openstack/horizon-5cb8668ff7-jmj7z" Jan 22 07:29:04 crc kubenswrapper[4982]: I0122 07:29:04.827659 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e687c53-388f-46e5-ae9e-b42960a66206-scripts\") pod \"horizon-5cb8668ff7-jmj7z\" (UID: \"3e687c53-388f-46e5-ae9e-b42960a66206\") " pod="openstack/horizon-5cb8668ff7-jmj7z" Jan 22 07:29:04 crc kubenswrapper[4982]: I0122 07:29:04.828247 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3e687c53-388f-46e5-ae9e-b42960a66206-config-data\") pod \"horizon-5cb8668ff7-jmj7z\" (UID: \"3e687c53-388f-46e5-ae9e-b42960a66206\") " pod="openstack/horizon-5cb8668ff7-jmj7z" Jan 22 07:29:04 crc kubenswrapper[4982]: I0122 07:29:04.834424 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3e687c53-388f-46e5-ae9e-b42960a66206-horizon-secret-key\") pod \"horizon-5cb8668ff7-jmj7z\" (UID: \"3e687c53-388f-46e5-ae9e-b42960a66206\") " pod="openstack/horizon-5cb8668ff7-jmj7z" Jan 22 07:29:04 crc kubenswrapper[4982]: I0122 07:29:04.841517 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmbxp\" (UniqueName: \"kubernetes.io/projected/3e687c53-388f-46e5-ae9e-b42960a66206-kube-api-access-kmbxp\") pod \"horizon-5cb8668ff7-jmj7z\" (UID: \"3e687c53-388f-46e5-ae9e-b42960a66206\") " pod="openstack/horizon-5cb8668ff7-jmj7z" Jan 22 
07:29:04 crc kubenswrapper[4982]: I0122 07:29:04.989248 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5cb8668ff7-jmj7z" Jan 22 07:29:05 crc kubenswrapper[4982]: I0122 07:29:05.478402 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5cb8668ff7-jmj7z"] Jan 22 07:29:05 crc kubenswrapper[4982]: I0122 07:29:05.719583 4982 scope.go:117] "RemoveContainer" containerID="e86d7de3d5541f26c30d5e3150518e2a21550ebe9adbca61610c9190dc552078" Jan 22 07:29:05 crc kubenswrapper[4982]: E0122 07:29:05.720173 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:29:06 crc kubenswrapper[4982]: I0122 07:29:06.130115 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-create-9rx9z"] Jan 22 07:29:06 crc kubenswrapper[4982]: I0122 07:29:06.131843 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-9rx9z" Jan 22 07:29:06 crc kubenswrapper[4982]: I0122 07:29:06.137741 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-9rx9z"] Jan 22 07:29:06 crc kubenswrapper[4982]: I0122 07:29:06.153944 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8bd94f5c-8bb0-4974-925e-e4b48d664221-operator-scripts\") pod \"heat-db-create-9rx9z\" (UID: \"8bd94f5c-8bb0-4974-925e-e4b48d664221\") " pod="openstack/heat-db-create-9rx9z" Jan 22 07:29:06 crc kubenswrapper[4982]: I0122 07:29:06.154085 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqh6c\" (UniqueName: \"kubernetes.io/projected/8bd94f5c-8bb0-4974-925e-e4b48d664221-kube-api-access-rqh6c\") pod \"heat-db-create-9rx9z\" (UID: \"8bd94f5c-8bb0-4974-925e-e4b48d664221\") " pod="openstack/heat-db-create-9rx9z" Jan 22 07:29:06 crc kubenswrapper[4982]: I0122 07:29:06.177430 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-3c01-account-create-update-lwkdv"] Jan 22 07:29:06 crc kubenswrapper[4982]: I0122 07:29:06.178733 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-3c01-account-create-update-lwkdv" Jan 22 07:29:06 crc kubenswrapper[4982]: I0122 07:29:06.190701 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-db-secret" Jan 22 07:29:06 crc kubenswrapper[4982]: I0122 07:29:06.228943 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-3c01-account-create-update-lwkdv"] Jan 22 07:29:06 crc kubenswrapper[4982]: I0122 07:29:06.256116 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqh6c\" (UniqueName: \"kubernetes.io/projected/8bd94f5c-8bb0-4974-925e-e4b48d664221-kube-api-access-rqh6c\") pod \"heat-db-create-9rx9z\" (UID: \"8bd94f5c-8bb0-4974-925e-e4b48d664221\") " pod="openstack/heat-db-create-9rx9z" Jan 22 07:29:06 crc kubenswrapper[4982]: I0122 07:29:06.256220 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4nbtq\" (UniqueName: \"kubernetes.io/projected/56983423-387d-434d-a31a-44ea32d0d01a-kube-api-access-4nbtq\") pod \"heat-3c01-account-create-update-lwkdv\" (UID: \"56983423-387d-434d-a31a-44ea32d0d01a\") " pod="openstack/heat-3c01-account-create-update-lwkdv" Jan 22 07:29:06 crc kubenswrapper[4982]: I0122 07:29:06.256320 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/56983423-387d-434d-a31a-44ea32d0d01a-operator-scripts\") pod \"heat-3c01-account-create-update-lwkdv\" (UID: \"56983423-387d-434d-a31a-44ea32d0d01a\") " pod="openstack/heat-3c01-account-create-update-lwkdv" Jan 22 07:29:06 crc kubenswrapper[4982]: I0122 07:29:06.256408 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8bd94f5c-8bb0-4974-925e-e4b48d664221-operator-scripts\") pod \"heat-db-create-9rx9z\" (UID: \"8bd94f5c-8bb0-4974-925e-e4b48d664221\") " pod="openstack/heat-db-create-9rx9z" Jan 22 07:29:06 crc kubenswrapper[4982]: I0122 07:29:06.257250 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8bd94f5c-8bb0-4974-925e-e4b48d664221-operator-scripts\") pod \"heat-db-create-9rx9z\" (UID: \"8bd94f5c-8bb0-4974-925e-e4b48d664221\") " pod="openstack/heat-db-create-9rx9z" Jan 22 07:29:06 crc kubenswrapper[4982]: I0122 07:29:06.280549 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqh6c\" (UniqueName: \"kubernetes.io/projected/8bd94f5c-8bb0-4974-925e-e4b48d664221-kube-api-access-rqh6c\") pod \"heat-db-create-9rx9z\" (UID: \"8bd94f5c-8bb0-4974-925e-e4b48d664221\") " pod="openstack/heat-db-create-9rx9z" Jan 22 07:29:06 crc kubenswrapper[4982]: I0122 07:29:06.299034 4982 scope.go:117] "RemoveContainer" containerID="99a0f56c5828f9782f18c1f6c2faacc0b58155eb29ea76f833485d0469a33480" Jan 22 07:29:06 crc kubenswrapper[4982]: I0122 07:29:06.314745 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5cb8668ff7-jmj7z" event={"ID":"3e687c53-388f-46e5-ae9e-b42960a66206","Type":"ContainerStarted","Data":"16bdf8113b376e1012112c9157d55a08f8c8a142be6828b622cdadd451eab865"} Jan 22 07:29:06 crc kubenswrapper[4982]: I0122 07:29:06.314909 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5cb8668ff7-jmj7z" 
event={"ID":"3e687c53-388f-46e5-ae9e-b42960a66206","Type":"ContainerStarted","Data":"8dbc28e470b31f978a8a5aa5bfafd725126be2b998ce3b76cf87d511048cbbdc"} Jan 22 07:29:06 crc kubenswrapper[4982]: I0122 07:29:06.314991 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5cb8668ff7-jmj7z" event={"ID":"3e687c53-388f-46e5-ae9e-b42960a66206","Type":"ContainerStarted","Data":"542f14bd7f9fdc82cd96d5da61c6eae40935182bd99ee095c82729607550ee78"} Jan 22 07:29:06 crc kubenswrapper[4982]: I0122 07:29:06.350033 4982 scope.go:117] "RemoveContainer" containerID="a3d01621d72192575e6823c1fb042d035d1e8a45e4efa60a65e9baaf5a78f9d5" Jan 22 07:29:06 crc kubenswrapper[4982]: I0122 07:29:06.359360 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4nbtq\" (UniqueName: \"kubernetes.io/projected/56983423-387d-434d-a31a-44ea32d0d01a-kube-api-access-4nbtq\") pod \"heat-3c01-account-create-update-lwkdv\" (UID: \"56983423-387d-434d-a31a-44ea32d0d01a\") " pod="openstack/heat-3c01-account-create-update-lwkdv" Jan 22 07:29:06 crc kubenswrapper[4982]: I0122 07:29:06.359481 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/56983423-387d-434d-a31a-44ea32d0d01a-operator-scripts\") pod \"heat-3c01-account-create-update-lwkdv\" (UID: \"56983423-387d-434d-a31a-44ea32d0d01a\") " pod="openstack/heat-3c01-account-create-update-lwkdv" Jan 22 07:29:06 crc kubenswrapper[4982]: I0122 07:29:06.360471 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/56983423-387d-434d-a31a-44ea32d0d01a-operator-scripts\") pod \"heat-3c01-account-create-update-lwkdv\" (UID: \"56983423-387d-434d-a31a-44ea32d0d01a\") " pod="openstack/heat-3c01-account-create-update-lwkdv" Jan 22 07:29:06 crc kubenswrapper[4982]: I0122 07:29:06.377638 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4nbtq\" (UniqueName: \"kubernetes.io/projected/56983423-387d-434d-a31a-44ea32d0d01a-kube-api-access-4nbtq\") pod \"heat-3c01-account-create-update-lwkdv\" (UID: \"56983423-387d-434d-a31a-44ea32d0d01a\") " pod="openstack/heat-3c01-account-create-update-lwkdv" Jan 22 07:29:06 crc kubenswrapper[4982]: I0122 07:29:06.388603 4982 scope.go:117] "RemoveContainer" containerID="af8101c6965c15ad53a74bcaca55d2812eb3729dc4e31240591b9a9e16ace35c" Jan 22 07:29:06 crc kubenswrapper[4982]: I0122 07:29:06.489669 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-9rx9z" Jan 22 07:29:06 crc kubenswrapper[4982]: I0122 07:29:06.526168 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-3c01-account-create-update-lwkdv" Jan 22 07:29:07 crc kubenswrapper[4982]: I0122 07:29:07.013959 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-5cb8668ff7-jmj7z" podStartSLOduration=3.013943695 podStartE2EDuration="3.013943695s" podCreationTimestamp="2026-01-22 07:29:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:29:06.34678613 +0000 UTC m=+6207.185424133" watchObservedRunningTime="2026-01-22 07:29:07.013943695 +0000 UTC m=+6207.852581698" Jan 22 07:29:07 crc kubenswrapper[4982]: I0122 07:29:07.014403 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-9rx9z"] Jan 22 07:29:07 crc kubenswrapper[4982]: W0122 07:29:07.017229 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8bd94f5c_8bb0_4974_925e_e4b48d664221.slice/crio-15a0b6b3c1e465fc04843b1c835b6e8e615fff0319733fd22e86669f3f6b2ae0 WatchSource:0}: Error finding container 15a0b6b3c1e465fc04843b1c835b6e8e615fff0319733fd22e86669f3f6b2ae0: Status 404 returned error can't find the container with id 15a0b6b3c1e465fc04843b1c835b6e8e615fff0319733fd22e86669f3f6b2ae0 Jan 22 07:29:07 crc kubenswrapper[4982]: I0122 07:29:07.234462 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-3c01-account-create-update-lwkdv"] Jan 22 07:29:07 crc kubenswrapper[4982]: I0122 07:29:07.324534 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-3c01-account-create-update-lwkdv" event={"ID":"56983423-387d-434d-a31a-44ea32d0d01a","Type":"ContainerStarted","Data":"93075a1b011ef77b9384fbbf6b765e8b9b81edd2637f0532d67b11f89e81b29f"} Jan 22 07:29:07 crc kubenswrapper[4982]: I0122 07:29:07.326313 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-9rx9z" event={"ID":"8bd94f5c-8bb0-4974-925e-e4b48d664221","Type":"ContainerStarted","Data":"5a3b8ac743fb97c8f64a0a8abf8b7c3aafd9b54fd6f13f9f8b4f9aeac1c4e7a7"} Jan 22 07:29:07 crc kubenswrapper[4982]: I0122 07:29:07.326368 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-9rx9z" event={"ID":"8bd94f5c-8bb0-4974-925e-e4b48d664221","Type":"ContainerStarted","Data":"15a0b6b3c1e465fc04843b1c835b6e8e615fff0319733fd22e86669f3f6b2ae0"} Jan 22 07:29:07 crc kubenswrapper[4982]: I0122 07:29:07.345305 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-db-create-9rx9z" podStartSLOduration=1.345282133 podStartE2EDuration="1.345282133s" podCreationTimestamp="2026-01-22 07:29:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:29:07.338273263 +0000 UTC m=+6208.176911266" watchObservedRunningTime="2026-01-22 07:29:07.345282133 +0000 UTC m=+6208.183920136" Jan 22 07:29:08 crc kubenswrapper[4982]: I0122 07:29:08.337406 4982 generic.go:334] "Generic (PLEG): container finished" podID="8bd94f5c-8bb0-4974-925e-e4b48d664221" containerID="5a3b8ac743fb97c8f64a0a8abf8b7c3aafd9b54fd6f13f9f8b4f9aeac1c4e7a7" exitCode=0 Jan 22 07:29:08 crc kubenswrapper[4982]: I0122 07:29:08.337494 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-9rx9z" 
event={"ID":"8bd94f5c-8bb0-4974-925e-e4b48d664221","Type":"ContainerDied","Data":"5a3b8ac743fb97c8f64a0a8abf8b7c3aafd9b54fd6f13f9f8b4f9aeac1c4e7a7"} Jan 22 07:29:08 crc kubenswrapper[4982]: I0122 07:29:08.340517 4982 generic.go:334] "Generic (PLEG): container finished" podID="56983423-387d-434d-a31a-44ea32d0d01a" containerID="722ac6b65734b895ac457d4167627ac54d52dea61dd6e589d66116aed112ce8e" exitCode=0 Jan 22 07:29:08 crc kubenswrapper[4982]: I0122 07:29:08.340569 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-3c01-account-create-update-lwkdv" event={"ID":"56983423-387d-434d-a31a-44ea32d0d01a","Type":"ContainerDied","Data":"722ac6b65734b895ac457d4167627ac54d52dea61dd6e589d66116aed112ce8e"} Jan 22 07:29:09 crc kubenswrapper[4982]: I0122 07:29:09.808108 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-9rx9z" Jan 22 07:29:09 crc kubenswrapper[4982]: I0122 07:29:09.816776 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-3c01-account-create-update-lwkdv" Jan 22 07:29:09 crc kubenswrapper[4982]: I0122 07:29:09.936137 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rqh6c\" (UniqueName: \"kubernetes.io/projected/8bd94f5c-8bb0-4974-925e-e4b48d664221-kube-api-access-rqh6c\") pod \"8bd94f5c-8bb0-4974-925e-e4b48d664221\" (UID: \"8bd94f5c-8bb0-4974-925e-e4b48d664221\") " Jan 22 07:29:09 crc kubenswrapper[4982]: I0122 07:29:09.936291 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/56983423-387d-434d-a31a-44ea32d0d01a-operator-scripts\") pod \"56983423-387d-434d-a31a-44ea32d0d01a\" (UID: \"56983423-387d-434d-a31a-44ea32d0d01a\") " Jan 22 07:29:09 crc kubenswrapper[4982]: I0122 07:29:09.936354 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8bd94f5c-8bb0-4974-925e-e4b48d664221-operator-scripts\") pod \"8bd94f5c-8bb0-4974-925e-e4b48d664221\" (UID: \"8bd94f5c-8bb0-4974-925e-e4b48d664221\") " Jan 22 07:29:09 crc kubenswrapper[4982]: I0122 07:29:09.936399 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4nbtq\" (UniqueName: \"kubernetes.io/projected/56983423-387d-434d-a31a-44ea32d0d01a-kube-api-access-4nbtq\") pod \"56983423-387d-434d-a31a-44ea32d0d01a\" (UID: \"56983423-387d-434d-a31a-44ea32d0d01a\") " Jan 22 07:29:09 crc kubenswrapper[4982]: I0122 07:29:09.936947 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56983423-387d-434d-a31a-44ea32d0d01a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "56983423-387d-434d-a31a-44ea32d0d01a" (UID: "56983423-387d-434d-a31a-44ea32d0d01a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:29:09 crc kubenswrapper[4982]: I0122 07:29:09.937277 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8bd94f5c-8bb0-4974-925e-e4b48d664221-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8bd94f5c-8bb0-4974-925e-e4b48d664221" (UID: "8bd94f5c-8bb0-4974-925e-e4b48d664221"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:29:09 crc kubenswrapper[4982]: I0122 07:29:09.942839 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bd94f5c-8bb0-4974-925e-e4b48d664221-kube-api-access-rqh6c" (OuterVolumeSpecName: "kube-api-access-rqh6c") pod "8bd94f5c-8bb0-4974-925e-e4b48d664221" (UID: "8bd94f5c-8bb0-4974-925e-e4b48d664221"). InnerVolumeSpecName "kube-api-access-rqh6c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:29:09 crc kubenswrapper[4982]: I0122 07:29:09.943753 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56983423-387d-434d-a31a-44ea32d0d01a-kube-api-access-4nbtq" (OuterVolumeSpecName: "kube-api-access-4nbtq") pod "56983423-387d-434d-a31a-44ea32d0d01a" (UID: "56983423-387d-434d-a31a-44ea32d0d01a"). InnerVolumeSpecName "kube-api-access-4nbtq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:29:10 crc kubenswrapper[4982]: I0122 07:29:10.038200 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8bd94f5c-8bb0-4974-925e-e4b48d664221-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:29:10 crc kubenswrapper[4982]: I0122 07:29:10.038234 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4nbtq\" (UniqueName: \"kubernetes.io/projected/56983423-387d-434d-a31a-44ea32d0d01a-kube-api-access-4nbtq\") on node \"crc\" DevicePath \"\"" Jan 22 07:29:10 crc kubenswrapper[4982]: I0122 07:29:10.038247 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rqh6c\" (UniqueName: \"kubernetes.io/projected/8bd94f5c-8bb0-4974-925e-e4b48d664221-kube-api-access-rqh6c\") on node \"crc\" DevicePath \"\"" Jan 22 07:29:10 crc kubenswrapper[4982]: I0122 07:29:10.038259 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/56983423-387d-434d-a31a-44ea32d0d01a-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:29:10 crc kubenswrapper[4982]: I0122 07:29:10.362338 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-3c01-account-create-update-lwkdv" event={"ID":"56983423-387d-434d-a31a-44ea32d0d01a","Type":"ContainerDied","Data":"93075a1b011ef77b9384fbbf6b765e8b9b81edd2637f0532d67b11f89e81b29f"} Jan 22 07:29:10 crc kubenswrapper[4982]: I0122 07:29:10.362398 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="93075a1b011ef77b9384fbbf6b765e8b9b81edd2637f0532d67b11f89e81b29f" Jan 22 07:29:10 crc kubenswrapper[4982]: I0122 07:29:10.362494 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-3c01-account-create-update-lwkdv" Jan 22 07:29:10 crc kubenswrapper[4982]: I0122 07:29:10.369224 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-9rx9z" event={"ID":"8bd94f5c-8bb0-4974-925e-e4b48d664221","Type":"ContainerDied","Data":"15a0b6b3c1e465fc04843b1c835b6e8e615fff0319733fd22e86669f3f6b2ae0"} Jan 22 07:29:10 crc kubenswrapper[4982]: I0122 07:29:10.369277 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="15a0b6b3c1e465fc04843b1c835b6e8e615fff0319733fd22e86669f3f6b2ae0" Jan 22 07:29:10 crc kubenswrapper[4982]: I0122 07:29:10.369335 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-create-9rx9z" Jan 22 07:29:11 crc kubenswrapper[4982]: I0122 07:29:11.065905 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-99b96d4f5-7d75d" podUID="d6de4e3a-13ce-476a-bff4-cdabdcc138c8" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.106:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.106:8080: connect: connection refused" Jan 22 07:29:11 crc kubenswrapper[4982]: I0122 07:29:11.459518 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-sync-bq959"] Jan 22 07:29:11 crc kubenswrapper[4982]: E0122 07:29:11.460032 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bd94f5c-8bb0-4974-925e-e4b48d664221" containerName="mariadb-database-create" Jan 22 07:29:11 crc kubenswrapper[4982]: I0122 07:29:11.460056 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bd94f5c-8bb0-4974-925e-e4b48d664221" containerName="mariadb-database-create" Jan 22 07:29:11 crc kubenswrapper[4982]: E0122 07:29:11.460083 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56983423-387d-434d-a31a-44ea32d0d01a" containerName="mariadb-account-create-update" Jan 22 07:29:11 crc kubenswrapper[4982]: I0122 07:29:11.460092 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="56983423-387d-434d-a31a-44ea32d0d01a" containerName="mariadb-account-create-update" Jan 22 07:29:11 crc kubenswrapper[4982]: I0122 07:29:11.460297 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bd94f5c-8bb0-4974-925e-e4b48d664221" containerName="mariadb-database-create" Jan 22 07:29:11 crc kubenswrapper[4982]: I0122 07:29:11.460326 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="56983423-387d-434d-a31a-44ea32d0d01a" containerName="mariadb-account-create-update" Jan 22 07:29:11 crc kubenswrapper[4982]: I0122 07:29:11.460986 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-sync-bq959" Jan 22 07:29:11 crc kubenswrapper[4982]: I0122 07:29:11.464696 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Jan 22 07:29:11 crc kubenswrapper[4982]: I0122 07:29:11.464988 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-lqmrz" Jan 22 07:29:11 crc kubenswrapper[4982]: I0122 07:29:11.479478 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-bq959"] Jan 22 07:29:11 crc kubenswrapper[4982]: I0122 07:29:11.571582 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df9743d7-745a-45f6-91f0-564f7e4811df-combined-ca-bundle\") pod \"heat-db-sync-bq959\" (UID: \"df9743d7-745a-45f6-91f0-564f7e4811df\") " pod="openstack/heat-db-sync-bq959" Jan 22 07:29:11 crc kubenswrapper[4982]: I0122 07:29:11.571656 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7d8b\" (UniqueName: \"kubernetes.io/projected/df9743d7-745a-45f6-91f0-564f7e4811df-kube-api-access-v7d8b\") pod \"heat-db-sync-bq959\" (UID: \"df9743d7-745a-45f6-91f0-564f7e4811df\") " pod="openstack/heat-db-sync-bq959" Jan 22 07:29:11 crc kubenswrapper[4982]: I0122 07:29:11.572263 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df9743d7-745a-45f6-91f0-564f7e4811df-config-data\") pod \"heat-db-sync-bq959\" (UID: \"df9743d7-745a-45f6-91f0-564f7e4811df\") " pod="openstack/heat-db-sync-bq959" Jan 22 07:29:11 crc kubenswrapper[4982]: I0122 07:29:11.674313 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df9743d7-745a-45f6-91f0-564f7e4811df-config-data\") pod \"heat-db-sync-bq959\" (UID: \"df9743d7-745a-45f6-91f0-564f7e4811df\") " pod="openstack/heat-db-sync-bq959" Jan 22 07:29:11 crc kubenswrapper[4982]: I0122 07:29:11.674476 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df9743d7-745a-45f6-91f0-564f7e4811df-combined-ca-bundle\") pod \"heat-db-sync-bq959\" (UID: \"df9743d7-745a-45f6-91f0-564f7e4811df\") " pod="openstack/heat-db-sync-bq959" Jan 22 07:29:11 crc kubenswrapper[4982]: I0122 07:29:11.674526 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v7d8b\" (UniqueName: \"kubernetes.io/projected/df9743d7-745a-45f6-91f0-564f7e4811df-kube-api-access-v7d8b\") pod \"heat-db-sync-bq959\" (UID: \"df9743d7-745a-45f6-91f0-564f7e4811df\") " pod="openstack/heat-db-sync-bq959" Jan 22 07:29:11 crc kubenswrapper[4982]: I0122 07:29:11.681119 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df9743d7-745a-45f6-91f0-564f7e4811df-combined-ca-bundle\") pod \"heat-db-sync-bq959\" (UID: \"df9743d7-745a-45f6-91f0-564f7e4811df\") " pod="openstack/heat-db-sync-bq959" Jan 22 07:29:11 crc kubenswrapper[4982]: I0122 07:29:11.684726 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df9743d7-745a-45f6-91f0-564f7e4811df-config-data\") pod \"heat-db-sync-bq959\" (UID: \"df9743d7-745a-45f6-91f0-564f7e4811df\") " pod="openstack/heat-db-sync-bq959" 
Jan 22 07:29:11 crc kubenswrapper[4982]: I0122 07:29:11.694409 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7d8b\" (UniqueName: \"kubernetes.io/projected/df9743d7-745a-45f6-91f0-564f7e4811df-kube-api-access-v7d8b\") pod \"heat-db-sync-bq959\" (UID: \"df9743d7-745a-45f6-91f0-564f7e4811df\") " pod="openstack/heat-db-sync-bq959" Jan 22 07:29:11 crc kubenswrapper[4982]: I0122 07:29:11.779129 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-bq959" Jan 22 07:29:12 crc kubenswrapper[4982]: I0122 07:29:12.265705 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-bq959"] Jan 22 07:29:12 crc kubenswrapper[4982]: I0122 07:29:12.407360 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-bq959" event={"ID":"df9743d7-745a-45f6-91f0-564f7e4811df","Type":"ContainerStarted","Data":"6388e6360813493bdae23e5d29c9961aa6aa8e10567681ff8a3ce25de4d8066b"} Jan 22 07:29:14 crc kubenswrapper[4982]: I0122 07:29:14.989607 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-5cb8668ff7-jmj7z" Jan 22 07:29:14 crc kubenswrapper[4982]: I0122 07:29:14.990394 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5cb8668ff7-jmj7z" Jan 22 07:29:16 crc kubenswrapper[4982]: I0122 07:29:16.719616 4982 scope.go:117] "RemoveContainer" containerID="e86d7de3d5541f26c30d5e3150518e2a21550ebe9adbca61610c9190dc552078" Jan 22 07:29:16 crc kubenswrapper[4982]: E0122 07:29:16.720252 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:29:21 crc kubenswrapper[4982]: I0122 07:29:21.066593 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-99b96d4f5-7d75d" podUID="d6de4e3a-13ce-476a-bff4-cdabdcc138c8" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.106:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.106:8080: connect: connection refused" Jan 22 07:29:21 crc kubenswrapper[4982]: I0122 07:29:21.067318 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-99b96d4f5-7d75d" Jan 22 07:29:21 crc kubenswrapper[4982]: I0122 07:29:21.502754 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-bq959" event={"ID":"df9743d7-745a-45f6-91f0-564f7e4811df","Type":"ContainerStarted","Data":"c1afb082fedd7ea8afbe222d864b044d9d7891146609b0fbf695ff894e938216"} Jan 22 07:29:21 crc kubenswrapper[4982]: I0122 07:29:21.535036 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-db-sync-bq959" podStartSLOduration=2.063179302 podStartE2EDuration="10.535016208s" podCreationTimestamp="2026-01-22 07:29:11 +0000 UTC" firstStartedPulling="2026-01-22 07:29:12.266646374 +0000 UTC m=+6213.105284377" lastFinishedPulling="2026-01-22 07:29:20.73848328 +0000 UTC m=+6221.577121283" observedRunningTime="2026-01-22 07:29:21.531320249 +0000 UTC m=+6222.369958272" watchObservedRunningTime="2026-01-22 07:29:21.535016208 +0000 UTC m=+6222.373654211" Jan 22 07:29:24 crc 
kubenswrapper[4982]: I0122 07:29:24.548063 4982 generic.go:334] "Generic (PLEG): container finished" podID="df9743d7-745a-45f6-91f0-564f7e4811df" containerID="c1afb082fedd7ea8afbe222d864b044d9d7891146609b0fbf695ff894e938216" exitCode=0 Jan 22 07:29:24 crc kubenswrapper[4982]: I0122 07:29:24.548240 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-bq959" event={"ID":"df9743d7-745a-45f6-91f0-564f7e4811df","Type":"ContainerDied","Data":"c1afb082fedd7ea8afbe222d864b044d9d7891146609b0fbf695ff894e938216"} Jan 22 07:29:24 crc kubenswrapper[4982]: I0122 07:29:24.991644 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5cb8668ff7-jmj7z" podUID="3e687c53-388f-46e5-ae9e-b42960a66206" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.112:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.112:8080: connect: connection refused" Jan 22 07:29:25 crc kubenswrapper[4982]: I0122 07:29:25.564490 4982 generic.go:334] "Generic (PLEG): container finished" podID="d6de4e3a-13ce-476a-bff4-cdabdcc138c8" containerID="cef9c0b9e5f33e5e1998a211a59e1e86cf6e7d633445bc7eab5813f495745768" exitCode=137 Jan 22 07:29:25 crc kubenswrapper[4982]: I0122 07:29:25.564594 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-99b96d4f5-7d75d" event={"ID":"d6de4e3a-13ce-476a-bff4-cdabdcc138c8","Type":"ContainerDied","Data":"cef9c0b9e5f33e5e1998a211a59e1e86cf6e7d633445bc7eab5813f495745768"} Jan 22 07:29:25 crc kubenswrapper[4982]: I0122 07:29:25.565043 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-99b96d4f5-7d75d" event={"ID":"d6de4e3a-13ce-476a-bff4-cdabdcc138c8","Type":"ContainerDied","Data":"e67e2798d1be111fb785d69ab542bea6a4a1a5165a6c4565cac7c0b117b27180"} Jan 22 07:29:25 crc kubenswrapper[4982]: I0122 07:29:25.565061 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e67e2798d1be111fb785d69ab542bea6a4a1a5165a6c4565cac7c0b117b27180" Jan 22 07:29:25 crc kubenswrapper[4982]: I0122 07:29:25.616465 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-99b96d4f5-7d75d" Jan 22 07:29:25 crc kubenswrapper[4982]: I0122 07:29:25.782008 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pxhq7\" (UniqueName: \"kubernetes.io/projected/d6de4e3a-13ce-476a-bff4-cdabdcc138c8-kube-api-access-pxhq7\") pod \"d6de4e3a-13ce-476a-bff4-cdabdcc138c8\" (UID: \"d6de4e3a-13ce-476a-bff4-cdabdcc138c8\") " Jan 22 07:29:25 crc kubenswrapper[4982]: I0122 07:29:25.782081 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d6de4e3a-13ce-476a-bff4-cdabdcc138c8-scripts\") pod \"d6de4e3a-13ce-476a-bff4-cdabdcc138c8\" (UID: \"d6de4e3a-13ce-476a-bff4-cdabdcc138c8\") " Jan 22 07:29:25 crc kubenswrapper[4982]: I0122 07:29:25.782119 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d6de4e3a-13ce-476a-bff4-cdabdcc138c8-horizon-secret-key\") pod \"d6de4e3a-13ce-476a-bff4-cdabdcc138c8\" (UID: \"d6de4e3a-13ce-476a-bff4-cdabdcc138c8\") " Jan 22 07:29:25 crc kubenswrapper[4982]: I0122 07:29:25.782183 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d6de4e3a-13ce-476a-bff4-cdabdcc138c8-config-data\") pod \"d6de4e3a-13ce-476a-bff4-cdabdcc138c8\" (UID: \"d6de4e3a-13ce-476a-bff4-cdabdcc138c8\") " Jan 22 07:29:25 crc kubenswrapper[4982]: I0122 07:29:25.782289 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6de4e3a-13ce-476a-bff4-cdabdcc138c8-logs\") pod \"d6de4e3a-13ce-476a-bff4-cdabdcc138c8\" (UID: \"d6de4e3a-13ce-476a-bff4-cdabdcc138c8\") " Jan 22 07:29:25 crc kubenswrapper[4982]: I0122 07:29:25.782802 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6de4e3a-13ce-476a-bff4-cdabdcc138c8-logs" (OuterVolumeSpecName: "logs") pod "d6de4e3a-13ce-476a-bff4-cdabdcc138c8" (UID: "d6de4e3a-13ce-476a-bff4-cdabdcc138c8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:29:25 crc kubenswrapper[4982]: I0122 07:29:25.801452 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6de4e3a-13ce-476a-bff4-cdabdcc138c8-kube-api-access-pxhq7" (OuterVolumeSpecName: "kube-api-access-pxhq7") pod "d6de4e3a-13ce-476a-bff4-cdabdcc138c8" (UID: "d6de4e3a-13ce-476a-bff4-cdabdcc138c8"). InnerVolumeSpecName "kube-api-access-pxhq7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:29:25 crc kubenswrapper[4982]: I0122 07:29:25.802193 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6de4e3a-13ce-476a-bff4-cdabdcc138c8-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "d6de4e3a-13ce-476a-bff4-cdabdcc138c8" (UID: "d6de4e3a-13ce-476a-bff4-cdabdcc138c8"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:29:25 crc kubenswrapper[4982]: I0122 07:29:25.824740 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6de4e3a-13ce-476a-bff4-cdabdcc138c8-scripts" (OuterVolumeSpecName: "scripts") pod "d6de4e3a-13ce-476a-bff4-cdabdcc138c8" (UID: "d6de4e3a-13ce-476a-bff4-cdabdcc138c8"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:29:25 crc kubenswrapper[4982]: I0122 07:29:25.824809 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6de4e3a-13ce-476a-bff4-cdabdcc138c8-config-data" (OuterVolumeSpecName: "config-data") pod "d6de4e3a-13ce-476a-bff4-cdabdcc138c8" (UID: "d6de4e3a-13ce-476a-bff4-cdabdcc138c8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:29:25 crc kubenswrapper[4982]: I0122 07:29:25.878200 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-bq959" Jan 22 07:29:25 crc kubenswrapper[4982]: I0122 07:29:25.884225 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pxhq7\" (UniqueName: \"kubernetes.io/projected/d6de4e3a-13ce-476a-bff4-cdabdcc138c8-kube-api-access-pxhq7\") on node \"crc\" DevicePath \"\"" Jan 22 07:29:25 crc kubenswrapper[4982]: I0122 07:29:25.884255 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d6de4e3a-13ce-476a-bff4-cdabdcc138c8-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:29:25 crc kubenswrapper[4982]: I0122 07:29:25.884264 4982 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d6de4e3a-13ce-476a-bff4-cdabdcc138c8-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Jan 22 07:29:25 crc kubenswrapper[4982]: I0122 07:29:25.884272 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d6de4e3a-13ce-476a-bff4-cdabdcc138c8-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:29:25 crc kubenswrapper[4982]: I0122 07:29:25.884282 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6de4e3a-13ce-476a-bff4-cdabdcc138c8-logs\") on node \"crc\" DevicePath \"\"" Jan 22 07:29:25 crc kubenswrapper[4982]: I0122 07:29:25.985993 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v7d8b\" (UniqueName: \"kubernetes.io/projected/df9743d7-745a-45f6-91f0-564f7e4811df-kube-api-access-v7d8b\") pod \"df9743d7-745a-45f6-91f0-564f7e4811df\" (UID: \"df9743d7-745a-45f6-91f0-564f7e4811df\") " Jan 22 07:29:25 crc kubenswrapper[4982]: I0122 07:29:25.986056 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df9743d7-745a-45f6-91f0-564f7e4811df-config-data\") pod \"df9743d7-745a-45f6-91f0-564f7e4811df\" (UID: \"df9743d7-745a-45f6-91f0-564f7e4811df\") " Jan 22 07:29:25 crc kubenswrapper[4982]: I0122 07:29:25.986182 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df9743d7-745a-45f6-91f0-564f7e4811df-combined-ca-bundle\") pod \"df9743d7-745a-45f6-91f0-564f7e4811df\" (UID: \"df9743d7-745a-45f6-91f0-564f7e4811df\") " Jan 22 07:29:25 crc kubenswrapper[4982]: I0122 07:29:25.990774 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df9743d7-745a-45f6-91f0-564f7e4811df-kube-api-access-v7d8b" (OuterVolumeSpecName: "kube-api-access-v7d8b") pod "df9743d7-745a-45f6-91f0-564f7e4811df" (UID: "df9743d7-745a-45f6-91f0-564f7e4811df"). InnerVolumeSpecName "kube-api-access-v7d8b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:29:26 crc kubenswrapper[4982]: I0122 07:29:26.015562 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df9743d7-745a-45f6-91f0-564f7e4811df-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "df9743d7-745a-45f6-91f0-564f7e4811df" (UID: "df9743d7-745a-45f6-91f0-564f7e4811df"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:29:26 crc kubenswrapper[4982]: I0122 07:29:26.061153 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df9743d7-745a-45f6-91f0-564f7e4811df-config-data" (OuterVolumeSpecName: "config-data") pod "df9743d7-745a-45f6-91f0-564f7e4811df" (UID: "df9743d7-745a-45f6-91f0-564f7e4811df"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:29:26 crc kubenswrapper[4982]: I0122 07:29:26.089080 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v7d8b\" (UniqueName: \"kubernetes.io/projected/df9743d7-745a-45f6-91f0-564f7e4811df-kube-api-access-v7d8b\") on node \"crc\" DevicePath \"\"" Jan 22 07:29:26 crc kubenswrapper[4982]: I0122 07:29:26.089124 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df9743d7-745a-45f6-91f0-564f7e4811df-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:29:26 crc kubenswrapper[4982]: I0122 07:29:26.089138 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df9743d7-745a-45f6-91f0-564f7e4811df-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:29:26 crc kubenswrapper[4982]: I0122 07:29:26.580512 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-bq959" Jan 22 07:29:26 crc kubenswrapper[4982]: I0122 07:29:26.580532 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-bq959" event={"ID":"df9743d7-745a-45f6-91f0-564f7e4811df","Type":"ContainerDied","Data":"6388e6360813493bdae23e5d29c9961aa6aa8e10567681ff8a3ce25de4d8066b"} Jan 22 07:29:26 crc kubenswrapper[4982]: I0122 07:29:26.580564 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6388e6360813493bdae23e5d29c9961aa6aa8e10567681ff8a3ce25de4d8066b" Jan 22 07:29:26 crc kubenswrapper[4982]: I0122 07:29:26.580512 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-99b96d4f5-7d75d" Jan 22 07:29:26 crc kubenswrapper[4982]: I0122 07:29:26.634163 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-99b96d4f5-7d75d"] Jan 22 07:29:26 crc kubenswrapper[4982]: I0122 07:29:26.643611 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-99b96d4f5-7d75d"] Jan 22 07:29:27 crc kubenswrapper[4982]: I0122 07:29:27.737931 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6de4e3a-13ce-476a-bff4-cdabdcc138c8" path="/var/lib/kubelet/pods/d6de4e3a-13ce-476a-bff4-cdabdcc138c8/volumes" Jan 22 07:29:27 crc kubenswrapper[4982]: I0122 07:29:27.859900 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-engine-6dd64c5f44-z9ffh"] Jan 22 07:29:27 crc kubenswrapper[4982]: E0122 07:29:27.860319 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6de4e3a-13ce-476a-bff4-cdabdcc138c8" containerName="horizon" Jan 22 07:29:27 crc kubenswrapper[4982]: I0122 07:29:27.860331 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6de4e3a-13ce-476a-bff4-cdabdcc138c8" containerName="horizon" Jan 22 07:29:27 crc kubenswrapper[4982]: E0122 07:29:27.860367 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df9743d7-745a-45f6-91f0-564f7e4811df" containerName="heat-db-sync" Jan 22 07:29:27 crc kubenswrapper[4982]: I0122 07:29:27.860373 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="df9743d7-745a-45f6-91f0-564f7e4811df" containerName="heat-db-sync" Jan 22 07:29:27 crc kubenswrapper[4982]: E0122 07:29:27.860383 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6de4e3a-13ce-476a-bff4-cdabdcc138c8" containerName="horizon-log" Jan 22 07:29:27 crc kubenswrapper[4982]: I0122 07:29:27.860389 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6de4e3a-13ce-476a-bff4-cdabdcc138c8" containerName="horizon-log" Jan 22 07:29:27 crc kubenswrapper[4982]: I0122 07:29:27.860541 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6de4e3a-13ce-476a-bff4-cdabdcc138c8" containerName="horizon-log" Jan 22 07:29:27 crc kubenswrapper[4982]: I0122 07:29:27.860550 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6de4e3a-13ce-476a-bff4-cdabdcc138c8" containerName="horizon" Jan 22 07:29:27 crc kubenswrapper[4982]: I0122 07:29:27.860573 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="df9743d7-745a-45f6-91f0-564f7e4811df" containerName="heat-db-sync" Jan 22 07:29:27 crc kubenswrapper[4982]: I0122 07:29:27.861315 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-engine-6dd64c5f44-z9ffh" Jan 22 07:29:27 crc kubenswrapper[4982]: I0122 07:29:27.865722 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-engine-config-data" Jan 22 07:29:27 crc kubenswrapper[4982]: I0122 07:29:27.866140 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-lqmrz" Jan 22 07:29:27 crc kubenswrapper[4982]: I0122 07:29:27.868876 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Jan 22 07:29:27 crc kubenswrapper[4982]: I0122 07:29:27.913897 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-6dd64c5f44-z9ffh"] Jan 22 07:29:27 crc kubenswrapper[4982]: I0122 07:29:27.928125 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d2e332c1-9410-467f-9149-368f1c250c4a-config-data-custom\") pod \"heat-engine-6dd64c5f44-z9ffh\" (UID: \"d2e332c1-9410-467f-9149-368f1c250c4a\") " pod="openstack/heat-engine-6dd64c5f44-z9ffh" Jan 22 07:29:27 crc kubenswrapper[4982]: I0122 07:29:27.928206 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2e332c1-9410-467f-9149-368f1c250c4a-config-data\") pod \"heat-engine-6dd64c5f44-z9ffh\" (UID: \"d2e332c1-9410-467f-9149-368f1c250c4a\") " pod="openstack/heat-engine-6dd64c5f44-z9ffh" Jan 22 07:29:27 crc kubenswrapper[4982]: I0122 07:29:27.928366 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2e332c1-9410-467f-9149-368f1c250c4a-combined-ca-bundle\") pod \"heat-engine-6dd64c5f44-z9ffh\" (UID: \"d2e332c1-9410-467f-9149-368f1c250c4a\") " pod="openstack/heat-engine-6dd64c5f44-z9ffh" Jan 22 07:29:27 crc kubenswrapper[4982]: I0122 07:29:27.928415 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvckd\" (UniqueName: \"kubernetes.io/projected/d2e332c1-9410-467f-9149-368f1c250c4a-kube-api-access-jvckd\") pod \"heat-engine-6dd64c5f44-z9ffh\" (UID: \"d2e332c1-9410-467f-9149-368f1c250c4a\") " pod="openstack/heat-engine-6dd64c5f44-z9ffh" Jan 22 07:29:27 crc kubenswrapper[4982]: I0122 07:29:27.936294 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-5bf47d4589-c46dt"] Jan 22 07:29:27 crc kubenswrapper[4982]: I0122 07:29:27.937938 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-5bf47d4589-c46dt" Jan 22 07:29:27 crc kubenswrapper[4982]: I0122 07:29:27.940344 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-api-config-data" Jan 22 07:29:27 crc kubenswrapper[4982]: I0122 07:29:27.953940 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-7b7b667b8-cwq46"] Jan 22 07:29:27 crc kubenswrapper[4982]: I0122 07:29:27.955169 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-7b7b667b8-cwq46" Jan 22 07:29:27 crc kubenswrapper[4982]: I0122 07:29:27.956787 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-cfnapi-config-data" Jan 22 07:29:27 crc kubenswrapper[4982]: I0122 07:29:27.963934 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-5bf47d4589-c46dt"] Jan 22 07:29:27 crc kubenswrapper[4982]: I0122 07:29:27.971731 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-7b7b667b8-cwq46"] Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.030678 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2e332c1-9410-467f-9149-368f1c250c4a-config-data\") pod \"heat-engine-6dd64c5f44-z9ffh\" (UID: \"d2e332c1-9410-467f-9149-368f1c250c4a\") " pod="openstack/heat-engine-6dd64c5f44-z9ffh" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.030774 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1905031-b59c-4a3d-86a1-356d3e836819-config-data\") pod \"heat-api-5bf47d4589-c46dt\" (UID: \"f1905031-b59c-4a3d-86a1-356d3e836819\") " pod="openstack/heat-api-5bf47d4589-c46dt" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.030831 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a02ed1aa-0801-4568-b3dd-e6b12107f7e4-combined-ca-bundle\") pod \"heat-cfnapi-7b7b667b8-cwq46\" (UID: \"a02ed1aa-0801-4568-b3dd-e6b12107f7e4\") " pod="openstack/heat-cfnapi-7b7b667b8-cwq46" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.030874 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wl2vd\" (UniqueName: \"kubernetes.io/projected/f1905031-b59c-4a3d-86a1-356d3e836819-kube-api-access-wl2vd\") pod \"heat-api-5bf47d4589-c46dt\" (UID: \"f1905031-b59c-4a3d-86a1-356d3e836819\") " pod="openstack/heat-api-5bf47d4589-c46dt" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.030926 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1905031-b59c-4a3d-86a1-356d3e836819-combined-ca-bundle\") pod \"heat-api-5bf47d4589-c46dt\" (UID: \"f1905031-b59c-4a3d-86a1-356d3e836819\") " pod="openstack/heat-api-5bf47d4589-c46dt" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.031025 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a02ed1aa-0801-4568-b3dd-e6b12107f7e4-config-data-custom\") pod \"heat-cfnapi-7b7b667b8-cwq46\" (UID: \"a02ed1aa-0801-4568-b3dd-e6b12107f7e4\") " pod="openstack/heat-cfnapi-7b7b667b8-cwq46" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.031074 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvzpj\" (UniqueName: \"kubernetes.io/projected/a02ed1aa-0801-4568-b3dd-e6b12107f7e4-kube-api-access-xvzpj\") pod \"heat-cfnapi-7b7b667b8-cwq46\" (UID: \"a02ed1aa-0801-4568-b3dd-e6b12107f7e4\") " pod="openstack/heat-cfnapi-7b7b667b8-cwq46" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.031197 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a02ed1aa-0801-4568-b3dd-e6b12107f7e4-config-data\") pod \"heat-cfnapi-7b7b667b8-cwq46\" (UID: \"a02ed1aa-0801-4568-b3dd-e6b12107f7e4\") " pod="openstack/heat-cfnapi-7b7b667b8-cwq46" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.031316 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f1905031-b59c-4a3d-86a1-356d3e836819-config-data-custom\") pod \"heat-api-5bf47d4589-c46dt\" (UID: \"f1905031-b59c-4a3d-86a1-356d3e836819\") " pod="openstack/heat-api-5bf47d4589-c46dt" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.031366 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2e332c1-9410-467f-9149-368f1c250c4a-combined-ca-bundle\") pod \"heat-engine-6dd64c5f44-z9ffh\" (UID: \"d2e332c1-9410-467f-9149-368f1c250c4a\") " pod="openstack/heat-engine-6dd64c5f44-z9ffh" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.031472 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvckd\" (UniqueName: \"kubernetes.io/projected/d2e332c1-9410-467f-9149-368f1c250c4a-kube-api-access-jvckd\") pod \"heat-engine-6dd64c5f44-z9ffh\" (UID: \"d2e332c1-9410-467f-9149-368f1c250c4a\") " pod="openstack/heat-engine-6dd64c5f44-z9ffh" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.031598 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d2e332c1-9410-467f-9149-368f1c250c4a-config-data-custom\") pod \"heat-engine-6dd64c5f44-z9ffh\" (UID: \"d2e332c1-9410-467f-9149-368f1c250c4a\") " pod="openstack/heat-engine-6dd64c5f44-z9ffh" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.037160 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2e332c1-9410-467f-9149-368f1c250c4a-config-data\") pod \"heat-engine-6dd64c5f44-z9ffh\" (UID: \"d2e332c1-9410-467f-9149-368f1c250c4a\") " pod="openstack/heat-engine-6dd64c5f44-z9ffh" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.040901 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d2e332c1-9410-467f-9149-368f1c250c4a-config-data-custom\") pod \"heat-engine-6dd64c5f44-z9ffh\" (UID: \"d2e332c1-9410-467f-9149-368f1c250c4a\") " pod="openstack/heat-engine-6dd64c5f44-z9ffh" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.055263 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2e332c1-9410-467f-9149-368f1c250c4a-combined-ca-bundle\") pod \"heat-engine-6dd64c5f44-z9ffh\" (UID: \"d2e332c1-9410-467f-9149-368f1c250c4a\") " pod="openstack/heat-engine-6dd64c5f44-z9ffh" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.058721 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvckd\" (UniqueName: \"kubernetes.io/projected/d2e332c1-9410-467f-9149-368f1c250c4a-kube-api-access-jvckd\") pod \"heat-engine-6dd64c5f44-z9ffh\" (UID: \"d2e332c1-9410-467f-9149-368f1c250c4a\") " pod="openstack/heat-engine-6dd64c5f44-z9ffh" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.133638 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/f1905031-b59c-4a3d-86a1-356d3e836819-config-data\") pod \"heat-api-5bf47d4589-c46dt\" (UID: \"f1905031-b59c-4a3d-86a1-356d3e836819\") " pod="openstack/heat-api-5bf47d4589-c46dt" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.134050 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a02ed1aa-0801-4568-b3dd-e6b12107f7e4-combined-ca-bundle\") pod \"heat-cfnapi-7b7b667b8-cwq46\" (UID: \"a02ed1aa-0801-4568-b3dd-e6b12107f7e4\") " pod="openstack/heat-cfnapi-7b7b667b8-cwq46" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.134073 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wl2vd\" (UniqueName: \"kubernetes.io/projected/f1905031-b59c-4a3d-86a1-356d3e836819-kube-api-access-wl2vd\") pod \"heat-api-5bf47d4589-c46dt\" (UID: \"f1905031-b59c-4a3d-86a1-356d3e836819\") " pod="openstack/heat-api-5bf47d4589-c46dt" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.134110 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1905031-b59c-4a3d-86a1-356d3e836819-combined-ca-bundle\") pod \"heat-api-5bf47d4589-c46dt\" (UID: \"f1905031-b59c-4a3d-86a1-356d3e836819\") " pod="openstack/heat-api-5bf47d4589-c46dt" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.134133 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a02ed1aa-0801-4568-b3dd-e6b12107f7e4-config-data-custom\") pod \"heat-cfnapi-7b7b667b8-cwq46\" (UID: \"a02ed1aa-0801-4568-b3dd-e6b12107f7e4\") " pod="openstack/heat-cfnapi-7b7b667b8-cwq46" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.134154 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvzpj\" (UniqueName: \"kubernetes.io/projected/a02ed1aa-0801-4568-b3dd-e6b12107f7e4-kube-api-access-xvzpj\") pod \"heat-cfnapi-7b7b667b8-cwq46\" (UID: \"a02ed1aa-0801-4568-b3dd-e6b12107f7e4\") " pod="openstack/heat-cfnapi-7b7b667b8-cwq46" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.134175 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a02ed1aa-0801-4568-b3dd-e6b12107f7e4-config-data\") pod \"heat-cfnapi-7b7b667b8-cwq46\" (UID: \"a02ed1aa-0801-4568-b3dd-e6b12107f7e4\") " pod="openstack/heat-cfnapi-7b7b667b8-cwq46" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.134208 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f1905031-b59c-4a3d-86a1-356d3e836819-config-data-custom\") pod \"heat-api-5bf47d4589-c46dt\" (UID: \"f1905031-b59c-4a3d-86a1-356d3e836819\") " pod="openstack/heat-api-5bf47d4589-c46dt" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.145354 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f1905031-b59c-4a3d-86a1-356d3e836819-config-data-custom\") pod \"heat-api-5bf47d4589-c46dt\" (UID: \"f1905031-b59c-4a3d-86a1-356d3e836819\") " pod="openstack/heat-api-5bf47d4589-c46dt" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.147484 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/a02ed1aa-0801-4568-b3dd-e6b12107f7e4-config-data-custom\") pod \"heat-cfnapi-7b7b667b8-cwq46\" (UID: \"a02ed1aa-0801-4568-b3dd-e6b12107f7e4\") " pod="openstack/heat-cfnapi-7b7b667b8-cwq46" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.150726 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a02ed1aa-0801-4568-b3dd-e6b12107f7e4-config-data\") pod \"heat-cfnapi-7b7b667b8-cwq46\" (UID: \"a02ed1aa-0801-4568-b3dd-e6b12107f7e4\") " pod="openstack/heat-cfnapi-7b7b667b8-cwq46" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.153641 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a02ed1aa-0801-4568-b3dd-e6b12107f7e4-combined-ca-bundle\") pod \"heat-cfnapi-7b7b667b8-cwq46\" (UID: \"a02ed1aa-0801-4568-b3dd-e6b12107f7e4\") " pod="openstack/heat-cfnapi-7b7b667b8-cwq46" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.154096 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wl2vd\" (UniqueName: \"kubernetes.io/projected/f1905031-b59c-4a3d-86a1-356d3e836819-kube-api-access-wl2vd\") pod \"heat-api-5bf47d4589-c46dt\" (UID: \"f1905031-b59c-4a3d-86a1-356d3e836819\") " pod="openstack/heat-api-5bf47d4589-c46dt" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.154179 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1905031-b59c-4a3d-86a1-356d3e836819-config-data\") pod \"heat-api-5bf47d4589-c46dt\" (UID: \"f1905031-b59c-4a3d-86a1-356d3e836819\") " pod="openstack/heat-api-5bf47d4589-c46dt" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.154908 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1905031-b59c-4a3d-86a1-356d3e836819-combined-ca-bundle\") pod \"heat-api-5bf47d4589-c46dt\" (UID: \"f1905031-b59c-4a3d-86a1-356d3e836819\") " pod="openstack/heat-api-5bf47d4589-c46dt" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.158074 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvzpj\" (UniqueName: \"kubernetes.io/projected/a02ed1aa-0801-4568-b3dd-e6b12107f7e4-kube-api-access-xvzpj\") pod \"heat-cfnapi-7b7b667b8-cwq46\" (UID: \"a02ed1aa-0801-4568-b3dd-e6b12107f7e4\") " pod="openstack/heat-cfnapi-7b7b667b8-cwq46" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.210610 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-6dd64c5f44-z9ffh" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.254597 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-5bf47d4589-c46dt" Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.272333 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-7b7b667b8-cwq46" Jan 22 07:29:28 crc kubenswrapper[4982]: W0122 07:29:28.938118 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd2e332c1_9410_467f_9149_368f1c250c4a.slice/crio-645d4b498b79e44721b4fff400ca6c2cc1eaee0bab3eea0e42ded522fde031ba WatchSource:0}: Error finding container 645d4b498b79e44721b4fff400ca6c2cc1eaee0bab3eea0e42ded522fde031ba: Status 404 returned error can't find the container with id 645d4b498b79e44721b4fff400ca6c2cc1eaee0bab3eea0e42ded522fde031ba Jan 22 07:29:28 crc kubenswrapper[4982]: I0122 07:29:28.938712 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-6dd64c5f44-z9ffh"] Jan 22 07:29:29 crc kubenswrapper[4982]: I0122 07:29:29.014927 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-5bf47d4589-c46dt"] Jan 22 07:29:29 crc kubenswrapper[4982]: I0122 07:29:29.101000 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-7b7b667b8-cwq46"] Jan 22 07:29:29 crc kubenswrapper[4982]: I0122 07:29:29.626034 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-7b7b667b8-cwq46" event={"ID":"a02ed1aa-0801-4568-b3dd-e6b12107f7e4","Type":"ContainerStarted","Data":"ba0dcb2936a7c916efd9916e4a790902f7f0493b969964c122faa5cde64d14b8"} Jan 22 07:29:29 crc kubenswrapper[4982]: I0122 07:29:29.628781 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-6dd64c5f44-z9ffh" event={"ID":"d2e332c1-9410-467f-9149-368f1c250c4a","Type":"ContainerStarted","Data":"0c2ef96c769483e3435cfd75a1f64097ce08f512e445f0ebcf5d54fc38d3b241"} Jan 22 07:29:29 crc kubenswrapper[4982]: I0122 07:29:29.628976 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-engine-6dd64c5f44-z9ffh" Jan 22 07:29:29 crc kubenswrapper[4982]: I0122 07:29:29.629055 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-6dd64c5f44-z9ffh" event={"ID":"d2e332c1-9410-467f-9149-368f1c250c4a","Type":"ContainerStarted","Data":"645d4b498b79e44721b4fff400ca6c2cc1eaee0bab3eea0e42ded522fde031ba"} Jan 22 07:29:29 crc kubenswrapper[4982]: I0122 07:29:29.630358 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-5bf47d4589-c46dt" event={"ID":"f1905031-b59c-4a3d-86a1-356d3e836819","Type":"ContainerStarted","Data":"1bd0643d1de0dcd2465bf479574bb8cb67ecb295831ce041df8c94008f6490a3"} Jan 22 07:29:29 crc kubenswrapper[4982]: I0122 07:29:29.660593 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-engine-6dd64c5f44-z9ffh" podStartSLOduration=2.660562492 podStartE2EDuration="2.660562492s" podCreationTimestamp="2026-01-22 07:29:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:29:29.655454614 +0000 UTC m=+6230.494092647" watchObservedRunningTime="2026-01-22 07:29:29.660562492 +0000 UTC m=+6230.499200495" Jan 22 07:29:31 crc kubenswrapper[4982]: I0122 07:29:31.647996 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-5bf47d4589-c46dt" event={"ID":"f1905031-b59c-4a3d-86a1-356d3e836819","Type":"ContainerStarted","Data":"1b15dabaa5fc8561529d8a5b11e0e0d4b6aa4ee3c6aee956e1ad43de3ac3eff5"} Jan 22 07:29:31 crc kubenswrapper[4982]: I0122 07:29:31.648615 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/heat-api-5bf47d4589-c46dt" Jan 22 07:29:31 crc kubenswrapper[4982]: I0122 07:29:31.649530 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-7b7b667b8-cwq46" event={"ID":"a02ed1aa-0801-4568-b3dd-e6b12107f7e4","Type":"ContainerStarted","Data":"65cfc0ce846e32c99195dca857f4428ada42cfaa4dea666f0759cd35201685cd"} Jan 22 07:29:31 crc kubenswrapper[4982]: I0122 07:29:31.649704 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-7b7b667b8-cwq46" Jan 22 07:29:31 crc kubenswrapper[4982]: I0122 07:29:31.667063 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-5bf47d4589-c46dt" podStartSLOduration=2.519286127 podStartE2EDuration="4.667043023s" podCreationTimestamp="2026-01-22 07:29:27 +0000 UTC" firstStartedPulling="2026-01-22 07:29:29.012225535 +0000 UTC m=+6229.850863538" lastFinishedPulling="2026-01-22 07:29:31.159982431 +0000 UTC m=+6231.998620434" observedRunningTime="2026-01-22 07:29:31.664904625 +0000 UTC m=+6232.503542628" watchObservedRunningTime="2026-01-22 07:29:31.667043023 +0000 UTC m=+6232.505681026" Jan 22 07:29:31 crc kubenswrapper[4982]: I0122 07:29:31.689094 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-cfnapi-7b7b667b8-cwq46" podStartSLOduration=2.647849638 podStartE2EDuration="4.689071288s" podCreationTimestamp="2026-01-22 07:29:27 +0000 UTC" firstStartedPulling="2026-01-22 07:29:29.116235213 +0000 UTC m=+6229.954873216" lastFinishedPulling="2026-01-22 07:29:31.157456863 +0000 UTC m=+6231.996094866" observedRunningTime="2026-01-22 07:29:31.682206572 +0000 UTC m=+6232.520844585" watchObservedRunningTime="2026-01-22 07:29:31.689071288 +0000 UTC m=+6232.527709291" Jan 22 07:29:31 crc kubenswrapper[4982]: I0122 07:29:31.719717 4982 scope.go:117] "RemoveContainer" containerID="e86d7de3d5541f26c30d5e3150518e2a21550ebe9adbca61610c9190dc552078" Jan 22 07:29:31 crc kubenswrapper[4982]: E0122 07:29:31.720038 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:29:36 crc kubenswrapper[4982]: I0122 07:29:36.818040 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-5cb8668ff7-jmj7z" Jan 22 07:29:37 crc kubenswrapper[4982]: I0122 07:29:37.041323 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-2ffa-account-create-update-g8pwp"] Jan 22 07:29:37 crc kubenswrapper[4982]: I0122 07:29:37.056050 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-77bf-account-create-update-vqzlx"] Jan 22 07:29:37 crc kubenswrapper[4982]: I0122 07:29:37.071836 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-w4dxj"] Jan 22 07:29:37 crc kubenswrapper[4982]: I0122 07:29:37.083028 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-2b13-account-create-update-8xmrq"] Jan 22 07:29:37 crc kubenswrapper[4982]: I0122 07:29:37.093559 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-dcvw4"] Jan 22 07:29:37 crc kubenswrapper[4982]: I0122 07:29:37.102605 4982 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-5t2rq"] Jan 22 07:29:37 crc kubenswrapper[4982]: I0122 07:29:37.111445 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-2b13-account-create-update-8xmrq"] Jan 22 07:29:37 crc kubenswrapper[4982]: I0122 07:29:37.119749 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-dcvw4"] Jan 22 07:29:37 crc kubenswrapper[4982]: I0122 07:29:37.128202 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-77bf-account-create-update-vqzlx"] Jan 22 07:29:37 crc kubenswrapper[4982]: I0122 07:29:37.136201 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-2ffa-account-create-update-g8pwp"] Jan 22 07:29:37 crc kubenswrapper[4982]: I0122 07:29:37.143955 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-w4dxj"] Jan 22 07:29:37 crc kubenswrapper[4982]: I0122 07:29:37.151543 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-5t2rq"] Jan 22 07:29:37 crc kubenswrapper[4982]: I0122 07:29:37.731128 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b6dda5b-717b-4f03-a049-2b848c0c2c27" path="/var/lib/kubelet/pods/5b6dda5b-717b-4f03-a049-2b848c0c2c27/volumes" Jan 22 07:29:37 crc kubenswrapper[4982]: I0122 07:29:37.732039 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a220695a-2676-40c9-9488-78b2e093e1cb" path="/var/lib/kubelet/pods/a220695a-2676-40c9-9488-78b2e093e1cb/volumes" Jan 22 07:29:37 crc kubenswrapper[4982]: I0122 07:29:37.732637 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6a44b3b-7668-4335-bf14-926e8b32adba" path="/var/lib/kubelet/pods/b6a44b3b-7668-4335-bf14-926e8b32adba/volumes" Jan 22 07:29:37 crc kubenswrapper[4982]: I0122 07:29:37.733272 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4366c97-1431-4d9e-9531-3ebf64880826" path="/var/lib/kubelet/pods/c4366c97-1431-4d9e-9531-3ebf64880826/volumes" Jan 22 07:29:37 crc kubenswrapper[4982]: I0122 07:29:37.734748 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d2e7c369-7f5b-48b3-9303-7ec29f9fd757" path="/var/lib/kubelet/pods/d2e7c369-7f5b-48b3-9303-7ec29f9fd757/volumes" Jan 22 07:29:37 crc kubenswrapper[4982]: I0122 07:29:37.735377 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2d11bc2-7757-402a-b4b6-fe5578b86213" path="/var/lib/kubelet/pods/e2d11bc2-7757-402a-b4b6-fe5578b86213/volumes" Jan 22 07:29:38 crc kubenswrapper[4982]: I0122 07:29:38.618077 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-5cb8668ff7-jmj7z" Jan 22 07:29:38 crc kubenswrapper[4982]: I0122 07:29:38.684210 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7b44bfdd57-k7hnf"] Jan 22 07:29:38 crc kubenswrapper[4982]: I0122 07:29:38.684648 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7b44bfdd57-k7hnf" podUID="db75fc6f-fad5-4212-be6b-e310f66ee471" containerName="horizon-log" containerID="cri-o://2dc0d63afa106ab83e3d91c78e49bc6f2e76b08fd10599993c145d2acf78598c" gracePeriod=30 Jan 22 07:29:38 crc kubenswrapper[4982]: I0122 07:29:38.685018 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7b44bfdd57-k7hnf" podUID="db75fc6f-fad5-4212-be6b-e310f66ee471" containerName="horizon" 
containerID="cri-o://8d017b3574f9aa120fe25a5c340b06c59625793b2ba25362425a54699db1d65a" gracePeriod=30 Jan 22 07:29:39 crc kubenswrapper[4982]: I0122 07:29:39.813109 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-api-5bf47d4589-c46dt" Jan 22 07:29:39 crc kubenswrapper[4982]: I0122 07:29:39.880131 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-cfnapi-7b7b667b8-cwq46" Jan 22 07:29:42 crc kubenswrapper[4982]: I0122 07:29:42.020093 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7b44bfdd57-k7hnf" podUID="db75fc6f-fad5-4212-be6b-e310f66ee471" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.107:8080/dashboard/auth/login/?next=/dashboard/\": read tcp 10.217.0.2:36508->10.217.1.107:8080: read: connection reset by peer" Jan 22 07:29:42 crc kubenswrapper[4982]: I0122 07:29:42.789463 4982 generic.go:334] "Generic (PLEG): container finished" podID="db75fc6f-fad5-4212-be6b-e310f66ee471" containerID="8d017b3574f9aa120fe25a5c340b06c59625793b2ba25362425a54699db1d65a" exitCode=0 Jan 22 07:29:42 crc kubenswrapper[4982]: I0122 07:29:42.789533 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b44bfdd57-k7hnf" event={"ID":"db75fc6f-fad5-4212-be6b-e310f66ee471","Type":"ContainerDied","Data":"8d017b3574f9aa120fe25a5c340b06c59625793b2ba25362425a54699db1d65a"} Jan 22 07:29:43 crc kubenswrapper[4982]: I0122 07:29:43.721035 4982 scope.go:117] "RemoveContainer" containerID="e86d7de3d5541f26c30d5e3150518e2a21550ebe9adbca61610c9190dc552078" Jan 22 07:29:43 crc kubenswrapper[4982]: E0122 07:29:43.721602 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:29:48 crc kubenswrapper[4982]: I0122 07:29:48.238148 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-engine-6dd64c5f44-z9ffh" Jan 22 07:29:51 crc kubenswrapper[4982]: I0122 07:29:51.617171 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7b44bfdd57-k7hnf" podUID="db75fc6f-fad5-4212-be6b-e310f66ee471" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.107:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.107:8080: connect: connection refused" Jan 22 07:29:54 crc kubenswrapper[4982]: I0122 07:29:54.720653 4982 scope.go:117] "RemoveContainer" containerID="e86d7de3d5541f26c30d5e3150518e2a21550ebe9adbca61610c9190dc552078" Jan 22 07:29:54 crc kubenswrapper[4982]: E0122 07:29:54.721739 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:29:56 crc kubenswrapper[4982]: I0122 07:29:56.042800 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-tlplj"] Jan 22 07:29:56 crc 
kubenswrapper[4982]: I0122 07:29:56.054685 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-tlplj"] Jan 22 07:29:57 crc kubenswrapper[4982]: I0122 07:29:57.730091 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54b1df53-5b7e-450b-bb04-f93da76ad0a6" path="/var/lib/kubelet/pods/54b1df53-5b7e-450b-bb04-f93da76ad0a6/volumes" Jan 22 07:29:59 crc kubenswrapper[4982]: I0122 07:29:59.076419 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx"] Jan 22 07:29:59 crc kubenswrapper[4982]: I0122 07:29:59.078579 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx" Jan 22 07:29:59 crc kubenswrapper[4982]: I0122 07:29:59.083297 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 22 07:29:59 crc kubenswrapper[4982]: I0122 07:29:59.089820 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx"] Jan 22 07:29:59 crc kubenswrapper[4982]: I0122 07:29:59.220007 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b9612f2b-06bd-4536-ad46-c74dfcd8d517-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx\" (UID: \"b9612f2b-06bd-4536-ad46-c74dfcd8d517\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx" Jan 22 07:29:59 crc kubenswrapper[4982]: I0122 07:29:59.220074 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b9612f2b-06bd-4536-ad46-c74dfcd8d517-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx\" (UID: \"b9612f2b-06bd-4536-ad46-c74dfcd8d517\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx" Jan 22 07:29:59 crc kubenswrapper[4982]: I0122 07:29:59.220472 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7clvv\" (UniqueName: \"kubernetes.io/projected/b9612f2b-06bd-4536-ad46-c74dfcd8d517-kube-api-access-7clvv\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx\" (UID: \"b9612f2b-06bd-4536-ad46-c74dfcd8d517\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx" Jan 22 07:29:59 crc kubenswrapper[4982]: I0122 07:29:59.322107 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7clvv\" (UniqueName: \"kubernetes.io/projected/b9612f2b-06bd-4536-ad46-c74dfcd8d517-kube-api-access-7clvv\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx\" (UID: \"b9612f2b-06bd-4536-ad46-c74dfcd8d517\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx" Jan 22 07:29:59 crc kubenswrapper[4982]: I0122 07:29:59.322207 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b9612f2b-06bd-4536-ad46-c74dfcd8d517-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx\" (UID: \"b9612f2b-06bd-4536-ad46-c74dfcd8d517\") " 
pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx" Jan 22 07:29:59 crc kubenswrapper[4982]: I0122 07:29:59.322244 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b9612f2b-06bd-4536-ad46-c74dfcd8d517-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx\" (UID: \"b9612f2b-06bd-4536-ad46-c74dfcd8d517\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx" Jan 22 07:29:59 crc kubenswrapper[4982]: I0122 07:29:59.322818 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b9612f2b-06bd-4536-ad46-c74dfcd8d517-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx\" (UID: \"b9612f2b-06bd-4536-ad46-c74dfcd8d517\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx" Jan 22 07:29:59 crc kubenswrapper[4982]: I0122 07:29:59.322873 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b9612f2b-06bd-4536-ad46-c74dfcd8d517-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx\" (UID: \"b9612f2b-06bd-4536-ad46-c74dfcd8d517\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx" Jan 22 07:29:59 crc kubenswrapper[4982]: I0122 07:29:59.346936 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7clvv\" (UniqueName: \"kubernetes.io/projected/b9612f2b-06bd-4536-ad46-c74dfcd8d517-kube-api-access-7clvv\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx\" (UID: \"b9612f2b-06bd-4536-ad46-c74dfcd8d517\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx" Jan 22 07:29:59 crc kubenswrapper[4982]: I0122 07:29:59.398693 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx" Jan 22 07:29:59 crc kubenswrapper[4982]: I0122 07:29:59.936127 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx"] Jan 22 07:29:59 crc kubenswrapper[4982]: I0122 07:29:59.956290 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx" event={"ID":"b9612f2b-06bd-4536-ad46-c74dfcd8d517","Type":"ContainerStarted","Data":"19fcb87e2c646e2716917e06a64daca3ae1da6a2b95906b2dfb6d6e7fffd0f79"} Jan 22 07:30:00 crc kubenswrapper[4982]: I0122 07:30:00.158813 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484450-7s22h"] Jan 22 07:30:00 crc kubenswrapper[4982]: I0122 07:30:00.160481 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484450-7s22h" Jan 22 07:30:00 crc kubenswrapper[4982]: I0122 07:30:00.163232 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 22 07:30:00 crc kubenswrapper[4982]: I0122 07:30:00.165156 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 22 07:30:00 crc kubenswrapper[4982]: I0122 07:30:00.167129 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484450-7s22h"] Jan 22 07:30:00 crc kubenswrapper[4982]: I0122 07:30:00.343820 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/615a38ca-24cd-4f8c-82cf-764c8f0f68bb-secret-volume\") pod \"collect-profiles-29484450-7s22h\" (UID: \"615a38ca-24cd-4f8c-82cf-764c8f0f68bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484450-7s22h" Jan 22 07:30:00 crc kubenswrapper[4982]: I0122 07:30:00.343974 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zbkkb\" (UniqueName: \"kubernetes.io/projected/615a38ca-24cd-4f8c-82cf-764c8f0f68bb-kube-api-access-zbkkb\") pod \"collect-profiles-29484450-7s22h\" (UID: \"615a38ca-24cd-4f8c-82cf-764c8f0f68bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484450-7s22h" Jan 22 07:30:00 crc kubenswrapper[4982]: I0122 07:30:00.344311 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/615a38ca-24cd-4f8c-82cf-764c8f0f68bb-config-volume\") pod \"collect-profiles-29484450-7s22h\" (UID: \"615a38ca-24cd-4f8c-82cf-764c8f0f68bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484450-7s22h" Jan 22 07:30:00 crc kubenswrapper[4982]: I0122 07:30:00.446310 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/615a38ca-24cd-4f8c-82cf-764c8f0f68bb-secret-volume\") pod \"collect-profiles-29484450-7s22h\" (UID: \"615a38ca-24cd-4f8c-82cf-764c8f0f68bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484450-7s22h" Jan 22 07:30:00 crc kubenswrapper[4982]: I0122 07:30:00.446365 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zbkkb\" (UniqueName: \"kubernetes.io/projected/615a38ca-24cd-4f8c-82cf-764c8f0f68bb-kube-api-access-zbkkb\") pod \"collect-profiles-29484450-7s22h\" (UID: \"615a38ca-24cd-4f8c-82cf-764c8f0f68bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484450-7s22h" Jan 22 07:30:00 crc kubenswrapper[4982]: I0122 07:30:00.446439 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/615a38ca-24cd-4f8c-82cf-764c8f0f68bb-config-volume\") pod \"collect-profiles-29484450-7s22h\" (UID: \"615a38ca-24cd-4f8c-82cf-764c8f0f68bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484450-7s22h" Jan 22 07:30:00 crc kubenswrapper[4982]: I0122 07:30:00.448289 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/615a38ca-24cd-4f8c-82cf-764c8f0f68bb-config-volume\") pod 
\"collect-profiles-29484450-7s22h\" (UID: \"615a38ca-24cd-4f8c-82cf-764c8f0f68bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484450-7s22h" Jan 22 07:30:00 crc kubenswrapper[4982]: I0122 07:30:00.458510 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/615a38ca-24cd-4f8c-82cf-764c8f0f68bb-secret-volume\") pod \"collect-profiles-29484450-7s22h\" (UID: \"615a38ca-24cd-4f8c-82cf-764c8f0f68bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484450-7s22h" Jan 22 07:30:00 crc kubenswrapper[4982]: I0122 07:30:00.467552 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zbkkb\" (UniqueName: \"kubernetes.io/projected/615a38ca-24cd-4f8c-82cf-764c8f0f68bb-kube-api-access-zbkkb\") pod \"collect-profiles-29484450-7s22h\" (UID: \"615a38ca-24cd-4f8c-82cf-764c8f0f68bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484450-7s22h" Jan 22 07:30:00 crc kubenswrapper[4982]: I0122 07:30:00.484241 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484450-7s22h" Jan 22 07:30:00 crc kubenswrapper[4982]: I0122 07:30:00.974799 4982 generic.go:334] "Generic (PLEG): container finished" podID="b9612f2b-06bd-4536-ad46-c74dfcd8d517" containerID="983c10fc114df6ddc59733b7549694dbd1955d9c1515bfe16d307e88b0b3c6c1" exitCode=0 Jan 22 07:30:00 crc kubenswrapper[4982]: I0122 07:30:00.974918 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx" event={"ID":"b9612f2b-06bd-4536-ad46-c74dfcd8d517","Type":"ContainerDied","Data":"983c10fc114df6ddc59733b7549694dbd1955d9c1515bfe16d307e88b0b3c6c1"} Jan 22 07:30:00 crc kubenswrapper[4982]: I0122 07:30:00.979939 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484450-7s22h"] Jan 22 07:30:01 crc kubenswrapper[4982]: I0122 07:30:01.617700 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7b44bfdd57-k7hnf" podUID="db75fc6f-fad5-4212-be6b-e310f66ee471" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.107:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.107:8080: connect: connection refused" Jan 22 07:30:01 crc kubenswrapper[4982]: I0122 07:30:01.618241 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7b44bfdd57-k7hnf" Jan 22 07:30:01 crc kubenswrapper[4982]: I0122 07:30:01.988360 4982 generic.go:334] "Generic (PLEG): container finished" podID="615a38ca-24cd-4f8c-82cf-764c8f0f68bb" containerID="3d2d6931a346bf696f7542598db763bb327800d989641511b62eb406f5456755" exitCode=0 Jan 22 07:30:01 crc kubenswrapper[4982]: I0122 07:30:01.988403 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484450-7s22h" event={"ID":"615a38ca-24cd-4f8c-82cf-764c8f0f68bb","Type":"ContainerDied","Data":"3d2d6931a346bf696f7542598db763bb327800d989641511b62eb406f5456755"} Jan 22 07:30:01 crc kubenswrapper[4982]: I0122 07:30:01.988439 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484450-7s22h" event={"ID":"615a38ca-24cd-4f8c-82cf-764c8f0f68bb","Type":"ContainerStarted","Data":"4b0d2123de086cb65f5e7af81eb359fda256d963a28a1b0a8301493cf36cf534"} Jan 22 07:30:03 crc 
kubenswrapper[4982]: I0122 07:30:03.002690 4982 generic.go:334] "Generic (PLEG): container finished" podID="b9612f2b-06bd-4536-ad46-c74dfcd8d517" containerID="c4fbf3c03b89c19a33c65d0f9d61f096279835b2aba9dde82ea5b7a043d5c30e" exitCode=0 Jan 22 07:30:03 crc kubenswrapper[4982]: I0122 07:30:03.002799 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx" event={"ID":"b9612f2b-06bd-4536-ad46-c74dfcd8d517","Type":"ContainerDied","Data":"c4fbf3c03b89c19a33c65d0f9d61f096279835b2aba9dde82ea5b7a043d5c30e"} Jan 22 07:30:03 crc kubenswrapper[4982]: I0122 07:30:03.681361 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484450-7s22h" Jan 22 07:30:03 crc kubenswrapper[4982]: I0122 07:30:03.812664 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/615a38ca-24cd-4f8c-82cf-764c8f0f68bb-secret-volume\") pod \"615a38ca-24cd-4f8c-82cf-764c8f0f68bb\" (UID: \"615a38ca-24cd-4f8c-82cf-764c8f0f68bb\") " Jan 22 07:30:03 crc kubenswrapper[4982]: I0122 07:30:03.812714 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zbkkb\" (UniqueName: \"kubernetes.io/projected/615a38ca-24cd-4f8c-82cf-764c8f0f68bb-kube-api-access-zbkkb\") pod \"615a38ca-24cd-4f8c-82cf-764c8f0f68bb\" (UID: \"615a38ca-24cd-4f8c-82cf-764c8f0f68bb\") " Jan 22 07:30:03 crc kubenswrapper[4982]: I0122 07:30:03.812969 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/615a38ca-24cd-4f8c-82cf-764c8f0f68bb-config-volume\") pod \"615a38ca-24cd-4f8c-82cf-764c8f0f68bb\" (UID: \"615a38ca-24cd-4f8c-82cf-764c8f0f68bb\") " Jan 22 07:30:03 crc kubenswrapper[4982]: I0122 07:30:03.813613 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/615a38ca-24cd-4f8c-82cf-764c8f0f68bb-config-volume" (OuterVolumeSpecName: "config-volume") pod "615a38ca-24cd-4f8c-82cf-764c8f0f68bb" (UID: "615a38ca-24cd-4f8c-82cf-764c8f0f68bb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:30:03 crc kubenswrapper[4982]: I0122 07:30:03.838240 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/615a38ca-24cd-4f8c-82cf-764c8f0f68bb-kube-api-access-zbkkb" (OuterVolumeSpecName: "kube-api-access-zbkkb") pod "615a38ca-24cd-4f8c-82cf-764c8f0f68bb" (UID: "615a38ca-24cd-4f8c-82cf-764c8f0f68bb"). InnerVolumeSpecName "kube-api-access-zbkkb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:30:03 crc kubenswrapper[4982]: I0122 07:30:03.848614 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/615a38ca-24cd-4f8c-82cf-764c8f0f68bb-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "615a38ca-24cd-4f8c-82cf-764c8f0f68bb" (UID: "615a38ca-24cd-4f8c-82cf-764c8f0f68bb"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:30:03 crc kubenswrapper[4982]: I0122 07:30:03.915569 4982 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/615a38ca-24cd-4f8c-82cf-764c8f0f68bb-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 22 07:30:03 crc kubenswrapper[4982]: I0122 07:30:03.915612 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zbkkb\" (UniqueName: \"kubernetes.io/projected/615a38ca-24cd-4f8c-82cf-764c8f0f68bb-kube-api-access-zbkkb\") on node \"crc\" DevicePath \"\"" Jan 22 07:30:03 crc kubenswrapper[4982]: I0122 07:30:03.915628 4982 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/615a38ca-24cd-4f8c-82cf-764c8f0f68bb-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 07:30:04 crc kubenswrapper[4982]: I0122 07:30:04.015457 4982 generic.go:334] "Generic (PLEG): container finished" podID="b9612f2b-06bd-4536-ad46-c74dfcd8d517" containerID="3ab87f4dc40ac212616ffa3ef1a2d759e5b40e7e5e4a81ee14503f5eaf619e03" exitCode=0 Jan 22 07:30:04 crc kubenswrapper[4982]: I0122 07:30:04.015642 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx" event={"ID":"b9612f2b-06bd-4536-ad46-c74dfcd8d517","Type":"ContainerDied","Data":"3ab87f4dc40ac212616ffa3ef1a2d759e5b40e7e5e4a81ee14503f5eaf619e03"} Jan 22 07:30:04 crc kubenswrapper[4982]: I0122 07:30:04.018269 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484450-7s22h" event={"ID":"615a38ca-24cd-4f8c-82cf-764c8f0f68bb","Type":"ContainerDied","Data":"4b0d2123de086cb65f5e7af81eb359fda256d963a28a1b0a8301493cf36cf534"} Jan 22 07:30:04 crc kubenswrapper[4982]: I0122 07:30:04.018326 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4b0d2123de086cb65f5e7af81eb359fda256d963a28a1b0a8301493cf36cf534" Jan 22 07:30:04 crc kubenswrapper[4982]: I0122 07:30:04.018367 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484450-7s22h" Jan 22 07:30:04 crc kubenswrapper[4982]: I0122 07:30:04.764155 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484405-xlvhp"] Jan 22 07:30:04 crc kubenswrapper[4982]: I0122 07:30:04.801615 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484405-xlvhp"] Jan 22 07:30:05 crc kubenswrapper[4982]: I0122 07:30:05.350429 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx" Jan 22 07:30:05 crc kubenswrapper[4982]: I0122 07:30:05.446165 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b9612f2b-06bd-4536-ad46-c74dfcd8d517-bundle\") pod \"b9612f2b-06bd-4536-ad46-c74dfcd8d517\" (UID: \"b9612f2b-06bd-4536-ad46-c74dfcd8d517\") " Jan 22 07:30:05 crc kubenswrapper[4982]: I0122 07:30:05.446325 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b9612f2b-06bd-4536-ad46-c74dfcd8d517-util\") pod \"b9612f2b-06bd-4536-ad46-c74dfcd8d517\" (UID: \"b9612f2b-06bd-4536-ad46-c74dfcd8d517\") " Jan 22 07:30:05 crc kubenswrapper[4982]: I0122 07:30:05.446425 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7clvv\" (UniqueName: \"kubernetes.io/projected/b9612f2b-06bd-4536-ad46-c74dfcd8d517-kube-api-access-7clvv\") pod \"b9612f2b-06bd-4536-ad46-c74dfcd8d517\" (UID: \"b9612f2b-06bd-4536-ad46-c74dfcd8d517\") " Jan 22 07:30:05 crc kubenswrapper[4982]: I0122 07:30:05.447776 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9612f2b-06bd-4536-ad46-c74dfcd8d517-bundle" (OuterVolumeSpecName: "bundle") pod "b9612f2b-06bd-4536-ad46-c74dfcd8d517" (UID: "b9612f2b-06bd-4536-ad46-c74dfcd8d517"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:30:05 crc kubenswrapper[4982]: I0122 07:30:05.454079 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9612f2b-06bd-4536-ad46-c74dfcd8d517-kube-api-access-7clvv" (OuterVolumeSpecName: "kube-api-access-7clvv") pod "b9612f2b-06bd-4536-ad46-c74dfcd8d517" (UID: "b9612f2b-06bd-4536-ad46-c74dfcd8d517"). InnerVolumeSpecName "kube-api-access-7clvv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:30:05 crc kubenswrapper[4982]: I0122 07:30:05.458897 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9612f2b-06bd-4536-ad46-c74dfcd8d517-util" (OuterVolumeSpecName: "util") pod "b9612f2b-06bd-4536-ad46-c74dfcd8d517" (UID: "b9612f2b-06bd-4536-ad46-c74dfcd8d517"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:30:05 crc kubenswrapper[4982]: I0122 07:30:05.548985 4982 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b9612f2b-06bd-4536-ad46-c74dfcd8d517-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:30:05 crc kubenswrapper[4982]: I0122 07:30:05.549022 4982 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b9612f2b-06bd-4536-ad46-c74dfcd8d517-util\") on node \"crc\" DevicePath \"\"" Jan 22 07:30:05 crc kubenswrapper[4982]: I0122 07:30:05.549037 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7clvv\" (UniqueName: \"kubernetes.io/projected/b9612f2b-06bd-4536-ad46-c74dfcd8d517-kube-api-access-7clvv\") on node \"crc\" DevicePath \"\"" Jan 22 07:30:05 crc kubenswrapper[4982]: I0122 07:30:05.723080 4982 scope.go:117] "RemoveContainer" containerID="e86d7de3d5541f26c30d5e3150518e2a21550ebe9adbca61610c9190dc552078" Jan 22 07:30:05 crc kubenswrapper[4982]: E0122 07:30:05.724252 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:30:05 crc kubenswrapper[4982]: I0122 07:30:05.741388 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b38cb7c7-9698-4142-896b-01cbb320f58a" path="/var/lib/kubelet/pods/b38cb7c7-9698-4142-896b-01cbb320f58a/volumes" Jan 22 07:30:06 crc kubenswrapper[4982]: I0122 07:30:06.045202 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx" event={"ID":"b9612f2b-06bd-4536-ad46-c74dfcd8d517","Type":"ContainerDied","Data":"19fcb87e2c646e2716917e06a64daca3ae1da6a2b95906b2dfb6d6e7fffd0f79"} Jan 22 07:30:06 crc kubenswrapper[4982]: I0122 07:30:06.045533 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="19fcb87e2c646e2716917e06a64daca3ae1da6a2b95906b2dfb6d6e7fffd0f79" Jan 22 07:30:06 crc kubenswrapper[4982]: I0122 07:30:06.045487 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx" Jan 22 07:30:06 crc kubenswrapper[4982]: I0122 07:30:06.566399 4982 scope.go:117] "RemoveContainer" containerID="4c5c84709f67855d277c1a243c9c35f9cf59362bfaeee6ddf99ee353419a5fc6" Jan 22 07:30:06 crc kubenswrapper[4982]: I0122 07:30:06.609154 4982 scope.go:117] "RemoveContainer" containerID="a3a01d13c170a6034340b48ac2dbb794eb0908be81e280a57a677eb463959afa" Jan 22 07:30:06 crc kubenswrapper[4982]: I0122 07:30:06.648768 4982 scope.go:117] "RemoveContainer" containerID="67d2b5f3c1efe69086c8b4388b8e823c6cc54f075c82d8d33600ba80c5db133d" Jan 22 07:30:06 crc kubenswrapper[4982]: I0122 07:30:06.688061 4982 scope.go:117] "RemoveContainer" containerID="7f867cdebd63b42ebf9d53880264a14570c17898cb5dc6866ca8ccf70d50aa98" Jan 22 07:30:06 crc kubenswrapper[4982]: I0122 07:30:06.729210 4982 scope.go:117] "RemoveContainer" containerID="50fce7693c69c8d79de602b70d1b2919800743a223649ffca9552985d97c472e" Jan 22 07:30:06 crc kubenswrapper[4982]: I0122 07:30:06.768402 4982 scope.go:117] "RemoveContainer" containerID="b8bf00fe84e1522399135c08d3c7c5dae6b6b0128140a0866ca394a1f7660949" Jan 22 07:30:06 crc kubenswrapper[4982]: I0122 07:30:06.806066 4982 scope.go:117] "RemoveContainer" containerID="c8200c57fc403fcffa3f29e4a00cd1fecb42fbb2954e1a00d40db5ec9b05c92a" Jan 22 07:30:06 crc kubenswrapper[4982]: I0122 07:30:06.847211 4982 scope.go:117] "RemoveContainer" containerID="fe5235f9b57df932353fb0dc56092f5b393a9d3584d5b397e5c4b59133f774c0" Jan 22 07:30:09 crc kubenswrapper[4982]: I0122 07:30:09.082579 4982 generic.go:334] "Generic (PLEG): container finished" podID="db75fc6f-fad5-4212-be6b-e310f66ee471" containerID="2dc0d63afa106ab83e3d91c78e49bc6f2e76b08fd10599993c145d2acf78598c" exitCode=137 Jan 22 07:30:09 crc kubenswrapper[4982]: I0122 07:30:09.082661 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b44bfdd57-k7hnf" event={"ID":"db75fc6f-fad5-4212-be6b-e310f66ee471","Type":"ContainerDied","Data":"2dc0d63afa106ab83e3d91c78e49bc6f2e76b08fd10599993c145d2acf78598c"} Jan 22 07:30:09 crc kubenswrapper[4982]: I0122 07:30:09.083170 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b44bfdd57-k7hnf" event={"ID":"db75fc6f-fad5-4212-be6b-e310f66ee471","Type":"ContainerDied","Data":"671c066c1ae8a9f53bb2d5d8e91706d09a6980664638ff03ac1156488df5939f"} Jan 22 07:30:09 crc kubenswrapper[4982]: I0122 07:30:09.083183 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="671c066c1ae8a9f53bb2d5d8e91706d09a6980664638ff03ac1156488df5939f" Jan 22 07:30:09 crc kubenswrapper[4982]: I0122 07:30:09.138227 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7b44bfdd57-k7hnf" Jan 22 07:30:09 crc kubenswrapper[4982]: I0122 07:30:09.224141 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/db75fc6f-fad5-4212-be6b-e310f66ee471-horizon-secret-key\") pod \"db75fc6f-fad5-4212-be6b-e310f66ee471\" (UID: \"db75fc6f-fad5-4212-be6b-e310f66ee471\") " Jan 22 07:30:09 crc kubenswrapper[4982]: I0122 07:30:09.224333 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/db75fc6f-fad5-4212-be6b-e310f66ee471-config-data\") pod \"db75fc6f-fad5-4212-be6b-e310f66ee471\" (UID: \"db75fc6f-fad5-4212-be6b-e310f66ee471\") " Jan 22 07:30:09 crc kubenswrapper[4982]: I0122 07:30:09.224368 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db75fc6f-fad5-4212-be6b-e310f66ee471-logs\") pod \"db75fc6f-fad5-4212-be6b-e310f66ee471\" (UID: \"db75fc6f-fad5-4212-be6b-e310f66ee471\") " Jan 22 07:30:09 crc kubenswrapper[4982]: I0122 07:30:09.224423 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-frqc2\" (UniqueName: \"kubernetes.io/projected/db75fc6f-fad5-4212-be6b-e310f66ee471-kube-api-access-frqc2\") pod \"db75fc6f-fad5-4212-be6b-e310f66ee471\" (UID: \"db75fc6f-fad5-4212-be6b-e310f66ee471\") " Jan 22 07:30:09 crc kubenswrapper[4982]: I0122 07:30:09.224516 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db75fc6f-fad5-4212-be6b-e310f66ee471-scripts\") pod \"db75fc6f-fad5-4212-be6b-e310f66ee471\" (UID: \"db75fc6f-fad5-4212-be6b-e310f66ee471\") " Jan 22 07:30:09 crc kubenswrapper[4982]: I0122 07:30:09.225676 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db75fc6f-fad5-4212-be6b-e310f66ee471-logs" (OuterVolumeSpecName: "logs") pod "db75fc6f-fad5-4212-be6b-e310f66ee471" (UID: "db75fc6f-fad5-4212-be6b-e310f66ee471"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:30:09 crc kubenswrapper[4982]: I0122 07:30:09.230346 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db75fc6f-fad5-4212-be6b-e310f66ee471-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "db75fc6f-fad5-4212-be6b-e310f66ee471" (UID: "db75fc6f-fad5-4212-be6b-e310f66ee471"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:30:09 crc kubenswrapper[4982]: I0122 07:30:09.230651 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db75fc6f-fad5-4212-be6b-e310f66ee471-kube-api-access-frqc2" (OuterVolumeSpecName: "kube-api-access-frqc2") pod "db75fc6f-fad5-4212-be6b-e310f66ee471" (UID: "db75fc6f-fad5-4212-be6b-e310f66ee471"). InnerVolumeSpecName "kube-api-access-frqc2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:30:09 crc kubenswrapper[4982]: I0122 07:30:09.255238 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db75fc6f-fad5-4212-be6b-e310f66ee471-scripts" (OuterVolumeSpecName: "scripts") pod "db75fc6f-fad5-4212-be6b-e310f66ee471" (UID: "db75fc6f-fad5-4212-be6b-e310f66ee471"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:30:09 crc kubenswrapper[4982]: I0122 07:30:09.257103 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db75fc6f-fad5-4212-be6b-e310f66ee471-config-data" (OuterVolumeSpecName: "config-data") pod "db75fc6f-fad5-4212-be6b-e310f66ee471" (UID: "db75fc6f-fad5-4212-be6b-e310f66ee471"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:30:09 crc kubenswrapper[4982]: I0122 07:30:09.327667 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db75fc6f-fad5-4212-be6b-e310f66ee471-logs\") on node \"crc\" DevicePath \"\"" Jan 22 07:30:09 crc kubenswrapper[4982]: I0122 07:30:09.327994 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-frqc2\" (UniqueName: \"kubernetes.io/projected/db75fc6f-fad5-4212-be6b-e310f66ee471-kube-api-access-frqc2\") on node \"crc\" DevicePath \"\"" Jan 22 07:30:09 crc kubenswrapper[4982]: I0122 07:30:09.328096 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db75fc6f-fad5-4212-be6b-e310f66ee471-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:30:09 crc kubenswrapper[4982]: I0122 07:30:09.328178 4982 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/db75fc6f-fad5-4212-be6b-e310f66ee471-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Jan 22 07:30:09 crc kubenswrapper[4982]: I0122 07:30:09.328282 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/db75fc6f-fad5-4212-be6b-e310f66ee471-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:30:09 crc kubenswrapper[4982]: E0122 07:30:09.951004 4982 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb75fc6f_fad5_4212_be6b_e310f66ee471.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb75fc6f_fad5_4212_be6b_e310f66ee471.slice/crio-671c066c1ae8a9f53bb2d5d8e91706d09a6980664638ff03ac1156488df5939f\": RecentStats: unable to find data in memory cache]" Jan 22 07:30:10 crc kubenswrapper[4982]: I0122 07:30:10.090949 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7b44bfdd57-k7hnf" Jan 22 07:30:10 crc kubenswrapper[4982]: I0122 07:30:10.119714 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7b44bfdd57-k7hnf"] Jan 22 07:30:10 crc kubenswrapper[4982]: I0122 07:30:10.130398 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7b44bfdd57-k7hnf"] Jan 22 07:30:11 crc kubenswrapper[4982]: I0122 07:30:11.730167 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db75fc6f-fad5-4212-be6b-e310f66ee471" path="/var/lib/kubelet/pods/db75fc6f-fad5-4212-be6b-e310f66ee471/volumes" Jan 22 07:30:16 crc kubenswrapper[4982]: I0122 07:30:16.719089 4982 scope.go:117] "RemoveContainer" containerID="e86d7de3d5541f26c30d5e3150518e2a21550ebe9adbca61610c9190dc552078" Jan 22 07:30:16 crc kubenswrapper[4982]: E0122 07:30:16.719774 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:30:16 crc kubenswrapper[4982]: I0122 07:30:16.997661 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-gm2ks"] Jan 22 07:30:16 crc kubenswrapper[4982]: E0122 07:30:16.998632 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="615a38ca-24cd-4f8c-82cf-764c8f0f68bb" containerName="collect-profiles" Jan 22 07:30:16 crc kubenswrapper[4982]: I0122 07:30:16.998717 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="615a38ca-24cd-4f8c-82cf-764c8f0f68bb" containerName="collect-profiles" Jan 22 07:30:16 crc kubenswrapper[4982]: E0122 07:30:16.998794 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9612f2b-06bd-4536-ad46-c74dfcd8d517" containerName="util" Jan 22 07:30:16 crc kubenswrapper[4982]: I0122 07:30:16.998897 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9612f2b-06bd-4536-ad46-c74dfcd8d517" containerName="util" Jan 22 07:30:16 crc kubenswrapper[4982]: E0122 07:30:16.998984 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db75fc6f-fad5-4212-be6b-e310f66ee471" containerName="horizon-log" Jan 22 07:30:16 crc kubenswrapper[4982]: I0122 07:30:16.999040 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="db75fc6f-fad5-4212-be6b-e310f66ee471" containerName="horizon-log" Jan 22 07:30:16 crc kubenswrapper[4982]: E0122 07:30:16.999111 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9612f2b-06bd-4536-ad46-c74dfcd8d517" containerName="pull" Jan 22 07:30:16 crc kubenswrapper[4982]: I0122 07:30:16.999168 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9612f2b-06bd-4536-ad46-c74dfcd8d517" containerName="pull" Jan 22 07:30:16 crc kubenswrapper[4982]: E0122 07:30:16.999257 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db75fc6f-fad5-4212-be6b-e310f66ee471" containerName="horizon" Jan 22 07:30:16 crc kubenswrapper[4982]: I0122 07:30:16.999346 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="db75fc6f-fad5-4212-be6b-e310f66ee471" containerName="horizon" Jan 22 07:30:16 crc kubenswrapper[4982]: E0122 07:30:16.999433 4982 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="b9612f2b-06bd-4536-ad46-c74dfcd8d517" containerName="extract" Jan 22 07:30:16 crc kubenswrapper[4982]: I0122 07:30:16.999514 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9612f2b-06bd-4536-ad46-c74dfcd8d517" containerName="extract" Jan 22 07:30:16 crc kubenswrapper[4982]: I0122 07:30:16.999840 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="615a38ca-24cd-4f8c-82cf-764c8f0f68bb" containerName="collect-profiles" Jan 22 07:30:16 crc kubenswrapper[4982]: I0122 07:30:16.999966 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="db75fc6f-fad5-4212-be6b-e310f66ee471" containerName="horizon-log" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.000055 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9612f2b-06bd-4536-ad46-c74dfcd8d517" containerName="extract" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.000121 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="db75fc6f-fad5-4212-be6b-e310f66ee471" containerName="horizon" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.000811 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-gm2ks" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.007810 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.008927 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.009367 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-tfxh6" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.018110 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-gm2ks"] Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.064909 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6d86744544-9cb44"] Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.066519 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d86744544-9cb44" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.076607 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-s2qj9" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.083063 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.093176 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwvfs\" (UniqueName: \"kubernetes.io/projected/d1776698-995c-47f7-bc78-c0123382daca-kube-api-access-jwvfs\") pod \"obo-prometheus-operator-68bc856cb9-gm2ks\" (UID: \"d1776698-995c-47f7-bc78-c0123382daca\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-gm2ks" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.093509 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6d86744544-tkv9d"] Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.095222 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d86744544-tkv9d" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.112458 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6d86744544-tkv9d"] Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.122914 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6d86744544-9cb44"] Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.194590 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a1faaf21-b27e-4428-af7c-d3be126a9ed7-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6d86744544-9cb44\" (UID: \"a1faaf21-b27e-4428-af7c-d3be126a9ed7\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d86744544-9cb44" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.194717 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a1faaf21-b27e-4428-af7c-d3be126a9ed7-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6d86744544-9cb44\" (UID: \"a1faaf21-b27e-4428-af7c-d3be126a9ed7\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d86744544-9cb44" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.194760 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwvfs\" (UniqueName: \"kubernetes.io/projected/d1776698-995c-47f7-bc78-c0123382daca-kube-api-access-jwvfs\") pod \"obo-prometheus-operator-68bc856cb9-gm2ks\" (UID: \"d1776698-995c-47f7-bc78-c0123382daca\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-gm2ks" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.194792 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0c419053-764e-456d-a3e0-86cd79f71c24-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6d86744544-tkv9d\" (UID: 
\"0c419053-764e-456d-a3e0-86cd79f71c24\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d86744544-tkv9d" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.194817 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0c419053-764e-456d-a3e0-86cd79f71c24-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6d86744544-tkv9d\" (UID: \"0c419053-764e-456d-a3e0-86cd79f71c24\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d86744544-tkv9d" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.216143 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-m4rnp"] Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.217394 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-m4rnp" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.219512 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwvfs\" (UniqueName: \"kubernetes.io/projected/d1776698-995c-47f7-bc78-c0123382daca-kube-api-access-jwvfs\") pod \"obo-prometheus-operator-68bc856cb9-gm2ks\" (UID: \"d1776698-995c-47f7-bc78-c0123382daca\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-gm2ks" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.222195 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.222670 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-zbzr6" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.233778 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-m4rnp"] Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.296954 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0c419053-764e-456d-a3e0-86cd79f71c24-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6d86744544-tkv9d\" (UID: \"0c419053-764e-456d-a3e0-86cd79f71c24\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d86744544-tkv9d" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.297002 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0c419053-764e-456d-a3e0-86cd79f71c24-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6d86744544-tkv9d\" (UID: \"0c419053-764e-456d-a3e0-86cd79f71c24\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d86744544-tkv9d" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.297068 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/624f63e9-4045-48b7-9352-85b78572c5ec-observability-operator-tls\") pod \"observability-operator-59bdc8b94-m4rnp\" (UID: \"624f63e9-4045-48b7-9352-85b78572c5ec\") " pod="openshift-operators/observability-operator-59bdc8b94-m4rnp" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.297135 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/a1faaf21-b27e-4428-af7c-d3be126a9ed7-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6d86744544-9cb44\" (UID: \"a1faaf21-b27e-4428-af7c-d3be126a9ed7\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d86744544-9cb44" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.297210 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxcwv\" (UniqueName: \"kubernetes.io/projected/624f63e9-4045-48b7-9352-85b78572c5ec-kube-api-access-fxcwv\") pod \"observability-operator-59bdc8b94-m4rnp\" (UID: \"624f63e9-4045-48b7-9352-85b78572c5ec\") " pod="openshift-operators/observability-operator-59bdc8b94-m4rnp" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.297246 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a1faaf21-b27e-4428-af7c-d3be126a9ed7-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6d86744544-9cb44\" (UID: \"a1faaf21-b27e-4428-af7c-d3be126a9ed7\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d86744544-9cb44" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.315925 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a1faaf21-b27e-4428-af7c-d3be126a9ed7-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6d86744544-9cb44\" (UID: \"a1faaf21-b27e-4428-af7c-d3be126a9ed7\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d86744544-9cb44" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.316496 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0c419053-764e-456d-a3e0-86cd79f71c24-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6d86744544-tkv9d\" (UID: \"0c419053-764e-456d-a3e0-86cd79f71c24\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d86744544-tkv9d" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.316835 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0c419053-764e-456d-a3e0-86cd79f71c24-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6d86744544-tkv9d\" (UID: \"0c419053-764e-456d-a3e0-86cd79f71c24\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d86744544-tkv9d" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.321366 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a1faaf21-b27e-4428-af7c-d3be126a9ed7-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6d86744544-9cb44\" (UID: \"a1faaf21-b27e-4428-af7c-d3be126a9ed7\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d86744544-9cb44" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.323938 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-gm2ks" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.385811 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d86744544-9cb44" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.400173 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/624f63e9-4045-48b7-9352-85b78572c5ec-observability-operator-tls\") pod \"observability-operator-59bdc8b94-m4rnp\" (UID: \"624f63e9-4045-48b7-9352-85b78572c5ec\") " pod="openshift-operators/observability-operator-59bdc8b94-m4rnp" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.400305 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxcwv\" (UniqueName: \"kubernetes.io/projected/624f63e9-4045-48b7-9352-85b78572c5ec-kube-api-access-fxcwv\") pod \"observability-operator-59bdc8b94-m4rnp\" (UID: \"624f63e9-4045-48b7-9352-85b78572c5ec\") " pod="openshift-operators/observability-operator-59bdc8b94-m4rnp" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.405014 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/624f63e9-4045-48b7-9352-85b78572c5ec-observability-operator-tls\") pod \"observability-operator-59bdc8b94-m4rnp\" (UID: \"624f63e9-4045-48b7-9352-85b78572c5ec\") " pod="openshift-operators/observability-operator-59bdc8b94-m4rnp" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.415370 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d86744544-tkv9d" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.475691 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxcwv\" (UniqueName: \"kubernetes.io/projected/624f63e9-4045-48b7-9352-85b78572c5ec-kube-api-access-fxcwv\") pod \"observability-operator-59bdc8b94-m4rnp\" (UID: \"624f63e9-4045-48b7-9352-85b78572c5ec\") " pod="openshift-operators/observability-operator-59bdc8b94-m4rnp" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.517602 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-hvlnj"] Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.527662 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-hvlnj" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.534140 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-ng5rv" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.558732 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-hvlnj"] Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.585447 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-m4rnp" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.608727 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nz965\" (UniqueName: \"kubernetes.io/projected/fc4a27dd-6f09-4624-bde7-fdb23f8dbaa8-kube-api-access-nz965\") pod \"perses-operator-5bf474d74f-hvlnj\" (UID: \"fc4a27dd-6f09-4624-bde7-fdb23f8dbaa8\") " pod="openshift-operators/perses-operator-5bf474d74f-hvlnj" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.608876 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/fc4a27dd-6f09-4624-bde7-fdb23f8dbaa8-openshift-service-ca\") pod \"perses-operator-5bf474d74f-hvlnj\" (UID: \"fc4a27dd-6f09-4624-bde7-fdb23f8dbaa8\") " pod="openshift-operators/perses-operator-5bf474d74f-hvlnj" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.712206 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/fc4a27dd-6f09-4624-bde7-fdb23f8dbaa8-openshift-service-ca\") pod \"perses-operator-5bf474d74f-hvlnj\" (UID: \"fc4a27dd-6f09-4624-bde7-fdb23f8dbaa8\") " pod="openshift-operators/perses-operator-5bf474d74f-hvlnj" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.712410 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nz965\" (UniqueName: \"kubernetes.io/projected/fc4a27dd-6f09-4624-bde7-fdb23f8dbaa8-kube-api-access-nz965\") pod \"perses-operator-5bf474d74f-hvlnj\" (UID: \"fc4a27dd-6f09-4624-bde7-fdb23f8dbaa8\") " pod="openshift-operators/perses-operator-5bf474d74f-hvlnj" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.716217 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/fc4a27dd-6f09-4624-bde7-fdb23f8dbaa8-openshift-service-ca\") pod \"perses-operator-5bf474d74f-hvlnj\" (UID: \"fc4a27dd-6f09-4624-bde7-fdb23f8dbaa8\") " pod="openshift-operators/perses-operator-5bf474d74f-hvlnj" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.750202 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nz965\" (UniqueName: \"kubernetes.io/projected/fc4a27dd-6f09-4624-bde7-fdb23f8dbaa8-kube-api-access-nz965\") pod \"perses-operator-5bf474d74f-hvlnj\" (UID: \"fc4a27dd-6f09-4624-bde7-fdb23f8dbaa8\") " pod="openshift-operators/perses-operator-5bf474d74f-hvlnj" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.875334 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-hvlnj" Jan 22 07:30:17 crc kubenswrapper[4982]: I0122 07:30:17.954066 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-gm2ks"] Jan 22 07:30:17 crc kubenswrapper[4982]: W0122 07:30:17.965368 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1776698_995c_47f7_bc78_c0123382daca.slice/crio-d05388d5867aa77089903bb6321df90d809ad8c8bde365619a18ee33bc9244c0 WatchSource:0}: Error finding container d05388d5867aa77089903bb6321df90d809ad8c8bde365619a18ee33bc9244c0: Status 404 returned error can't find the container with id d05388d5867aa77089903bb6321df90d809ad8c8bde365619a18ee33bc9244c0 Jan 22 07:30:18 crc kubenswrapper[4982]: I0122 07:30:18.056718 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-w2b2h"] Jan 22 07:30:18 crc kubenswrapper[4982]: I0122 07:30:18.070142 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-w2b2h"] Jan 22 07:30:18 crc kubenswrapper[4982]: I0122 07:30:18.092340 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6d86744544-9cb44"] Jan 22 07:30:18 crc kubenswrapper[4982]: I0122 07:30:18.184220 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d86744544-9cb44" event={"ID":"a1faaf21-b27e-4428-af7c-d3be126a9ed7","Type":"ContainerStarted","Data":"146e3e5581c51e885fda059be298b97a8601f7f944483959075eb6e4b864f8de"} Jan 22 07:30:18 crc kubenswrapper[4982]: I0122 07:30:18.185799 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-gm2ks" event={"ID":"d1776698-995c-47f7-bc78-c0123382daca","Type":"ContainerStarted","Data":"d05388d5867aa77089903bb6321df90d809ad8c8bde365619a18ee33bc9244c0"} Jan 22 07:30:18 crc kubenswrapper[4982]: I0122 07:30:18.196608 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6d86744544-tkv9d"] Jan 22 07:30:18 crc kubenswrapper[4982]: W0122 07:30:18.202271 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0c419053_764e_456d_a3e0_86cd79f71c24.slice/crio-20d3da2c70e5f35d5bc49609980918ccb2d536e8657f26b4763493833b8537f3 WatchSource:0}: Error finding container 20d3da2c70e5f35d5bc49609980918ccb2d536e8657f26b4763493833b8537f3: Status 404 returned error can't find the container with id 20d3da2c70e5f35d5bc49609980918ccb2d536e8657f26b4763493833b8537f3 Jan 22 07:30:18 crc kubenswrapper[4982]: I0122 07:30:18.295956 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-m4rnp"] Jan 22 07:30:18 crc kubenswrapper[4982]: I0122 07:30:18.441212 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-hvlnj"] Jan 22 07:30:18 crc kubenswrapper[4982]: I0122 07:30:18.855540 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8gfth"] Jan 22 07:30:18 crc kubenswrapper[4982]: I0122 07:30:18.857497 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8gfth" Jan 22 07:30:18 crc kubenswrapper[4982]: I0122 07:30:18.872077 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8gfth"] Jan 22 07:30:18 crc kubenswrapper[4982]: I0122 07:30:18.935684 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89ac01c7-1b9d-4b89-867e-b581cbe713c7-utilities\") pod \"certified-operators-8gfth\" (UID: \"89ac01c7-1b9d-4b89-867e-b581cbe713c7\") " pod="openshift-marketplace/certified-operators-8gfth" Jan 22 07:30:18 crc kubenswrapper[4982]: I0122 07:30:18.935887 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89ac01c7-1b9d-4b89-867e-b581cbe713c7-catalog-content\") pod \"certified-operators-8gfth\" (UID: \"89ac01c7-1b9d-4b89-867e-b581cbe713c7\") " pod="openshift-marketplace/certified-operators-8gfth" Jan 22 07:30:18 crc kubenswrapper[4982]: I0122 07:30:18.935966 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnfvt\" (UniqueName: \"kubernetes.io/projected/89ac01c7-1b9d-4b89-867e-b581cbe713c7-kube-api-access-pnfvt\") pod \"certified-operators-8gfth\" (UID: \"89ac01c7-1b9d-4b89-867e-b581cbe713c7\") " pod="openshift-marketplace/certified-operators-8gfth" Jan 22 07:30:19 crc kubenswrapper[4982]: I0122 07:30:19.037981 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89ac01c7-1b9d-4b89-867e-b581cbe713c7-catalog-content\") pod \"certified-operators-8gfth\" (UID: \"89ac01c7-1b9d-4b89-867e-b581cbe713c7\") " pod="openshift-marketplace/certified-operators-8gfth" Jan 22 07:30:19 crc kubenswrapper[4982]: I0122 07:30:19.038345 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnfvt\" (UniqueName: \"kubernetes.io/projected/89ac01c7-1b9d-4b89-867e-b581cbe713c7-kube-api-access-pnfvt\") pod \"certified-operators-8gfth\" (UID: \"89ac01c7-1b9d-4b89-867e-b581cbe713c7\") " pod="openshift-marketplace/certified-operators-8gfth" Jan 22 07:30:19 crc kubenswrapper[4982]: I0122 07:30:19.038446 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89ac01c7-1b9d-4b89-867e-b581cbe713c7-utilities\") pod \"certified-operators-8gfth\" (UID: \"89ac01c7-1b9d-4b89-867e-b581cbe713c7\") " pod="openshift-marketplace/certified-operators-8gfth" Jan 22 07:30:19 crc kubenswrapper[4982]: I0122 07:30:19.039058 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89ac01c7-1b9d-4b89-867e-b581cbe713c7-utilities\") pod \"certified-operators-8gfth\" (UID: \"89ac01c7-1b9d-4b89-867e-b581cbe713c7\") " pod="openshift-marketplace/certified-operators-8gfth" Jan 22 07:30:19 crc kubenswrapper[4982]: I0122 07:30:19.039347 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89ac01c7-1b9d-4b89-867e-b581cbe713c7-catalog-content\") pod \"certified-operators-8gfth\" (UID: \"89ac01c7-1b9d-4b89-867e-b581cbe713c7\") " pod="openshift-marketplace/certified-operators-8gfth" Jan 22 07:30:19 crc kubenswrapper[4982]: I0122 07:30:19.056610 4982 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-pnfvt\" (UniqueName: \"kubernetes.io/projected/89ac01c7-1b9d-4b89-867e-b581cbe713c7-kube-api-access-pnfvt\") pod \"certified-operators-8gfth\" (UID: \"89ac01c7-1b9d-4b89-867e-b581cbe713c7\") " pod="openshift-marketplace/certified-operators-8gfth" Jan 22 07:30:19 crc kubenswrapper[4982]: I0122 07:30:19.186479 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8gfth" Jan 22 07:30:19 crc kubenswrapper[4982]: I0122 07:30:19.202800 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d86744544-tkv9d" event={"ID":"0c419053-764e-456d-a3e0-86cd79f71c24","Type":"ContainerStarted","Data":"20d3da2c70e5f35d5bc49609980918ccb2d536e8657f26b4763493833b8537f3"} Jan 22 07:30:19 crc kubenswrapper[4982]: I0122 07:30:19.216078 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-59bdc8b94-m4rnp" event={"ID":"624f63e9-4045-48b7-9352-85b78572c5ec","Type":"ContainerStarted","Data":"28e253c9ba4b21097d43d7ffe1be1c7a31f13d8a6cac6cdfb190a97eb4b2ae00"} Jan 22 07:30:19 crc kubenswrapper[4982]: I0122 07:30:19.233426 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5bf474d74f-hvlnj" event={"ID":"fc4a27dd-6f09-4624-bde7-fdb23f8dbaa8","Type":"ContainerStarted","Data":"220ed02d62e66c32039f70db192754c77e8cb0e6422cfab681811c6a599e4dee"} Jan 22 07:30:19 crc kubenswrapper[4982]: I0122 07:30:19.461933 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-kszzd"] Jan 22 07:30:19 crc kubenswrapper[4982]: I0122 07:30:19.482125 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-kszzd"] Jan 22 07:30:19 crc kubenswrapper[4982]: I0122 07:30:19.752604 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61a89c15-5367-4977-8be9-80f16a2ef3a5" path="/var/lib/kubelet/pods/61a89c15-5367-4977-8be9-80f16a2ef3a5/volumes" Jan 22 07:30:19 crc kubenswrapper[4982]: I0122 07:30:19.754573 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea8cecab-7e99-40fe-93da-eb0a9427a422" path="/var/lib/kubelet/pods/ea8cecab-7e99-40fe-93da-eb0a9427a422/volumes" Jan 22 07:30:19 crc kubenswrapper[4982]: I0122 07:30:19.756869 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8gfth"] Jan 22 07:30:20 crc kubenswrapper[4982]: I0122 07:30:20.269059 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8gfth" event={"ID":"89ac01c7-1b9d-4b89-867e-b581cbe713c7","Type":"ContainerStarted","Data":"f3939db1619169de51f77ab905aabb591f9a7fa4ef1d849e1e8ba1cec1f25c80"} Jan 22 07:30:21 crc kubenswrapper[4982]: I0122 07:30:21.284505 4982 generic.go:334] "Generic (PLEG): container finished" podID="89ac01c7-1b9d-4b89-867e-b581cbe713c7" containerID="205f648c66d8403a496c7d89cdcb08a400f37967a602dd0521a8f8677ddf8ea7" exitCode=0 Jan 22 07:30:21 crc kubenswrapper[4982]: I0122 07:30:21.284565 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8gfth" event={"ID":"89ac01c7-1b9d-4b89-867e-b581cbe713c7","Type":"ContainerDied","Data":"205f648c66d8403a496c7d89cdcb08a400f37967a602dd0521a8f8677ddf8ea7"} Jan 22 07:30:28 crc kubenswrapper[4982]: I0122 07:30:28.359496 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-8gfth" event={"ID":"89ac01c7-1b9d-4b89-867e-b581cbe713c7","Type":"ContainerStarted","Data":"04c13f0e73d10edcaefce56e4e457fe10aa32df515061c86613789a9d0be7ccf"} Jan 22 07:30:28 crc kubenswrapper[4982]: I0122 07:30:28.361999 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-gm2ks" event={"ID":"d1776698-995c-47f7-bc78-c0123382daca","Type":"ContainerStarted","Data":"1b6e9126d8d8b6d199036a3e9d3611134006aaf5254111bbdc739f4b9bbfd9c7"} Jan 22 07:30:28 crc kubenswrapper[4982]: I0122 07:30:28.363609 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-59bdc8b94-m4rnp" event={"ID":"624f63e9-4045-48b7-9352-85b78572c5ec","Type":"ContainerStarted","Data":"bb32e0b8da5dd7d17a18c7a73138b01fe33e23aacf4f3ce31bd14921b6510020"} Jan 22 07:30:28 crc kubenswrapper[4982]: I0122 07:30:28.363784 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-59bdc8b94-m4rnp" Jan 22 07:30:28 crc kubenswrapper[4982]: I0122 07:30:28.366134 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-59bdc8b94-m4rnp" Jan 22 07:30:28 crc kubenswrapper[4982]: I0122 07:30:28.366523 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5bf474d74f-hvlnj" event={"ID":"fc4a27dd-6f09-4624-bde7-fdb23f8dbaa8","Type":"ContainerStarted","Data":"6f62755f14d976af1db0c2b2e44d6207a12e9157d607c78917b028995ad76236"} Jan 22 07:30:28 crc kubenswrapper[4982]: I0122 07:30:28.366623 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5bf474d74f-hvlnj" Jan 22 07:30:28 crc kubenswrapper[4982]: I0122 07:30:28.368313 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d86744544-9cb44" event={"ID":"a1faaf21-b27e-4428-af7c-d3be126a9ed7","Type":"ContainerStarted","Data":"f044fd8a95963c213a74b15e252b418052928ad4aa40aeee704c0030ce646d56"} Jan 22 07:30:28 crc kubenswrapper[4982]: I0122 07:30:28.370144 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d86744544-tkv9d" event={"ID":"0c419053-764e-456d-a3e0-86cd79f71c24","Type":"ContainerStarted","Data":"db35bf011f41d992b5e81e4ad03be3058006dad2c4ff91201549b348e3d7413d"} Jan 22 07:30:28 crc kubenswrapper[4982]: I0122 07:30:28.399503 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-gm2ks" podStartSLOduration=3.432039271 podStartE2EDuration="12.39948199s" podCreationTimestamp="2026-01-22 07:30:16 +0000 UTC" firstStartedPulling="2026-01-22 07:30:17.968997415 +0000 UTC m=+6278.807635418" lastFinishedPulling="2026-01-22 07:30:26.936440134 +0000 UTC m=+6287.775078137" observedRunningTime="2026-01-22 07:30:28.39134143 +0000 UTC m=+6289.229979443" watchObservedRunningTime="2026-01-22 07:30:28.39948199 +0000 UTC m=+6289.238119993" Jan 22 07:30:28 crc kubenswrapper[4982]: I0122 07:30:28.422168 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d86744544-9cb44" podStartSLOduration=2.529292447 podStartE2EDuration="11.422143022s" podCreationTimestamp="2026-01-22 07:30:17 +0000 UTC" firstStartedPulling="2026-01-22 07:30:18.099593581 +0000 
UTC m=+6278.938231584" lastFinishedPulling="2026-01-22 07:30:26.992444156 +0000 UTC m=+6287.831082159" observedRunningTime="2026-01-22 07:30:28.41355027 +0000 UTC m=+6289.252188273" watchObservedRunningTime="2026-01-22 07:30:28.422143022 +0000 UTC m=+6289.260781025" Jan 22 07:30:28 crc kubenswrapper[4982]: I0122 07:30:28.443098 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5bf474d74f-hvlnj" podStartSLOduration=3.04447574 podStartE2EDuration="11.443076548s" podCreationTimestamp="2026-01-22 07:30:17 +0000 UTC" firstStartedPulling="2026-01-22 07:30:18.443072287 +0000 UTC m=+6279.281710290" lastFinishedPulling="2026-01-22 07:30:26.841673095 +0000 UTC m=+6287.680311098" observedRunningTime="2026-01-22 07:30:28.430472777 +0000 UTC m=+6289.269110780" watchObservedRunningTime="2026-01-22 07:30:28.443076548 +0000 UTC m=+6289.281714551" Jan 22 07:30:28 crc kubenswrapper[4982]: I0122 07:30:28.513689 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-59bdc8b94-m4rnp" podStartSLOduration=2.901850398 podStartE2EDuration="11.513666684s" podCreationTimestamp="2026-01-22 07:30:17 +0000 UTC" firstStartedPulling="2026-01-22 07:30:18.324810693 +0000 UTC m=+6279.163448686" lastFinishedPulling="2026-01-22 07:30:26.936626929 +0000 UTC m=+6287.775264972" observedRunningTime="2026-01-22 07:30:28.477742153 +0000 UTC m=+6289.316380156" watchObservedRunningTime="2026-01-22 07:30:28.513666684 +0000 UTC m=+6289.352304687" Jan 22 07:30:28 crc kubenswrapper[4982]: I0122 07:30:28.525664 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6d86744544-tkv9d" podStartSLOduration=2.897932673 podStartE2EDuration="11.525637747s" podCreationTimestamp="2026-01-22 07:30:17 +0000 UTC" firstStartedPulling="2026-01-22 07:30:18.20617846 +0000 UTC m=+6279.044816463" lastFinishedPulling="2026-01-22 07:30:26.833883534 +0000 UTC m=+6287.672521537" observedRunningTime="2026-01-22 07:30:28.495283177 +0000 UTC m=+6289.333921180" watchObservedRunningTime="2026-01-22 07:30:28.525637747 +0000 UTC m=+6289.364275750" Jan 22 07:30:29 crc kubenswrapper[4982]: I0122 07:30:29.380555 4982 generic.go:334] "Generic (PLEG): container finished" podID="89ac01c7-1b9d-4b89-867e-b581cbe713c7" containerID="04c13f0e73d10edcaefce56e4e457fe10aa32df515061c86613789a9d0be7ccf" exitCode=0 Jan 22 07:30:29 crc kubenswrapper[4982]: I0122 07:30:29.380651 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8gfth" event={"ID":"89ac01c7-1b9d-4b89-867e-b581cbe713c7","Type":"ContainerDied","Data":"04c13f0e73d10edcaefce56e4e457fe10aa32df515061c86613789a9d0be7ccf"} Jan 22 07:30:29 crc kubenswrapper[4982]: I0122 07:30:29.384328 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 07:30:30 crc kubenswrapper[4982]: I0122 07:30:30.392474 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8gfth" event={"ID":"89ac01c7-1b9d-4b89-867e-b581cbe713c7","Type":"ContainerStarted","Data":"a61b5026d88fd140e2baa8b23e3fbc705b899eac7411417813294620ac7f52c7"} Jan 22 07:30:31 crc kubenswrapper[4982]: I0122 07:30:31.723375 4982 scope.go:117] "RemoveContainer" containerID="e86d7de3d5541f26c30d5e3150518e2a21550ebe9adbca61610c9190dc552078" Jan 22 07:30:31 crc kubenswrapper[4982]: E0122 07:30:31.748690 4982 pod_workers.go:1301] 
"Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:30:37 crc kubenswrapper[4982]: I0122 07:30:37.878796 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5bf474d74f-hvlnj" Jan 22 07:30:37 crc kubenswrapper[4982]: I0122 07:30:37.896266 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8gfth" podStartSLOduration=11.113720786 podStartE2EDuration="19.896245081s" podCreationTimestamp="2026-01-22 07:30:18 +0000 UTC" firstStartedPulling="2026-01-22 07:30:21.29061729 +0000 UTC m=+6282.129255293" lastFinishedPulling="2026-01-22 07:30:30.073141585 +0000 UTC m=+6290.911779588" observedRunningTime="2026-01-22 07:30:30.418783598 +0000 UTC m=+6291.257421601" watchObservedRunningTime="2026-01-22 07:30:37.896245081 +0000 UTC m=+6298.734883084" Jan 22 07:30:38 crc kubenswrapper[4982]: I0122 07:30:38.044628 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-xgj4f"] Jan 22 07:30:38 crc kubenswrapper[4982]: I0122 07:30:38.052947 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-xgj4f"] Jan 22 07:30:39 crc kubenswrapper[4982]: I0122 07:30:39.187835 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8gfth" Jan 22 07:30:39 crc kubenswrapper[4982]: I0122 07:30:39.187929 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8gfth" Jan 22 07:30:39 crc kubenswrapper[4982]: I0122 07:30:39.243572 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8gfth" Jan 22 07:30:39 crc kubenswrapper[4982]: I0122 07:30:39.555451 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8gfth" Jan 22 07:30:39 crc kubenswrapper[4982]: I0122 07:30:39.755638 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9" path="/var/lib/kubelet/pods/b2b8dcac-4ae8-4264-a0e9-67e14d9c1ab9/volumes" Jan 22 07:30:40 crc kubenswrapper[4982]: I0122 07:30:40.199597 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8gfth"] Jan 22 07:30:40 crc kubenswrapper[4982]: I0122 07:30:40.663039 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Jan 22 07:30:40 crc kubenswrapper[4982]: I0122 07:30:40.663264 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="1fde4825-4373-436f-9da0-2ee79b723de4" containerName="openstackclient" containerID="cri-o://cb2dabe31fcd2f0b70d5ece9914cf1373cb4111b59727680e79cb61fed97ba21" gracePeriod=2 Jan 22 07:30:40 crc kubenswrapper[4982]: I0122 07:30:40.680559 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Jan 22 07:30:40 crc kubenswrapper[4982]: I0122 07:30:40.702497 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Jan 
22 07:30:40 crc kubenswrapper[4982]: E0122 07:30:40.702919 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fde4825-4373-436f-9da0-2ee79b723de4" containerName="openstackclient" Jan 22 07:30:40 crc kubenswrapper[4982]: I0122 07:30:40.702935 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fde4825-4373-436f-9da0-2ee79b723de4" containerName="openstackclient" Jan 22 07:30:40 crc kubenswrapper[4982]: I0122 07:30:40.703104 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fde4825-4373-436f-9da0-2ee79b723de4" containerName="openstackclient" Jan 22 07:30:40 crc kubenswrapper[4982]: I0122 07:30:40.703783 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 22 07:30:40 crc kubenswrapper[4982]: I0122 07:30:40.741950 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jan 22 07:30:40 crc kubenswrapper[4982]: I0122 07:30:40.746421 4982 status_manager.go:875] "Failed to update status for pod" pod="openstack/openstackclient" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66e2fff1-64a5-46fd-8e59-ade07f0cd3c4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T07:30:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T07:30:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T07:30:40Z\\\",\\\"message\\\":\\\"containers with unready status: [openstackclient]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T07:30:40Z\\\",\\\"message\\\":\\\"containers with unready status: [openstackclient]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/podified-antelope-centos9/openstack-openstackclient@sha256:2b4f8494513a3af102066fec5868ab167ac8664aceb2f0c639d7a0b60260a944\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"openstackclient\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/home/cloud-admin/.config/openstack/clouds.yaml\\\",\\\"name\\\":\\\"openstack-config\\\"},{\\\"mountPath\\\":\\\"/home/cloud-admin/.config/openstack/secure.yaml\\\",\\\"name\\\":\\\"openstack-config-secret\\\"},{\\\"mountPath\\\":\\\"/home/cloud-admin/cloudrc\\\",\\\"name\\\":\\\"openstack-config-secret\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n86r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T07:30:40Z\\\"}}\" for pod \"openstack\"/\"openstackclient\": pods \"openstackclient\" not found" Jan 22 07:30:40 crc kubenswrapper[4982]: I0122 07:30:40.783829 4982 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/openstackclient"] Jan 22 07:30:40 crc kubenswrapper[4982]: E0122 07:30:40.784786 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-8n86r openstack-config openstack-config-secret], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/openstackclient" podUID="66e2fff1-64a5-46fd-8e59-ade07f0cd3c4" Jan 22 07:30:40 crc kubenswrapper[4982]: I0122 07:30:40.798954 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Jan 22 07:30:40 crc kubenswrapper[4982]: I0122 07:30:40.800936 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/66e2fff1-64a5-46fd-8e59-ade07f0cd3c4-openstack-config\") pod \"openstackclient\" (UID: \"66e2fff1-64a5-46fd-8e59-ade07f0cd3c4\") " pod="openstack/openstackclient" Jan 22 07:30:40 crc kubenswrapper[4982]: I0122 07:30:40.801033 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8n86r\" (UniqueName: \"kubernetes.io/projected/66e2fff1-64a5-46fd-8e59-ade07f0cd3c4-kube-api-access-8n86r\") pod \"openstackclient\" (UID: \"66e2fff1-64a5-46fd-8e59-ade07f0cd3c4\") " pod="openstack/openstackclient" Jan 22 07:30:40 crc kubenswrapper[4982]: I0122 07:30:40.801094 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/66e2fff1-64a5-46fd-8e59-ade07f0cd3c4-openstack-config-secret\") pod \"openstackclient\" (UID: \"66e2fff1-64a5-46fd-8e59-ade07f0cd3c4\") " pod="openstack/openstackclient" Jan 22 07:30:40 crc kubenswrapper[4982]: I0122 07:30:40.810944 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Jan 22 07:30:40 crc kubenswrapper[4982]: I0122 07:30:40.812279 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Jan 22 07:30:40 crc kubenswrapper[4982]: I0122 07:30:40.819196 4982 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="66e2fff1-64a5-46fd-8e59-ade07f0cd3c4" podUID="a473cc16-7761-429e-b28a-b2d502832ef9" Jan 22 07:30:40 crc kubenswrapper[4982]: I0122 07:30:40.822268 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jan 22 07:30:40 crc kubenswrapper[4982]: I0122 07:30:40.844462 4982 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="1fde4825-4373-436f-9da0-2ee79b723de4" podUID="a473cc16-7761-429e-b28a-b2d502832ef9" Jan 22 07:30:40 crc kubenswrapper[4982]: I0122 07:30:40.904812 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8n86r\" (UniqueName: \"kubernetes.io/projected/66e2fff1-64a5-46fd-8e59-ade07f0cd3c4-kube-api-access-8n86r\") pod \"openstackclient\" (UID: \"66e2fff1-64a5-46fd-8e59-ade07f0cd3c4\") " pod="openstack/openstackclient" Jan 22 07:30:40 crc kubenswrapper[4982]: I0122 07:30:40.905082 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a473cc16-7761-429e-b28a-b2d502832ef9-openstack-config-secret\") pod \"openstackclient\" (UID: \"a473cc16-7761-429e-b28a-b2d502832ef9\") " pod="openstack/openstackclient" Jan 22 07:30:40 crc kubenswrapper[4982]: I0122 07:30:40.905189 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/66e2fff1-64a5-46fd-8e59-ade07f0cd3c4-openstack-config-secret\") pod \"openstackclient\" (UID: \"66e2fff1-64a5-46fd-8e59-ade07f0cd3c4\") " pod="openstack/openstackclient" Jan 22 07:30:40 crc kubenswrapper[4982]: I0122 07:30:40.905300 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtmck\" (UniqueName: \"kubernetes.io/projected/a473cc16-7761-429e-b28a-b2d502832ef9-kube-api-access-wtmck\") pod \"openstackclient\" (UID: \"a473cc16-7761-429e-b28a-b2d502832ef9\") " pod="openstack/openstackclient" Jan 22 07:30:40 crc kubenswrapper[4982]: I0122 07:30:40.905515 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a473cc16-7761-429e-b28a-b2d502832ef9-openstack-config\") pod \"openstackclient\" (UID: \"a473cc16-7761-429e-b28a-b2d502832ef9\") " pod="openstack/openstackclient" Jan 22 07:30:40 crc kubenswrapper[4982]: I0122 07:30:40.905677 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/66e2fff1-64a5-46fd-8e59-ade07f0cd3c4-openstack-config\") pod \"openstackclient\" (UID: \"66e2fff1-64a5-46fd-8e59-ade07f0cd3c4\") " pod="openstack/openstackclient" Jan 22 07:30:40 crc kubenswrapper[4982]: I0122 07:30:40.906551 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/66e2fff1-64a5-46fd-8e59-ade07f0cd3c4-openstack-config\") pod \"openstackclient\" (UID: \"66e2fff1-64a5-46fd-8e59-ade07f0cd3c4\") " pod="openstack/openstackclient" Jan 22 07:30:40 crc kubenswrapper[4982]: E0122 07:30:40.909862 4982 projected.go:194] Error preparing data for projected volume 
kube-api-access-8n86r for pod openstack/openstackclient: failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: the UID in the bound object reference (66e2fff1-64a5-46fd-8e59-ade07f0cd3c4) does not match the UID in record. The object might have been deleted and then recreated Jan 22 07:30:40 crc kubenswrapper[4982]: E0122 07:30:40.909928 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/66e2fff1-64a5-46fd-8e59-ade07f0cd3c4-kube-api-access-8n86r podName:66e2fff1-64a5-46fd-8e59-ade07f0cd3c4 nodeName:}" failed. No retries permitted until 2026-01-22 07:30:41.40990717 +0000 UTC m=+6302.248545173 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-8n86r" (UniqueName: "kubernetes.io/projected/66e2fff1-64a5-46fd-8e59-ade07f0cd3c4-kube-api-access-8n86r") pod "openstackclient" (UID: "66e2fff1-64a5-46fd-8e59-ade07f0cd3c4") : failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: the UID in the bound object reference (66e2fff1-64a5-46fd-8e59-ade07f0cd3c4) does not match the UID in record. The object might have been deleted and then recreated Jan 22 07:30:40 crc kubenswrapper[4982]: I0122 07:30:40.912648 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Jan 22 07:30:40 crc kubenswrapper[4982]: I0122 07:30:40.914046 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 22 07:30:40 crc kubenswrapper[4982]: I0122 07:30:40.918512 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-jbfvt" Jan 22 07:30:40 crc kubenswrapper[4982]: I0122 07:30:40.923675 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 22 07:30:40 crc kubenswrapper[4982]: I0122 07:30:40.924938 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/66e2fff1-64a5-46fd-8e59-ade07f0cd3c4-openstack-config-secret\") pod \"openstackclient\" (UID: \"66e2fff1-64a5-46fd-8e59-ade07f0cd3c4\") " pod="openstack/openstackclient" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.010887 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtmck\" (UniqueName: \"kubernetes.io/projected/a473cc16-7761-429e-b28a-b2d502832ef9-kube-api-access-wtmck\") pod \"openstackclient\" (UID: \"a473cc16-7761-429e-b28a-b2d502832ef9\") " pod="openstack/openstackclient" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.010959 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a473cc16-7761-429e-b28a-b2d502832ef9-openstack-config\") pod \"openstackclient\" (UID: \"a473cc16-7761-429e-b28a-b2d502832ef9\") " pod="openstack/openstackclient" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.011031 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64bhn\" (UniqueName: \"kubernetes.io/projected/e4b2d188-afb5-4d60-8376-b187161abe16-kube-api-access-64bhn\") pod \"kube-state-metrics-0\" (UID: \"e4b2d188-afb5-4d60-8376-b187161abe16\") " pod="openstack/kube-state-metrics-0" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.011121 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" 
(UniqueName: \"kubernetes.io/secret/a473cc16-7761-429e-b28a-b2d502832ef9-openstack-config-secret\") pod \"openstackclient\" (UID: \"a473cc16-7761-429e-b28a-b2d502832ef9\") " pod="openstack/openstackclient" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.012400 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a473cc16-7761-429e-b28a-b2d502832ef9-openstack-config\") pod \"openstackclient\" (UID: \"a473cc16-7761-429e-b28a-b2d502832ef9\") " pod="openstack/openstackclient" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.030429 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a473cc16-7761-429e-b28a-b2d502832ef9-openstack-config-secret\") pod \"openstackclient\" (UID: \"a473cc16-7761-429e-b28a-b2d502832ef9\") " pod="openstack/openstackclient" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.061563 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wtmck\" (UniqueName: \"kubernetes.io/projected/a473cc16-7761-429e-b28a-b2d502832ef9-kube-api-access-wtmck\") pod \"openstackclient\" (UID: \"a473cc16-7761-429e-b28a-b2d502832ef9\") " pod="openstack/openstackclient" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.113030 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64bhn\" (UniqueName: \"kubernetes.io/projected/e4b2d188-afb5-4d60-8376-b187161abe16-kube-api-access-64bhn\") pod \"kube-state-metrics-0\" (UID: \"e4b2d188-afb5-4d60-8376-b187161abe16\") " pod="openstack/kube-state-metrics-0" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.147688 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.165733 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64bhn\" (UniqueName: \"kubernetes.io/projected/e4b2d188-afb5-4d60-8376-b187161abe16-kube-api-access-64bhn\") pod \"kube-state-metrics-0\" (UID: \"e4b2d188-afb5-4d60-8376-b187161abe16\") " pod="openstack/kube-state-metrics-0" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.288381 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.426996 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8n86r\" (UniqueName: \"kubernetes.io/projected/66e2fff1-64a5-46fd-8e59-ade07f0cd3c4-kube-api-access-8n86r\") pod \"openstackclient\" (UID: \"66e2fff1-64a5-46fd-8e59-ade07f0cd3c4\") " pod="openstack/openstackclient" Jan 22 07:30:41 crc kubenswrapper[4982]: E0122 07:30:41.430312 4982 projected.go:194] Error preparing data for projected volume kube-api-access-8n86r for pod openstack/openstackclient: failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: the UID in the bound object reference (66e2fff1-64a5-46fd-8e59-ade07f0cd3c4) does not match the UID in record. The object might have been deleted and then recreated Jan 22 07:30:41 crc kubenswrapper[4982]: E0122 07:30:41.430382 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/66e2fff1-64a5-46fd-8e59-ade07f0cd3c4-kube-api-access-8n86r podName:66e2fff1-64a5-46fd-8e59-ade07f0cd3c4 nodeName:}" failed. 
No retries permitted until 2026-01-22 07:30:42.430359274 +0000 UTC m=+6303.268997277 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-8n86r" (UniqueName: "kubernetes.io/projected/66e2fff1-64a5-46fd-8e59-ade07f0cd3c4-kube-api-access-8n86r") pod "openstackclient" (UID: "66e2fff1-64a5-46fd-8e59-ade07f0cd3c4") : failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: the UID in the bound object reference (66e2fff1-64a5-46fd-8e59-ade07f0cd3c4) does not match the UID in record. The object might have been deleted and then recreated Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.520966 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-8gfth" podUID="89ac01c7-1b9d-4b89-867e-b581cbe713c7" containerName="registry-server" containerID="cri-o://a61b5026d88fd140e2baa8b23e3fbc705b899eac7411417813294620ac7f52c7" gracePeriod=2 Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.521443 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.541279 4982 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="66e2fff1-64a5-46fd-8e59-ade07f0cd3c4" podUID="a473cc16-7761-429e-b28a-b2d502832ef9" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.553875 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.561222 4982 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="66e2fff1-64a5-46fd-8e59-ade07f0cd3c4" podUID="a473cc16-7761-429e-b28a-b2d502832ef9" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.632536 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/66e2fff1-64a5-46fd-8e59-ade07f0cd3c4-openstack-config\") pod \"66e2fff1-64a5-46fd-8e59-ade07f0cd3c4\" (UID: \"66e2fff1-64a5-46fd-8e59-ade07f0cd3c4\") " Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.632762 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/66e2fff1-64a5-46fd-8e59-ade07f0cd3c4-openstack-config-secret\") pod \"66e2fff1-64a5-46fd-8e59-ade07f0cd3c4\" (UID: \"66e2fff1-64a5-46fd-8e59-ade07f0cd3c4\") " Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.633365 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8n86r\" (UniqueName: \"kubernetes.io/projected/66e2fff1-64a5-46fd-8e59-ade07f0cd3c4-kube-api-access-8n86r\") on node \"crc\" DevicePath \"\"" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.635399 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66e2fff1-64a5-46fd-8e59-ade07f0cd3c4-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "66e2fff1-64a5-46fd-8e59-ade07f0cd3c4" (UID: "66e2fff1-64a5-46fd-8e59-ade07f0cd3c4"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.654546 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66e2fff1-64a5-46fd-8e59-ade07f0cd3c4-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "66e2fff1-64a5-46fd-8e59-ade07f0cd3c4" (UID: "66e2fff1-64a5-46fd-8e59-ade07f0cd3c4"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.713286 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/alertmanager-metric-storage-0"] Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.715190 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.737115 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-alertmanager-dockercfg-mjvfz" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.737139 4982 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/66e2fff1-64a5-46fd-8e59-ade07f0cd3c4-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.737169 4982 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/66e2fff1-64a5-46fd-8e59-ade07f0cd3c4-openstack-config\") on node \"crc\" DevicePath \"\"" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.737326 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-generated" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.741115 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-web-config" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.741307 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-tls-assets-0" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.748441 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-cluster-tls-config" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.792289 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66e2fff1-64a5-46fd-8e59-ade07f0cd3c4" path="/var/lib/kubelet/pods/66e2fff1-64a5-46fd-8e59-ade07f0cd3c4/volumes" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.792949 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.842384 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/4b519ad5-12f9-4681-851d-a7da821a20f2-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"4b519ad5-12f9-4681-851d-a7da821a20f2\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.842419 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/4b519ad5-12f9-4681-851d-a7da821a20f2-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"4b519ad5-12f9-4681-851d-a7da821a20f2\") " 
pod="openstack/alertmanager-metric-storage-0" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.842507 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6hnr\" (UniqueName: \"kubernetes.io/projected/4b519ad5-12f9-4681-851d-a7da821a20f2-kube-api-access-q6hnr\") pod \"alertmanager-metric-storage-0\" (UID: \"4b519ad5-12f9-4681-851d-a7da821a20f2\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.842526 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/4b519ad5-12f9-4681-851d-a7da821a20f2-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"4b519ad5-12f9-4681-851d-a7da821a20f2\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.842598 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/4b519ad5-12f9-4681-851d-a7da821a20f2-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"4b519ad5-12f9-4681-851d-a7da821a20f2\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.842640 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/4b519ad5-12f9-4681-851d-a7da821a20f2-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"4b519ad5-12f9-4681-851d-a7da821a20f2\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.842718 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/4b519ad5-12f9-4681-851d-a7da821a20f2-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"4b519ad5-12f9-4681-851d-a7da821a20f2\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.943927 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/4b519ad5-12f9-4681-851d-a7da821a20f2-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"4b519ad5-12f9-4681-851d-a7da821a20f2\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.944258 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/4b519ad5-12f9-4681-851d-a7da821a20f2-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"4b519ad5-12f9-4681-851d-a7da821a20f2\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.944283 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/4b519ad5-12f9-4681-851d-a7da821a20f2-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"4b519ad5-12f9-4681-851d-a7da821a20f2\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.944347 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q6hnr\" (UniqueName: \"kubernetes.io/projected/4b519ad5-12f9-4681-851d-a7da821a20f2-kube-api-access-q6hnr\") 
pod \"alertmanager-metric-storage-0\" (UID: \"4b519ad5-12f9-4681-851d-a7da821a20f2\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.944366 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/4b519ad5-12f9-4681-851d-a7da821a20f2-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"4b519ad5-12f9-4681-851d-a7da821a20f2\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.944412 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/4b519ad5-12f9-4681-851d-a7da821a20f2-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"4b519ad5-12f9-4681-851d-a7da821a20f2\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.944443 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/4b519ad5-12f9-4681-851d-a7da821a20f2-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"4b519ad5-12f9-4681-851d-a7da821a20f2\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.954253 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/4b519ad5-12f9-4681-851d-a7da821a20f2-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"4b519ad5-12f9-4681-851d-a7da821a20f2\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.954428 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/4b519ad5-12f9-4681-851d-a7da821a20f2-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"4b519ad5-12f9-4681-851d-a7da821a20f2\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.955129 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.964697 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/4b519ad5-12f9-4681-851d-a7da821a20f2-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"4b519ad5-12f9-4681-851d-a7da821a20f2\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.966525 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/4b519ad5-12f9-4681-851d-a7da821a20f2-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"4b519ad5-12f9-4681-851d-a7da821a20f2\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.972098 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/4b519ad5-12f9-4681-851d-a7da821a20f2-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"4b519ad5-12f9-4681-851d-a7da821a20f2\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.988974 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-q6hnr\" (UniqueName: \"kubernetes.io/projected/4b519ad5-12f9-4681-851d-a7da821a20f2-kube-api-access-q6hnr\") pod \"alertmanager-metric-storage-0\" (UID: \"4b519ad5-12f9-4681-851d-a7da821a20f2\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 07:30:41 crc kubenswrapper[4982]: I0122 07:30:41.993374 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/4b519ad5-12f9-4681-851d-a7da821a20f2-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"4b519ad5-12f9-4681-851d-a7da821a20f2\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.106397 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.239766 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.242430 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:42 crc kubenswrapper[4982]: W0122 07:30:42.251911 4982 reflector.go:561] object-"openstack"/"metric-storage-prometheus-dockercfg-ktlhc": failed to list *v1.Secret: secrets "metric-storage-prometheus-dockercfg-ktlhc" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Jan 22 07:30:42 crc kubenswrapper[4982]: W0122 07:30:42.251954 4982 reflector.go:561] object-"openstack"/"prometheus-metric-storage-web-config": failed to list *v1.Secret: secrets "prometheus-metric-storage-web-config" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Jan 22 07:30:42 crc kubenswrapper[4982]: E0122 07:30:42.251997 4982 reflector.go:158] "Unhandled Error" err="object-\"openstack\"/\"prometheus-metric-storage-web-config\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"prometheus-metric-storage-web-config\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openstack\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 07:30:42 crc kubenswrapper[4982]: E0122 07:30:42.251954 4982 reflector.go:158] "Unhandled Error" err="object-\"openstack\"/\"metric-storage-prometheus-dockercfg-ktlhc\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"metric-storage-prometheus-dockercfg-ktlhc\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openstack\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 07:30:42 crc kubenswrapper[4982]: W0122 07:30:42.252048 4982 reflector.go:561] object-"openstack"/"prometheus-metric-storage-tls-assets-0": failed to list *v1.Secret: secrets "prometheus-metric-storage-tls-assets-0" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Jan 22 07:30:42 crc kubenswrapper[4982]: W0122 07:30:42.252053 4982 reflector.go:561] object-"openstack"/"prometheus-metric-storage-rulefiles-1": failed to list *v1.ConfigMap: configmaps "prometheus-metric-storage-rulefiles-1" is 
forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Jan 22 07:30:42 crc kubenswrapper[4982]: E0122 07:30:42.252062 4982 reflector.go:158] "Unhandled Error" err="object-\"openstack\"/\"prometheus-metric-storage-tls-assets-0\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"prometheus-metric-storage-tls-assets-0\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openstack\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 07:30:42 crc kubenswrapper[4982]: E0122 07:30:42.252067 4982 reflector.go:158] "Unhandled Error" err="object-\"openstack\"/\"prometheus-metric-storage-rulefiles-1\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"prometheus-metric-storage-rulefiles-1\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openstack\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 07:30:42 crc kubenswrapper[4982]: W0122 07:30:42.252097 4982 reflector.go:561] object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file": failed to list *v1.Secret: secrets "prometheus-metric-storage-thanos-prometheus-http-client-file" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Jan 22 07:30:42 crc kubenswrapper[4982]: W0122 07:30:42.252105 4982 reflector.go:561] object-"openstack"/"prometheus-metric-storage-rulefiles-2": failed to list *v1.ConfigMap: configmaps "prometheus-metric-storage-rulefiles-2" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Jan 22 07:30:42 crc kubenswrapper[4982]: E0122 07:30:42.252110 4982 reflector.go:158] "Unhandled Error" err="object-\"openstack\"/\"prometheus-metric-storage-thanos-prometheus-http-client-file\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"prometheus-metric-storage-thanos-prometheus-http-client-file\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openstack\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 07:30:42 crc kubenswrapper[4982]: E0122 07:30:42.252116 4982 reflector.go:158] "Unhandled Error" err="object-\"openstack\"/\"prometheus-metric-storage-rulefiles-2\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"prometheus-metric-storage-rulefiles-2\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openstack\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 07:30:42 crc kubenswrapper[4982]: W0122 07:30:42.252152 4982 reflector.go:561] object-"openstack"/"prometheus-metric-storage": failed to list *v1.Secret: secrets "prometheus-metric-storage" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Jan 22 07:30:42 crc kubenswrapper[4982]: E0122 07:30:42.252162 4982 reflector.go:158] "Unhandled Error" err="object-\"openstack\"/\"prometheus-metric-storage\": Failed to watch 
*v1.Secret: failed to list *v1.Secret: secrets \"prometheus-metric-storage\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openstack\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 07:30:42 crc kubenswrapper[4982]: W0122 07:30:42.252418 4982 reflector.go:561] object-"openstack"/"prometheus-metric-storage-rulefiles-0": failed to list *v1.ConfigMap: configmaps "prometheus-metric-storage-rulefiles-0" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Jan 22 07:30:42 crc kubenswrapper[4982]: E0122 07:30:42.252435 4982 reflector.go:158] "Unhandled Error" err="object-\"openstack\"/\"prometheus-metric-storage-rulefiles-0\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"prometheus-metric-storage-rulefiles-0\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openstack\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.271277 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.368103 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-b9b564bc-f92e-4270-9ed4-9811b167dcbf\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b9b564bc-f92e-4270-9ed4-9811b167dcbf\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.368192 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.368262 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.368302 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.368352 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p92jq\" (UniqueName: \"kubernetes.io/projected/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-kube-api-access-p92jq\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.368425 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.368605 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.369577 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.369648 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-config\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.369701 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.472952 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p92jq\" (UniqueName: \"kubernetes.io/projected/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-kube-api-access-p92jq\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.473032 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.473119 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.473143 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: 
\"kubernetes.io/configmap/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.473178 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-config\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.473225 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.473267 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-b9b564bc-f92e-4270-9ed4-9811b167dcbf\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b9b564bc-f92e-4270-9ed4-9811b167dcbf\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.473291 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.473319 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.473346 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.478940 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.482475 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.482501 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-b9b564bc-f92e-4270-9ed4-9811b167dcbf\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b9b564bc-f92e-4270-9ed4-9811b167dcbf\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/66eaeb4215b0a8fd344d4f426efd2143403d4475b991581041ec6c1688b57d65/globalmount\"" pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.502688 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p92jq\" (UniqueName: \"kubernetes.io/projected/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-kube-api-access-p92jq\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.565095 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"a473cc16-7761-429e-b28a-b2d502832ef9","Type":"ContainerStarted","Data":"b67e54f5bd449d228a0d96e6797367969b5a83f82d76c710439bb2ddb4d83b91"} Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.575249 4982 generic.go:334] "Generic (PLEG): container finished" podID="89ac01c7-1b9d-4b89-867e-b581cbe713c7" containerID="a61b5026d88fd140e2baa8b23e3fbc705b899eac7411417813294620ac7f52c7" exitCode=0 Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.575348 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.575873 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8gfth" event={"ID":"89ac01c7-1b9d-4b89-867e-b581cbe713c7","Type":"ContainerDied","Data":"a61b5026d88fd140e2baa8b23e3fbc705b899eac7411417813294620ac7f52c7"} Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.591777 4982 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="66e2fff1-64a5-46fd-8e59-ade07f0cd3c4" podUID="a473cc16-7761-429e-b28a-b2d502832ef9" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.598775 4982 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="66e2fff1-64a5-46fd-8e59-ade07f0cd3c4" podUID="a473cc16-7761-429e-b28a-b2d502832ef9" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.692354 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 22 07:30:42 crc kubenswrapper[4982]: W0122 07:30:42.743609 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4b2d188_afb5_4d60_8376_b187161abe16.slice/crio-b4685fe58032dedcce4f5dab026e347419ab5c651a72167a10ea06634370e240 WatchSource:0}: Error finding container b4685fe58032dedcce4f5dab026e347419ab5c651a72167a10ea06634370e240: Status 404 returned error can't find the container with id b4685fe58032dedcce4f5dab026e347419ab5c651a72167a10ea06634370e240 Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.797971 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8gfth" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.806281 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-b9b564bc-f92e-4270-9ed4-9811b167dcbf\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b9b564bc-f92e-4270-9ed4-9811b167dcbf\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.882898 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89ac01c7-1b9d-4b89-867e-b581cbe713c7-catalog-content\") pod \"89ac01c7-1b9d-4b89-867e-b581cbe713c7\" (UID: \"89ac01c7-1b9d-4b89-867e-b581cbe713c7\") " Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.883323 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89ac01c7-1b9d-4b89-867e-b581cbe713c7-utilities\") pod \"89ac01c7-1b9d-4b89-867e-b581cbe713c7\" (UID: \"89ac01c7-1b9d-4b89-867e-b581cbe713c7\") " Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.883406 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pnfvt\" (UniqueName: \"kubernetes.io/projected/89ac01c7-1b9d-4b89-867e-b581cbe713c7-kube-api-access-pnfvt\") pod \"89ac01c7-1b9d-4b89-867e-b581cbe713c7\" (UID: \"89ac01c7-1b9d-4b89-867e-b581cbe713c7\") " Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.884710 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89ac01c7-1b9d-4b89-867e-b581cbe713c7-utilities" (OuterVolumeSpecName: "utilities") pod "89ac01c7-1b9d-4b89-867e-b581cbe713c7" (UID: "89ac01c7-1b9d-4b89-867e-b581cbe713c7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.889096 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89ac01c7-1b9d-4b89-867e-b581cbe713c7-kube-api-access-pnfvt" (OuterVolumeSpecName: "kube-api-access-pnfvt") pod "89ac01c7-1b9d-4b89-867e-b581cbe713c7" (UID: "89ac01c7-1b9d-4b89-867e-b581cbe713c7"). InnerVolumeSpecName "kube-api-access-pnfvt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.897763 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.934202 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89ac01c7-1b9d-4b89-867e-b581cbe713c7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "89ac01c7-1b9d-4b89-867e-b581cbe713c7" (UID: "89ac01c7-1b9d-4b89-867e-b581cbe713c7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.987878 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89ac01c7-1b9d-4b89-867e-b581cbe713c7-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.987908 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pnfvt\" (UniqueName: \"kubernetes.io/projected/89ac01c7-1b9d-4b89-867e-b581cbe713c7-kube-api-access-pnfvt\") on node \"crc\" DevicePath \"\"" Jan 22 07:30:42 crc kubenswrapper[4982]: I0122 07:30:42.987918 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89ac01c7-1b9d-4b89-867e-b581cbe713c7-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.107098 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.119580 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.210340 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.292933 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-59vrx\" (UniqueName: \"kubernetes.io/projected/1fde4825-4373-436f-9da0-2ee79b723de4-kube-api-access-59vrx\") pod \"1fde4825-4373-436f-9da0-2ee79b723de4\" (UID: \"1fde4825-4373-436f-9da0-2ee79b723de4\") " Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.293336 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1fde4825-4373-436f-9da0-2ee79b723de4-openstack-config\") pod \"1fde4825-4373-436f-9da0-2ee79b723de4\" (UID: \"1fde4825-4373-436f-9da0-2ee79b723de4\") " Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.293430 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1fde4825-4373-436f-9da0-2ee79b723de4-openstack-config-secret\") pod \"1fde4825-4373-436f-9da0-2ee79b723de4\" (UID: \"1fde4825-4373-436f-9da0-2ee79b723de4\") " Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.300317 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fde4825-4373-436f-9da0-2ee79b723de4-kube-api-access-59vrx" (OuterVolumeSpecName: "kube-api-access-59vrx") pod "1fde4825-4373-436f-9da0-2ee79b723de4" (UID: "1fde4825-4373-436f-9da0-2ee79b723de4"). InnerVolumeSpecName "kube-api-access-59vrx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.330612 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1fde4825-4373-436f-9da0-2ee79b723de4-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "1fde4825-4373-436f-9da0-2ee79b723de4" (UID: "1fde4825-4373-436f-9da0-2ee79b723de4"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.366072 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fde4825-4373-436f-9da0-2ee79b723de4-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "1fde4825-4373-436f-9da0-2ee79b723de4" (UID: "1fde4825-4373-436f-9da0-2ee79b723de4"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.395603 4982 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1fde4825-4373-436f-9da0-2ee79b723de4-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.395635 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-59vrx\" (UniqueName: \"kubernetes.io/projected/1fde4825-4373-436f-9da0-2ee79b723de4-kube-api-access-59vrx\") on node \"crc\" DevicePath \"\"" Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.395644 4982 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1fde4825-4373-436f-9da0-2ee79b723de4-openstack-config\") on node \"crc\" DevicePath \"\"" Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.450416 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-2" Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.455348 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:43 crc kubenswrapper[4982]: E0122 07:30:43.474298 4982 configmap.go:193] Couldn't get configMap openstack/prometheus-metric-storage-rulefiles-0: failed to sync configmap cache: timed out waiting for the condition Jan 22 07:30:43 crc kubenswrapper[4982]: E0122 07:30:43.474298 4982 projected.go:263] Couldn't get secret openstack/prometheus-metric-storage-tls-assets-0: failed to sync secret cache: timed out waiting for the condition Jan 22 07:30:43 crc kubenswrapper[4982]: E0122 07:30:43.474581 4982 projected.go:194] Error preparing data for projected volume tls-assets for pod openstack/prometheus-metric-storage-0: failed to sync secret cache: timed out waiting for the condition Jan 22 07:30:43 crc kubenswrapper[4982]: E0122 07:30:43.474311 4982 secret.go:188] Couldn't get secret openstack/prometheus-metric-storage: failed to sync secret cache: timed out waiting for the condition Jan 22 07:30:43 crc kubenswrapper[4982]: E0122 07:30:43.474338 4982 secret.go:188] Couldn't get secret openstack/prometheus-metric-storage-thanos-prometheus-http-client-file: failed to sync secret cache: timed out waiting for the condition Jan 22 07:30:43 crc kubenswrapper[4982]: E0122 07:30:43.474367 4982 configmap.go:193] Couldn't get configMap openstack/prometheus-metric-storage-rulefiles-1: failed to sync configmap cache: timed out waiting for the condition Jan 22 07:30:43 crc kubenswrapper[4982]: E0122 07:30:43.474563 4982 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/configmap/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-prometheus-metric-storage-rulefiles-0 podName:a4d1e87c-2e5d-4f07-b5aa-f56d61651468 nodeName:}" failed. No retries permitted until 2026-01-22 07:30:43.974543073 +0000 UTC m=+6304.813181076 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "prometheus-metric-storage-rulefiles-0" (UniqueName: "kubernetes.io/configmap/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-prometheus-metric-storage-rulefiles-0") pod "prometheus-metric-storage-0" (UID: "a4d1e87c-2e5d-4f07-b5aa-f56d61651468") : failed to sync configmap cache: timed out waiting for the condition Jan 22 07:30:43 crc kubenswrapper[4982]: E0122 07:30:43.474722 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-tls-assets podName:a4d1e87c-2e5d-4f07-b5aa-f56d61651468 nodeName:}" failed. No retries permitted until 2026-01-22 07:30:43.974701257 +0000 UTC m=+6304.813339320 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-assets" (UniqueName: "kubernetes.io/projected/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-tls-assets") pod "prometheus-metric-storage-0" (UID: "a4d1e87c-2e5d-4f07-b5aa-f56d61651468") : failed to sync secret cache: timed out waiting for the condition Jan 22 07:30:43 crc kubenswrapper[4982]: E0122 07:30:43.474742 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-config podName:a4d1e87c-2e5d-4f07-b5aa-f56d61651468 nodeName:}" failed. No retries permitted until 2026-01-22 07:30:43.974735428 +0000 UTC m=+6304.813373511 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/secret/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-config") pod "prometheus-metric-storage-0" (UID: "a4d1e87c-2e5d-4f07-b5aa-f56d61651468") : failed to sync secret cache: timed out waiting for the condition Jan 22 07:30:43 crc kubenswrapper[4982]: E0122 07:30:43.474768 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-thanos-prometheus-http-client-file podName:a4d1e87c-2e5d-4f07-b5aa-f56d61651468 nodeName:}" failed. No retries permitted until 2026-01-22 07:30:43.974761789 +0000 UTC m=+6304.813399882 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "thanos-prometheus-http-client-file" (UniqueName: "kubernetes.io/secret/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-thanos-prometheus-http-client-file") pod "prometheus-metric-storage-0" (UID: "a4d1e87c-2e5d-4f07-b5aa-f56d61651468") : failed to sync secret cache: timed out waiting for the condition Jan 22 07:30:43 crc kubenswrapper[4982]: E0122 07:30:43.474789 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-prometheus-metric-storage-rulefiles-1 podName:a4d1e87c-2e5d-4f07-b5aa-f56d61651468 nodeName:}" failed. No retries permitted until 2026-01-22 07:30:43.974779899 +0000 UTC m=+6304.813417992 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "prometheus-metric-storage-rulefiles-1" (UniqueName: "kubernetes.io/configmap/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-prometheus-metric-storage-rulefiles-1") pod "prometheus-metric-storage-0" (UID: "a4d1e87c-2e5d-4f07-b5aa-f56d61651468") : failed to sync configmap cache: timed out waiting for the condition Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.516981 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.546434 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.590514 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8gfth" event={"ID":"89ac01c7-1b9d-4b89-867e-b581cbe713c7","Type":"ContainerDied","Data":"f3939db1619169de51f77ab905aabb591f9a7fa4ef1d849e1e8ba1cec1f25c80"} Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.590595 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8gfth" Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.590594 4982 scope.go:117] "RemoveContainer" containerID="a61b5026d88fd140e2baa8b23e3fbc705b899eac7411417813294620ac7f52c7" Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.594817 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-1" Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.596402 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"a473cc16-7761-429e-b28a-b2d502832ef9","Type":"ContainerStarted","Data":"fe41ea100901741c33eeb56a751e199d156e065a4648a000885aac142ac92de4"} Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.604576 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e4b2d188-afb5-4d60-8376-b187161abe16","Type":"ContainerStarted","Data":"41dbbf50e8e03ba39b674074330722c41852a34a0b3914c062c0fbc3775b0d95"} Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.604670 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e4b2d188-afb5-4d60-8376-b187161abe16","Type":"ContainerStarted","Data":"b4685fe58032dedcce4f5dab026e347419ab5c651a72167a10ea06634370e240"} Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.604696 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.608255 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"4b519ad5-12f9-4681-851d-a7da821a20f2","Type":"ContainerStarted","Data":"abb4f880d57b1a0331e6b0dd423038fed8e8e35ee8fdaa2332f35635cd1e82fb"} Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.609769 4982 generic.go:334] "Generic (PLEG): container finished" podID="1fde4825-4373-436f-9da0-2ee79b723de4" containerID="cb2dabe31fcd2f0b70d5ece9914cf1373cb4111b59727680e79cb61fed97ba21" exitCode=137 Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.609892 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.632326 4982 scope.go:117] "RemoveContainer" containerID="04c13f0e73d10edcaefce56e4e457fe10aa32df515061c86613789a9d0be7ccf" Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.633478 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=3.633450673 podStartE2EDuration="3.633450673s" podCreationTimestamp="2026-01-22 07:30:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:30:43.615069938 +0000 UTC m=+6304.453707941" watchObservedRunningTime="2026-01-22 07:30:43.633450673 +0000 UTC m=+6304.472088676" Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.639954 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=3.256352131 podStartE2EDuration="3.639933509s" podCreationTimestamp="2026-01-22 07:30:40 +0000 UTC" firstStartedPulling="2026-01-22 07:30:42.752083704 +0000 UTC m=+6303.590721707" lastFinishedPulling="2026-01-22 07:30:43.135665082 +0000 UTC m=+6303.974303085" observedRunningTime="2026-01-22 07:30:43.639777214 +0000 UTC m=+6304.478415237" watchObservedRunningTime="2026-01-22 07:30:43.639933509 +0000 UTC m=+6304.478571512" Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.643978 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.668823 4982 scope.go:117] "RemoveContainer" containerID="205f648c66d8403a496c7d89cdcb08a400f37967a602dd0521a8f8677ddf8ea7" Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.674498 4982 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="1fde4825-4373-436f-9da0-2ee79b723de4" podUID="a473cc16-7761-429e-b28a-b2d502832ef9" Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.680346 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8gfth"] Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.684945 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.700802 4982 scope.go:117] "RemoveContainer" containerID="cb2dabe31fcd2f0b70d5ece9914cf1373cb4111b59727680e79cb61fed97ba21" Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.709272 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-8gfth"] Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.730232 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1fde4825-4373-436f-9da0-2ee79b723de4" path="/var/lib/kubelet/pods/1fde4825-4373-436f-9da0-2ee79b723de4/volumes" Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.730758 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89ac01c7-1b9d-4b89-867e-b581cbe713c7" path="/var/lib/kubelet/pods/89ac01c7-1b9d-4b89-867e-b581cbe713c7/volumes" Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.731698 4982 scope.go:117] "RemoveContainer" containerID="cb2dabe31fcd2f0b70d5ece9914cf1373cb4111b59727680e79cb61fed97ba21" Jan 22 07:30:43 crc kubenswrapper[4982]: E0122 07:30:43.732133 4982 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"cb2dabe31fcd2f0b70d5ece9914cf1373cb4111b59727680e79cb61fed97ba21\": container with ID starting with cb2dabe31fcd2f0b70d5ece9914cf1373cb4111b59727680e79cb61fed97ba21 not found: ID does not exist" containerID="cb2dabe31fcd2f0b70d5ece9914cf1373cb4111b59727680e79cb61fed97ba21" Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.732162 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb2dabe31fcd2f0b70d5ece9914cf1373cb4111b59727680e79cb61fed97ba21"} err="failed to get container status \"cb2dabe31fcd2f0b70d5ece9914cf1373cb4111b59727680e79cb61fed97ba21\": rpc error: code = NotFound desc = could not find container \"cb2dabe31fcd2f0b70d5ece9914cf1373cb4111b59727680e79cb61fed97ba21\": container with ID starting with cb2dabe31fcd2f0b70d5ece9914cf1373cb4111b59727680e79cb61fed97ba21 not found: ID does not exist" Jan 22 07:30:43 crc kubenswrapper[4982]: I0122 07:30:43.804317 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-ktlhc" Jan 22 07:30:44 crc kubenswrapper[4982]: I0122 07:30:44.007814 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:44 crc kubenswrapper[4982]: I0122 07:30:44.008202 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:44 crc kubenswrapper[4982]: I0122 07:30:44.008279 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:44 crc kubenswrapper[4982]: I0122 07:30:44.008305 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:44 crc kubenswrapper[4982]: I0122 07:30:44.008339 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-config\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:44 crc kubenswrapper[4982]: I0122 07:30:44.009080 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " 
pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:44 crc kubenswrapper[4982]: I0122 07:30:44.009244 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:44 crc kubenswrapper[4982]: I0122 07:30:44.011712 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:44 crc kubenswrapper[4982]: I0122 07:30:44.012132 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:44 crc kubenswrapper[4982]: I0122 07:30:44.012411 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/a4d1e87c-2e5d-4f07-b5aa-f56d61651468-config\") pod \"prometheus-metric-storage-0\" (UID: \"a4d1e87c-2e5d-4f07-b5aa-f56d61651468\") " pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:44 crc kubenswrapper[4982]: I0122 07:30:44.069828 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Jan 22 07:30:45 crc kubenswrapper[4982]: I0122 07:30:44.655131 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 22 07:30:45 crc kubenswrapper[4982]: W0122 07:30:44.658907 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4d1e87c_2e5d_4f07_b5aa_f56d61651468.slice/crio-407c7ad2331eb9f073d510c503914126b9001c4adb1e587aa63dcdf574a7a136 WatchSource:0}: Error finding container 407c7ad2331eb9f073d510c503914126b9001c4adb1e587aa63dcdf574a7a136: Status 404 returned error can't find the container with id 407c7ad2331eb9f073d510c503914126b9001c4adb1e587aa63dcdf574a7a136 Jan 22 07:30:45 crc kubenswrapper[4982]: I0122 07:30:45.679714 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"a4d1e87c-2e5d-4f07-b5aa-f56d61651468","Type":"ContainerStarted","Data":"407c7ad2331eb9f073d510c503914126b9001c4adb1e587aa63dcdf574a7a136"} Jan 22 07:30:45 crc kubenswrapper[4982]: I0122 07:30:45.722966 4982 scope.go:117] "RemoveContainer" containerID="e86d7de3d5541f26c30d5e3150518e2a21550ebe9adbca61610c9190dc552078" Jan 22 07:30:45 crc kubenswrapper[4982]: E0122 07:30:45.723262 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:30:49 crc kubenswrapper[4982]: I0122 07:30:49.717523 4982 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"a4d1e87c-2e5d-4f07-b5aa-f56d61651468","Type":"ContainerStarted","Data":"f51a5701e57dc76897060fafe60652c5caf2076c094c27023779476037ecf938"} Jan 22 07:30:49 crc kubenswrapper[4982]: I0122 07:30:49.729623 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"4b519ad5-12f9-4681-851d-a7da821a20f2","Type":"ContainerStarted","Data":"8acc5c3edae487f309d4221b7bf16bc8cf5cf140daa823abbb96831bd1903ce8"} Jan 22 07:30:51 crc kubenswrapper[4982]: I0122 07:30:51.294695 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Jan 22 07:30:55 crc kubenswrapper[4982]: I0122 07:30:55.786284 4982 generic.go:334] "Generic (PLEG): container finished" podID="4b519ad5-12f9-4681-851d-a7da821a20f2" containerID="8acc5c3edae487f309d4221b7bf16bc8cf5cf140daa823abbb96831bd1903ce8" exitCode=0 Jan 22 07:30:55 crc kubenswrapper[4982]: I0122 07:30:55.786393 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"4b519ad5-12f9-4681-851d-a7da821a20f2","Type":"ContainerDied","Data":"8acc5c3edae487f309d4221b7bf16bc8cf5cf140daa823abbb96831bd1903ce8"} Jan 22 07:30:55 crc kubenswrapper[4982]: I0122 07:30:55.789234 4982 generic.go:334] "Generic (PLEG): container finished" podID="a4d1e87c-2e5d-4f07-b5aa-f56d61651468" containerID="f51a5701e57dc76897060fafe60652c5caf2076c094c27023779476037ecf938" exitCode=0 Jan 22 07:30:55 crc kubenswrapper[4982]: I0122 07:30:55.789283 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"a4d1e87c-2e5d-4f07-b5aa-f56d61651468","Type":"ContainerDied","Data":"f51a5701e57dc76897060fafe60652c5caf2076c094c27023779476037ecf938"} Jan 22 07:30:58 crc kubenswrapper[4982]: I0122 07:30:58.831922 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"4b519ad5-12f9-4681-851d-a7da821a20f2","Type":"ContainerStarted","Data":"d19ffaf7720585502043d47d9fd10eb58b12d4c020c91d3142f2815747893cc6"} Jan 22 07:31:00 crc kubenswrapper[4982]: I0122 07:31:00.719001 4982 scope.go:117] "RemoveContainer" containerID="e86d7de3d5541f26c30d5e3150518e2a21550ebe9adbca61610c9190dc552078" Jan 22 07:31:01 crc kubenswrapper[4982]: I0122 07:31:01.871510 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"4b519ad5-12f9-4681-851d-a7da821a20f2","Type":"ContainerStarted","Data":"19eb419394274a61e88d7db26b51421c9cf49e9a71cceb06454fe52cf75365c0"} Jan 22 07:31:01 crc kubenswrapper[4982]: I0122 07:31:01.872907 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/alertmanager-metric-storage-0" Jan 22 07:31:01 crc kubenswrapper[4982]: I0122 07:31:01.875502 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/alertmanager-metric-storage-0" Jan 22 07:31:01 crc kubenswrapper[4982]: I0122 07:31:01.903747 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/alertmanager-metric-storage-0" podStartSLOduration=5.669115666 podStartE2EDuration="20.903728896s" podCreationTimestamp="2026-01-22 07:30:41 +0000 UTC" firstStartedPulling="2026-01-22 07:30:42.932011183 +0000 UTC m=+6303.770649186" lastFinishedPulling="2026-01-22 07:30:58.166624413 +0000 UTC m=+6319.005262416" observedRunningTime="2026-01-22 
07:31:01.892470582 +0000 UTC m=+6322.731108625" watchObservedRunningTime="2026-01-22 07:31:01.903728896 +0000 UTC m=+6322.742366899" Jan 22 07:31:03 crc kubenswrapper[4982]: I0122 07:31:03.898174 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"a4d1e87c-2e5d-4f07-b5aa-f56d61651468","Type":"ContainerStarted","Data":"7af30db813a7d30575a94471b3e0759e54c5e2bfbe5b14ad849d3d43a9928511"} Jan 22 07:31:03 crc kubenswrapper[4982]: I0122 07:31:03.900711 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"19d585995a1a34566da5efb1814b5c4cba5ac6e44bd9e57fd395fd3d6c801d5a"} Jan 22 07:31:07 crc kubenswrapper[4982]: I0122 07:31:07.121252 4982 scope.go:117] "RemoveContainer" containerID="f9c7323d761213c9a2e77661b39b87257cbb660b961d5a20095c5a33b5885bdb" Jan 22 07:31:07 crc kubenswrapper[4982]: I0122 07:31:07.192030 4982 scope.go:117] "RemoveContainer" containerID="37dd053aa395863a7ed087aef355f968037903e695d5b8024276bcf8476373a1" Jan 22 07:31:07 crc kubenswrapper[4982]: I0122 07:31:07.235970 4982 scope.go:117] "RemoveContainer" containerID="b35ddf4ca6a5a5256d6181255cb43f80c16cdc208c5f2fba1aa41b0724f2973f" Jan 22 07:31:07 crc kubenswrapper[4982]: I0122 07:31:07.264648 4982 scope.go:117] "RemoveContainer" containerID="276bc140d3653f77c7c4033e854c9e0b888327a783659c51968322d8e2fd52ed" Jan 22 07:31:07 crc kubenswrapper[4982]: I0122 07:31:07.935477 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"a4d1e87c-2e5d-4f07-b5aa-f56d61651468","Type":"ContainerStarted","Data":"1412cde92acf79a91c627aebf08ac76332bc380cef28b3f7d8903acac6035b36"} Jan 22 07:31:10 crc kubenswrapper[4982]: I0122 07:31:10.969638 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"a4d1e87c-2e5d-4f07-b5aa-f56d61651468","Type":"ContainerStarted","Data":"dec10d8958ba1651a485a21b65b31adaa7099eec00de91a44dd39e6ef464b602"} Jan 22 07:31:11 crc kubenswrapper[4982]: I0122 07:31:11.022713 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=4.46458727 podStartE2EDuration="30.022694336s" podCreationTimestamp="2026-01-22 07:30:41 +0000 UTC" firstStartedPulling="2026-01-22 07:30:44.660648321 +0000 UTC m=+6305.499286324" lastFinishedPulling="2026-01-22 07:31:10.218755347 +0000 UTC m=+6331.057393390" observedRunningTime="2026-01-22 07:31:11.001435602 +0000 UTC m=+6331.840073615" watchObservedRunningTime="2026-01-22 07:31:11.022694336 +0000 UTC m=+6331.861332359" Jan 22 07:31:14 crc kubenswrapper[4982]: I0122 07:31:14.070565 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Jan 22 07:31:14 crc kubenswrapper[4982]: I0122 07:31:14.071302 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Jan 22 07:31:14 crc kubenswrapper[4982]: I0122 07:31:14.074201 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Jan 22 07:31:14 crc kubenswrapper[4982]: I0122 07:31:14.808386 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 22 07:31:14 crc kubenswrapper[4982]: E0122 07:31:14.809002 4982 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="89ac01c7-1b9d-4b89-867e-b581cbe713c7" containerName="extract-utilities" Jan 22 07:31:14 crc kubenswrapper[4982]: I0122 07:31:14.809017 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="89ac01c7-1b9d-4b89-867e-b581cbe713c7" containerName="extract-utilities" Jan 22 07:31:14 crc kubenswrapper[4982]: E0122 07:31:14.809061 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89ac01c7-1b9d-4b89-867e-b581cbe713c7" containerName="registry-server" Jan 22 07:31:14 crc kubenswrapper[4982]: I0122 07:31:14.809071 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="89ac01c7-1b9d-4b89-867e-b581cbe713c7" containerName="registry-server" Jan 22 07:31:14 crc kubenswrapper[4982]: E0122 07:31:14.809094 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89ac01c7-1b9d-4b89-867e-b581cbe713c7" containerName="extract-content" Jan 22 07:31:14 crc kubenswrapper[4982]: I0122 07:31:14.809103 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="89ac01c7-1b9d-4b89-867e-b581cbe713c7" containerName="extract-content" Jan 22 07:31:14 crc kubenswrapper[4982]: I0122 07:31:14.809332 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="89ac01c7-1b9d-4b89-867e-b581cbe713c7" containerName="registry-server" Jan 22 07:31:14 crc kubenswrapper[4982]: I0122 07:31:14.811006 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 07:31:14 crc kubenswrapper[4982]: I0122 07:31:14.812895 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 22 07:31:14 crc kubenswrapper[4982]: I0122 07:31:14.813236 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 22 07:31:14 crc kubenswrapper[4982]: I0122 07:31:14.829386 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 07:31:14 crc kubenswrapper[4982]: I0122 07:31:14.950599 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d92a834e-b7a1-4c75-bad0-7fb0220ef086-log-httpd\") pod \"ceilometer-0\" (UID: \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\") " pod="openstack/ceilometer-0" Jan 22 07:31:14 crc kubenswrapper[4982]: I0122 07:31:14.950666 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d92a834e-b7a1-4c75-bad0-7fb0220ef086-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\") " pod="openstack/ceilometer-0" Jan 22 07:31:14 crc kubenswrapper[4982]: I0122 07:31:14.950747 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d92a834e-b7a1-4c75-bad0-7fb0220ef086-run-httpd\") pod \"ceilometer-0\" (UID: \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\") " pod="openstack/ceilometer-0" Jan 22 07:31:14 crc kubenswrapper[4982]: I0122 07:31:14.950842 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d92a834e-b7a1-4c75-bad0-7fb0220ef086-scripts\") pod \"ceilometer-0\" (UID: \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\") " pod="openstack/ceilometer-0" Jan 22 07:31:14 crc kubenswrapper[4982]: I0122 07:31:14.950924 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d92a834e-b7a1-4c75-bad0-7fb0220ef086-config-data\") pod \"ceilometer-0\" (UID: \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\") " pod="openstack/ceilometer-0" Jan 22 07:31:14 crc kubenswrapper[4982]: I0122 07:31:14.950957 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d92a834e-b7a1-4c75-bad0-7fb0220ef086-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\") " pod="openstack/ceilometer-0" Jan 22 07:31:14 crc kubenswrapper[4982]: I0122 07:31:14.950985 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hz94k\" (UniqueName: \"kubernetes.io/projected/d92a834e-b7a1-4c75-bad0-7fb0220ef086-kube-api-access-hz94k\") pod \"ceilometer-0\" (UID: \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\") " pod="openstack/ceilometer-0" Jan 22 07:31:15 crc kubenswrapper[4982]: I0122 07:31:15.008712 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Jan 22 07:31:15 crc kubenswrapper[4982]: I0122 07:31:15.062098 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d92a834e-b7a1-4c75-bad0-7fb0220ef086-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\") " pod="openstack/ceilometer-0" Jan 22 07:31:15 crc kubenswrapper[4982]: I0122 07:31:15.062212 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d92a834e-b7a1-4c75-bad0-7fb0220ef086-run-httpd\") pod \"ceilometer-0\" (UID: \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\") " pod="openstack/ceilometer-0" Jan 22 07:31:15 crc kubenswrapper[4982]: I0122 07:31:15.065123 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d92a834e-b7a1-4c75-bad0-7fb0220ef086-scripts\") pod \"ceilometer-0\" (UID: \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\") " pod="openstack/ceilometer-0" Jan 22 07:31:15 crc kubenswrapper[4982]: I0122 07:31:15.065206 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d92a834e-b7a1-4c75-bad0-7fb0220ef086-config-data\") pod \"ceilometer-0\" (UID: \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\") " pod="openstack/ceilometer-0" Jan 22 07:31:15 crc kubenswrapper[4982]: I0122 07:31:15.065250 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d92a834e-b7a1-4c75-bad0-7fb0220ef086-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\") " pod="openstack/ceilometer-0" Jan 22 07:31:15 crc kubenswrapper[4982]: I0122 07:31:15.065285 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hz94k\" (UniqueName: \"kubernetes.io/projected/d92a834e-b7a1-4c75-bad0-7fb0220ef086-kube-api-access-hz94k\") pod \"ceilometer-0\" (UID: \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\") " pod="openstack/ceilometer-0" Jan 22 07:31:15 crc kubenswrapper[4982]: I0122 07:31:15.065476 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d92a834e-b7a1-4c75-bad0-7fb0220ef086-log-httpd\") pod 
\"ceilometer-0\" (UID: \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\") " pod="openstack/ceilometer-0" Jan 22 07:31:15 crc kubenswrapper[4982]: I0122 07:31:15.065706 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d92a834e-b7a1-4c75-bad0-7fb0220ef086-run-httpd\") pod \"ceilometer-0\" (UID: \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\") " pod="openstack/ceilometer-0" Jan 22 07:31:15 crc kubenswrapper[4982]: I0122 07:31:15.066730 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d92a834e-b7a1-4c75-bad0-7fb0220ef086-log-httpd\") pod \"ceilometer-0\" (UID: \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\") " pod="openstack/ceilometer-0" Jan 22 07:31:15 crc kubenswrapper[4982]: I0122 07:31:15.076377 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d92a834e-b7a1-4c75-bad0-7fb0220ef086-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\") " pod="openstack/ceilometer-0" Jan 22 07:31:15 crc kubenswrapper[4982]: I0122 07:31:15.092019 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d92a834e-b7a1-4c75-bad0-7fb0220ef086-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\") " pod="openstack/ceilometer-0" Jan 22 07:31:15 crc kubenswrapper[4982]: I0122 07:31:15.092527 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d92a834e-b7a1-4c75-bad0-7fb0220ef086-config-data\") pod \"ceilometer-0\" (UID: \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\") " pod="openstack/ceilometer-0" Jan 22 07:31:15 crc kubenswrapper[4982]: I0122 07:31:15.094569 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hz94k\" (UniqueName: \"kubernetes.io/projected/d92a834e-b7a1-4c75-bad0-7fb0220ef086-kube-api-access-hz94k\") pod \"ceilometer-0\" (UID: \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\") " pod="openstack/ceilometer-0" Jan 22 07:31:15 crc kubenswrapper[4982]: I0122 07:31:15.096710 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d92a834e-b7a1-4c75-bad0-7fb0220ef086-scripts\") pod \"ceilometer-0\" (UID: \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\") " pod="openstack/ceilometer-0" Jan 22 07:31:15 crc kubenswrapper[4982]: I0122 07:31:15.130960 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 22 07:31:15 crc kubenswrapper[4982]: W0122 07:31:15.688660 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd92a834e_b7a1_4c75_bad0_7fb0220ef086.slice/crio-532c94a4b0670e7db33d2fb103898364b314e926f11c12c19466a99ddc262acb WatchSource:0}: Error finding container 532c94a4b0670e7db33d2fb103898364b314e926f11c12c19466a99ddc262acb: Status 404 returned error can't find the container with id 532c94a4b0670e7db33d2fb103898364b314e926f11c12c19466a99ddc262acb Jan 22 07:31:15 crc kubenswrapper[4982]: I0122 07:31:15.696097 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 07:31:16 crc kubenswrapper[4982]: I0122 07:31:16.019751 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d92a834e-b7a1-4c75-bad0-7fb0220ef086","Type":"ContainerStarted","Data":"532c94a4b0670e7db33d2fb103898364b314e926f11c12c19466a99ddc262acb"} Jan 22 07:31:19 crc kubenswrapper[4982]: I0122 07:31:19.054970 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d92a834e-b7a1-4c75-bad0-7fb0220ef086","Type":"ContainerStarted","Data":"aa2ef6f99461d8f03fc9b29b52e4774e84c6cb2e116d71852d837879534a4a15"} Jan 22 07:31:19 crc kubenswrapper[4982]: I0122 07:31:19.055464 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d92a834e-b7a1-4c75-bad0-7fb0220ef086","Type":"ContainerStarted","Data":"5781abecb0862ca1c9624831f81f416fa586a810459327dded876737b48b91e4"} Jan 22 07:31:20 crc kubenswrapper[4982]: I0122 07:31:20.064497 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d92a834e-b7a1-4c75-bad0-7fb0220ef086","Type":"ContainerStarted","Data":"f6ed40ee201a387ed00fa5e6cdf48a1b73e0e18205dbe92ceb6ce2519199ea21"} Jan 22 07:31:22 crc kubenswrapper[4982]: I0122 07:31:22.084997 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d92a834e-b7a1-4c75-bad0-7fb0220ef086","Type":"ContainerStarted","Data":"a25bb6c379aee5d7995e68c2a0760dae4b91d70c2077c66009730708be54b7d2"} Jan 22 07:31:22 crc kubenswrapper[4982]: I0122 07:31:22.086581 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 22 07:31:22 crc kubenswrapper[4982]: I0122 07:31:22.119271 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.753412963 podStartE2EDuration="8.119253437s" podCreationTimestamp="2026-01-22 07:31:14 +0000 UTC" firstStartedPulling="2026-01-22 07:31:15.69119374 +0000 UTC m=+6336.529831743" lastFinishedPulling="2026-01-22 07:31:21.057034214 +0000 UTC m=+6341.895672217" observedRunningTime="2026-01-22 07:31:22.10937642 +0000 UTC m=+6342.948014433" watchObservedRunningTime="2026-01-22 07:31:22.119253437 +0000 UTC m=+6342.957891440" Jan 22 07:31:25 crc kubenswrapper[4982]: I0122 07:31:25.047731 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-e101-account-create-update-qcbwn"] Jan 22 07:31:25 crc kubenswrapper[4982]: I0122 07:31:25.058246 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-mdz95"] Jan 22 07:31:25 crc kubenswrapper[4982]: I0122 07:31:25.066199 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-e101-account-create-update-qcbwn"] Jan 22 07:31:25 crc 
Jan 22 07:31:25 crc kubenswrapper[4982]: I0122 07:31:25.047731 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-e101-account-create-update-qcbwn"]
Jan 22 07:31:25 crc kubenswrapper[4982]: I0122 07:31:25.058246 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-mdz95"]
Jan 22 07:31:25 crc kubenswrapper[4982]: I0122 07:31:25.066199 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-e101-account-create-update-qcbwn"]
Jan 22 07:31:25 crc kubenswrapper[4982]: I0122 07:31:25.075490 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-mdz95"]
Jan 22 07:31:25 crc kubenswrapper[4982]: I0122 07:31:25.735007 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79c90dad-03c7-4a7d-b392-0a62a5e3cd6e" path="/var/lib/kubelet/pods/79c90dad-03c7-4a7d-b392-0a62a5e3cd6e/volumes"
Jan 22 07:31:25 crc kubenswrapper[4982]: I0122 07:31:25.735916 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d90e2efe-a9e5-4f92-bc09-97e92cae152c" path="/var/lib/kubelet/pods/d90e2efe-a9e5-4f92-bc09-97e92cae152c/volumes"
Jan 22 07:31:27 crc kubenswrapper[4982]: I0122 07:31:27.445477 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-create-stvft"]
Jan 22 07:31:27 crc kubenswrapper[4982]: I0122 07:31:27.448259 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-stvft"
Jan 22 07:31:27 crc kubenswrapper[4982]: I0122 07:31:27.458323 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-stvft"]
Jan 22 07:31:27 crc kubenswrapper[4982]: I0122 07:31:27.555240 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2pz6p\" (UniqueName: \"kubernetes.io/projected/9dede625-5da4-4c2c-b10a-d62d45d2d6cd-kube-api-access-2pz6p\") pod \"aodh-db-create-stvft\" (UID: \"9dede625-5da4-4c2c-b10a-d62d45d2d6cd\") " pod="openstack/aodh-db-create-stvft"
Jan 22 07:31:27 crc kubenswrapper[4982]: I0122 07:31:27.555717 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9dede625-5da4-4c2c-b10a-d62d45d2d6cd-operator-scripts\") pod \"aodh-db-create-stvft\" (UID: \"9dede625-5da4-4c2c-b10a-d62d45d2d6cd\") " pod="openstack/aodh-db-create-stvft"
Jan 22 07:31:27 crc kubenswrapper[4982]: I0122 07:31:27.655783 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-ca6f-account-create-update-x9qzb"]
Jan 22 07:31:27 crc kubenswrapper[4982]: I0122 07:31:27.657803 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-ca6f-account-create-update-x9qzb"]
Jan 22 07:31:27 crc kubenswrapper[4982]: I0122 07:31:27.657938 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-ca6f-account-create-update-x9qzb"
Jan 22 07:31:27 crc kubenswrapper[4982]: I0122 07:31:27.658352 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9dede625-5da4-4c2c-b10a-d62d45d2d6cd-operator-scripts\") pod \"aodh-db-create-stvft\" (UID: \"9dede625-5da4-4c2c-b10a-d62d45d2d6cd\") " pod="openstack/aodh-db-create-stvft"
Jan 22 07:31:27 crc kubenswrapper[4982]: I0122 07:31:27.658482 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2pz6p\" (UniqueName: \"kubernetes.io/projected/9dede625-5da4-4c2c-b10a-d62d45d2d6cd-kube-api-access-2pz6p\") pod \"aodh-db-create-stvft\" (UID: \"9dede625-5da4-4c2c-b10a-d62d45d2d6cd\") " pod="openstack/aodh-db-create-stvft"
Jan 22 07:31:27 crc kubenswrapper[4982]: I0122 07:31:27.659384 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9dede625-5da4-4c2c-b10a-d62d45d2d6cd-operator-scripts\") pod \"aodh-db-create-stvft\" (UID: \"9dede625-5da4-4c2c-b10a-d62d45d2d6cd\") " pod="openstack/aodh-db-create-stvft"
Jan 22 07:31:27 crc kubenswrapper[4982]: I0122 07:31:27.667912 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-db-secret"
Jan 22 07:31:27 crc kubenswrapper[4982]: I0122 07:31:27.689409 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2pz6p\" (UniqueName: \"kubernetes.io/projected/9dede625-5da4-4c2c-b10a-d62d45d2d6cd-kube-api-access-2pz6p\") pod \"aodh-db-create-stvft\" (UID: \"9dede625-5da4-4c2c-b10a-d62d45d2d6cd\") " pod="openstack/aodh-db-create-stvft"
Jan 22 07:31:27 crc kubenswrapper[4982]: I0122 07:31:27.762627 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9fv4\" (UniqueName: \"kubernetes.io/projected/becb33c4-a51f-43dc-a715-38cbd1bd3cbb-kube-api-access-k9fv4\") pod \"aodh-ca6f-account-create-update-x9qzb\" (UID: \"becb33c4-a51f-43dc-a715-38cbd1bd3cbb\") " pod="openstack/aodh-ca6f-account-create-update-x9qzb"
Jan 22 07:31:27 crc kubenswrapper[4982]: I0122 07:31:27.762835 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/becb33c4-a51f-43dc-a715-38cbd1bd3cbb-operator-scripts\") pod \"aodh-ca6f-account-create-update-x9qzb\" (UID: \"becb33c4-a51f-43dc-a715-38cbd1bd3cbb\") " pod="openstack/aodh-ca6f-account-create-update-x9qzb"
Jan 22 07:31:27 crc kubenswrapper[4982]: I0122 07:31:27.806044 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-stvft"
Jan 22 07:31:27 crc kubenswrapper[4982]: I0122 07:31:27.864843 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9fv4\" (UniqueName: \"kubernetes.io/projected/becb33c4-a51f-43dc-a715-38cbd1bd3cbb-kube-api-access-k9fv4\") pod \"aodh-ca6f-account-create-update-x9qzb\" (UID: \"becb33c4-a51f-43dc-a715-38cbd1bd3cbb\") " pod="openstack/aodh-ca6f-account-create-update-x9qzb"
Jan 22 07:31:27 crc kubenswrapper[4982]: I0122 07:31:27.864960 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/becb33c4-a51f-43dc-a715-38cbd1bd3cbb-operator-scripts\") pod \"aodh-ca6f-account-create-update-x9qzb\" (UID: \"becb33c4-a51f-43dc-a715-38cbd1bd3cbb\") " pod="openstack/aodh-ca6f-account-create-update-x9qzb"
Jan 22 07:31:27 crc kubenswrapper[4982]: I0122 07:31:27.867348 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/becb33c4-a51f-43dc-a715-38cbd1bd3cbb-operator-scripts\") pod \"aodh-ca6f-account-create-update-x9qzb\" (UID: \"becb33c4-a51f-43dc-a715-38cbd1bd3cbb\") " pod="openstack/aodh-ca6f-account-create-update-x9qzb"
Jan 22 07:31:27 crc kubenswrapper[4982]: I0122 07:31:27.882560 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9fv4\" (UniqueName: \"kubernetes.io/projected/becb33c4-a51f-43dc-a715-38cbd1bd3cbb-kube-api-access-k9fv4\") pod \"aodh-ca6f-account-create-update-x9qzb\" (UID: \"becb33c4-a51f-43dc-a715-38cbd1bd3cbb\") " pod="openstack/aodh-ca6f-account-create-update-x9qzb"
Jan 22 07:31:27 crc kubenswrapper[4982]: I0122 07:31:27.980001 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-ca6f-account-create-update-x9qzb"
Jan 22 07:31:28 crc kubenswrapper[4982]: I0122 07:31:28.322610 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-stvft"]
Jan 22 07:31:28 crc kubenswrapper[4982]: I0122 07:31:28.514823 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-ca6f-account-create-update-x9qzb"]
Jan 22 07:31:28 crc kubenswrapper[4982]: W0122 07:31:28.521015 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbecb33c4_a51f_43dc_a715_38cbd1bd3cbb.slice/crio-65efb276dd5b88e49198d4dbabcdb1511817598102e2047a393cd74e2d7cd4d5 WatchSource:0}: Error finding container 65efb276dd5b88e49198d4dbabcdb1511817598102e2047a393cd74e2d7cd4d5: Status 404 returned error can't find the container with id 65efb276dd5b88e49198d4dbabcdb1511817598102e2047a393cd74e2d7cd4d5
Jan 22 07:31:29 crc kubenswrapper[4982]: I0122 07:31:29.156613 4982 generic.go:334] "Generic (PLEG): container finished" podID="9dede625-5da4-4c2c-b10a-d62d45d2d6cd" containerID="236981769c67c0d04368b824f79e404c515981a95ac65b4afb8fe1094aa5df10" exitCode=0
Jan 22 07:31:29 crc kubenswrapper[4982]: I0122 07:31:29.156741 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-stvft" event={"ID":"9dede625-5da4-4c2c-b10a-d62d45d2d6cd","Type":"ContainerDied","Data":"236981769c67c0d04368b824f79e404c515981a95ac65b4afb8fe1094aa5df10"}
Jan 22 07:31:29 crc kubenswrapper[4982]: I0122 07:31:29.157025 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-stvft" event={"ID":"9dede625-5da4-4c2c-b10a-d62d45d2d6cd","Type":"ContainerStarted","Data":"2a9798359142bb4bca510e0dfb83c061b9e5a50cc38f1a34d8179bfc2c8378eb"}
Jan 22 07:31:29 crc kubenswrapper[4982]: I0122 07:31:29.158577 4982 generic.go:334] "Generic (PLEG): container finished" podID="becb33c4-a51f-43dc-a715-38cbd1bd3cbb" containerID="fda6dcca3f60f5093ffdda3b98622604896c0b7e415fe1b27534b488b79d06df" exitCode=0
Jan 22 07:31:29 crc kubenswrapper[4982]: I0122 07:31:29.158610 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-ca6f-account-create-update-x9qzb" event={"ID":"becb33c4-a51f-43dc-a715-38cbd1bd3cbb","Type":"ContainerDied","Data":"fda6dcca3f60f5093ffdda3b98622604896c0b7e415fe1b27534b488b79d06df"}
Jan 22 07:31:29 crc kubenswrapper[4982]: I0122 07:31:29.158628 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-ca6f-account-create-update-x9qzb" event={"ID":"becb33c4-a51f-43dc-a715-38cbd1bd3cbb","Type":"ContainerStarted","Data":"65efb276dd5b88e49198d4dbabcdb1511817598102e2047a393cd74e2d7cd4d5"}
Jan 22 07:31:30 crc kubenswrapper[4982]: I0122 07:31:30.668285 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-ca6f-account-create-update-x9qzb"
Jan 22 07:31:30 crc kubenswrapper[4982]: I0122 07:31:30.675104 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-stvft"
Jan 22 07:31:30 crc kubenswrapper[4982]: I0122 07:31:30.831448 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/becb33c4-a51f-43dc-a715-38cbd1bd3cbb-operator-scripts\") pod \"becb33c4-a51f-43dc-a715-38cbd1bd3cbb\" (UID: \"becb33c4-a51f-43dc-a715-38cbd1bd3cbb\") "
Jan 22 07:31:30 crc kubenswrapper[4982]: I0122 07:31:30.831797 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2pz6p\" (UniqueName: \"kubernetes.io/projected/9dede625-5da4-4c2c-b10a-d62d45d2d6cd-kube-api-access-2pz6p\") pod \"9dede625-5da4-4c2c-b10a-d62d45d2d6cd\" (UID: \"9dede625-5da4-4c2c-b10a-d62d45d2d6cd\") "
Jan 22 07:31:30 crc kubenswrapper[4982]: I0122 07:31:30.831946 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9dede625-5da4-4c2c-b10a-d62d45d2d6cd-operator-scripts\") pod \"9dede625-5da4-4c2c-b10a-d62d45d2d6cd\" (UID: \"9dede625-5da4-4c2c-b10a-d62d45d2d6cd\") "
Jan 22 07:31:30 crc kubenswrapper[4982]: I0122 07:31:30.832202 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k9fv4\" (UniqueName: \"kubernetes.io/projected/becb33c4-a51f-43dc-a715-38cbd1bd3cbb-kube-api-access-k9fv4\") pod \"becb33c4-a51f-43dc-a715-38cbd1bd3cbb\" (UID: \"becb33c4-a51f-43dc-a715-38cbd1bd3cbb\") "
Jan 22 07:31:30 crc kubenswrapper[4982]: I0122 07:31:30.832806 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9dede625-5da4-4c2c-b10a-d62d45d2d6cd-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9dede625-5da4-4c2c-b10a-d62d45d2d6cd" (UID: "9dede625-5da4-4c2c-b10a-d62d45d2d6cd"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 07:31:30 crc kubenswrapper[4982]: I0122 07:31:30.832913 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/becb33c4-a51f-43dc-a715-38cbd1bd3cbb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "becb33c4-a51f-43dc-a715-38cbd1bd3cbb" (UID: "becb33c4-a51f-43dc-a715-38cbd1bd3cbb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 07:31:30 crc kubenswrapper[4982]: I0122 07:31:30.835819 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/becb33c4-a51f-43dc-a715-38cbd1bd3cbb-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 07:31:30 crc kubenswrapper[4982]: I0122 07:31:30.836093 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9dede625-5da4-4c2c-b10a-d62d45d2d6cd-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 07:31:30 crc kubenswrapper[4982]: I0122 07:31:30.845915 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9dede625-5da4-4c2c-b10a-d62d45d2d6cd-kube-api-access-2pz6p" (OuterVolumeSpecName: "kube-api-access-2pz6p") pod "9dede625-5da4-4c2c-b10a-d62d45d2d6cd" (UID: "9dede625-5da4-4c2c-b10a-d62d45d2d6cd"). InnerVolumeSpecName "kube-api-access-2pz6p". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 07:31:30 crc kubenswrapper[4982]: I0122 07:31:30.846097 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/becb33c4-a51f-43dc-a715-38cbd1bd3cbb-kube-api-access-k9fv4" (OuterVolumeSpecName: "kube-api-access-k9fv4") pod "becb33c4-a51f-43dc-a715-38cbd1bd3cbb" (UID: "becb33c4-a51f-43dc-a715-38cbd1bd3cbb"). InnerVolumeSpecName "kube-api-access-k9fv4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 07:31:30 crc kubenswrapper[4982]: I0122 07:31:30.938626 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k9fv4\" (UniqueName: \"kubernetes.io/projected/becb33c4-a51f-43dc-a715-38cbd1bd3cbb-kube-api-access-k9fv4\") on node \"crc\" DevicePath \"\""
Jan 22 07:31:30 crc kubenswrapper[4982]: I0122 07:31:30.938669 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2pz6p\" (UniqueName: \"kubernetes.io/projected/9dede625-5da4-4c2c-b10a-d62d45d2d6cd-kube-api-access-2pz6p\") on node \"crc\" DevicePath \"\""
Jan 22 07:31:31 crc kubenswrapper[4982]: I0122 07:31:31.177157 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-stvft" event={"ID":"9dede625-5da4-4c2c-b10a-d62d45d2d6cd","Type":"ContainerDied","Data":"2a9798359142bb4bca510e0dfb83c061b9e5a50cc38f1a34d8179bfc2c8378eb"}
Jan 22 07:31:31 crc kubenswrapper[4982]: I0122 07:31:31.177190 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2a9798359142bb4bca510e0dfb83c061b9e5a50cc38f1a34d8179bfc2c8378eb"
Jan 22 07:31:31 crc kubenswrapper[4982]: I0122 07:31:31.177239 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-stvft"
Jan 22 07:31:31 crc kubenswrapper[4982]: I0122 07:31:31.188167 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-ca6f-account-create-update-x9qzb" event={"ID":"becb33c4-a51f-43dc-a715-38cbd1bd3cbb","Type":"ContainerDied","Data":"65efb276dd5b88e49198d4dbabcdb1511817598102e2047a393cd74e2d7cd4d5"}
Jan 22 07:31:31 crc kubenswrapper[4982]: I0122 07:31:31.188223 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="65efb276dd5b88e49198d4dbabcdb1511817598102e2047a393cd74e2d7cd4d5"
Jan 22 07:31:31 crc kubenswrapper[4982]: I0122 07:31:31.188293 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-ca6f-account-create-update-x9qzb"
Need to start a new one" pod="openstack/aodh-ca6f-account-create-update-x9qzb" Jan 22 07:31:32 crc kubenswrapper[4982]: I0122 07:31:32.893941 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-sync-wgf8h"] Jan 22 07:31:32 crc kubenswrapper[4982]: E0122 07:31:32.894923 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9dede625-5da4-4c2c-b10a-d62d45d2d6cd" containerName="mariadb-database-create" Jan 22 07:31:32 crc kubenswrapper[4982]: I0122 07:31:32.894939 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="9dede625-5da4-4c2c-b10a-d62d45d2d6cd" containerName="mariadb-database-create" Jan 22 07:31:32 crc kubenswrapper[4982]: E0122 07:31:32.894967 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="becb33c4-a51f-43dc-a715-38cbd1bd3cbb" containerName="mariadb-account-create-update" Jan 22 07:31:32 crc kubenswrapper[4982]: I0122 07:31:32.894976 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="becb33c4-a51f-43dc-a715-38cbd1bd3cbb" containerName="mariadb-account-create-update" Jan 22 07:31:32 crc kubenswrapper[4982]: I0122 07:31:32.895247 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="9dede625-5da4-4c2c-b10a-d62d45d2d6cd" containerName="mariadb-database-create" Jan 22 07:31:32 crc kubenswrapper[4982]: I0122 07:31:32.895268 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="becb33c4-a51f-43dc-a715-38cbd1bd3cbb" containerName="mariadb-account-create-update" Jan 22 07:31:32 crc kubenswrapper[4982]: I0122 07:31:32.896666 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-wgf8h" Jan 22 07:31:32 crc kubenswrapper[4982]: I0122 07:31:32.899277 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-9md54" Jan 22 07:31:32 crc kubenswrapper[4982]: I0122 07:31:32.899790 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Jan 22 07:31:32 crc kubenswrapper[4982]: I0122 07:31:32.900009 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Jan 22 07:31:32 crc kubenswrapper[4982]: I0122 07:31:32.900393 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Jan 22 07:31:32 crc kubenswrapper[4982]: I0122 07:31:32.906610 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-wgf8h"] Jan 22 07:31:32 crc kubenswrapper[4982]: I0122 07:31:32.988427 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28f46de4-fbd7-436f-ab8e-01d03d10f323-config-data\") pod \"aodh-db-sync-wgf8h\" (UID: \"28f46de4-fbd7-436f-ab8e-01d03d10f323\") " pod="openstack/aodh-db-sync-wgf8h" Jan 22 07:31:32 crc kubenswrapper[4982]: I0122 07:31:32.988583 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28f46de4-fbd7-436f-ab8e-01d03d10f323-scripts\") pod \"aodh-db-sync-wgf8h\" (UID: \"28f46de4-fbd7-436f-ab8e-01d03d10f323\") " pod="openstack/aodh-db-sync-wgf8h" Jan 22 07:31:32 crc kubenswrapper[4982]: I0122 07:31:32.988702 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mh484\" (UniqueName: \"kubernetes.io/projected/28f46de4-fbd7-436f-ab8e-01d03d10f323-kube-api-access-mh484\") pod \"aodh-db-sync-wgf8h\" (UID: 
\"28f46de4-fbd7-436f-ab8e-01d03d10f323\") " pod="openstack/aodh-db-sync-wgf8h" Jan 22 07:31:32 crc kubenswrapper[4982]: I0122 07:31:32.988760 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28f46de4-fbd7-436f-ab8e-01d03d10f323-combined-ca-bundle\") pod \"aodh-db-sync-wgf8h\" (UID: \"28f46de4-fbd7-436f-ab8e-01d03d10f323\") " pod="openstack/aodh-db-sync-wgf8h" Jan 22 07:31:33 crc kubenswrapper[4982]: I0122 07:31:33.036121 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-dn4mz"] Jan 22 07:31:33 crc kubenswrapper[4982]: I0122 07:31:33.053731 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-dn4mz"] Jan 22 07:31:33 crc kubenswrapper[4982]: I0122 07:31:33.090985 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28f46de4-fbd7-436f-ab8e-01d03d10f323-scripts\") pod \"aodh-db-sync-wgf8h\" (UID: \"28f46de4-fbd7-436f-ab8e-01d03d10f323\") " pod="openstack/aodh-db-sync-wgf8h" Jan 22 07:31:33 crc kubenswrapper[4982]: I0122 07:31:33.091092 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mh484\" (UniqueName: \"kubernetes.io/projected/28f46de4-fbd7-436f-ab8e-01d03d10f323-kube-api-access-mh484\") pod \"aodh-db-sync-wgf8h\" (UID: \"28f46de4-fbd7-436f-ab8e-01d03d10f323\") " pod="openstack/aodh-db-sync-wgf8h" Jan 22 07:31:33 crc kubenswrapper[4982]: I0122 07:31:33.091133 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28f46de4-fbd7-436f-ab8e-01d03d10f323-combined-ca-bundle\") pod \"aodh-db-sync-wgf8h\" (UID: \"28f46de4-fbd7-436f-ab8e-01d03d10f323\") " pod="openstack/aodh-db-sync-wgf8h" Jan 22 07:31:33 crc kubenswrapper[4982]: I0122 07:31:33.091214 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28f46de4-fbd7-436f-ab8e-01d03d10f323-config-data\") pod \"aodh-db-sync-wgf8h\" (UID: \"28f46de4-fbd7-436f-ab8e-01d03d10f323\") " pod="openstack/aodh-db-sync-wgf8h" Jan 22 07:31:33 crc kubenswrapper[4982]: I0122 07:31:33.096953 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28f46de4-fbd7-436f-ab8e-01d03d10f323-combined-ca-bundle\") pod \"aodh-db-sync-wgf8h\" (UID: \"28f46de4-fbd7-436f-ab8e-01d03d10f323\") " pod="openstack/aodh-db-sync-wgf8h" Jan 22 07:31:33 crc kubenswrapper[4982]: I0122 07:31:33.098294 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28f46de4-fbd7-436f-ab8e-01d03d10f323-scripts\") pod \"aodh-db-sync-wgf8h\" (UID: \"28f46de4-fbd7-436f-ab8e-01d03d10f323\") " pod="openstack/aodh-db-sync-wgf8h" Jan 22 07:31:33 crc kubenswrapper[4982]: I0122 07:31:33.110350 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28f46de4-fbd7-436f-ab8e-01d03d10f323-config-data\") pod \"aodh-db-sync-wgf8h\" (UID: \"28f46de4-fbd7-436f-ab8e-01d03d10f323\") " pod="openstack/aodh-db-sync-wgf8h" Jan 22 07:31:33 crc kubenswrapper[4982]: I0122 07:31:33.112374 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mh484\" (UniqueName: 
\"kubernetes.io/projected/28f46de4-fbd7-436f-ab8e-01d03d10f323-kube-api-access-mh484\") pod \"aodh-db-sync-wgf8h\" (UID: \"28f46de4-fbd7-436f-ab8e-01d03d10f323\") " pod="openstack/aodh-db-sync-wgf8h" Jan 22 07:31:33 crc kubenswrapper[4982]: I0122 07:31:33.220373 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-wgf8h" Jan 22 07:31:33 crc kubenswrapper[4982]: I0122 07:31:33.750613 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6103be86-a098-4bcd-8217-251f25d1a8b9" path="/var/lib/kubelet/pods/6103be86-a098-4bcd-8217-251f25d1a8b9/volumes" Jan 22 07:31:33 crc kubenswrapper[4982]: I0122 07:31:33.764148 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-wgf8h"] Jan 22 07:31:34 crc kubenswrapper[4982]: I0122 07:31:34.213208 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-wgf8h" event={"ID":"28f46de4-fbd7-436f-ab8e-01d03d10f323","Type":"ContainerStarted","Data":"0642450f2c1fb679a6b6b44890fa9e628e0ab2feb2f6febfe890345005d2d0d1"} Jan 22 07:31:39 crc kubenswrapper[4982]: I0122 07:31:39.282400 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-wgf8h" event={"ID":"28f46de4-fbd7-436f-ab8e-01d03d10f323","Type":"ContainerStarted","Data":"a133e98520d6d4e154b4c2c9d3224fdf982cb968cedb70eb3836cb780639226f"} Jan 22 07:31:39 crc kubenswrapper[4982]: I0122 07:31:39.317096 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-db-sync-wgf8h" podStartSLOduration=2.509500548 podStartE2EDuration="7.317075793s" podCreationTimestamp="2026-01-22 07:31:32 +0000 UTC" firstStartedPulling="2026-01-22 07:31:33.794716955 +0000 UTC m=+6354.633354958" lastFinishedPulling="2026-01-22 07:31:38.60229219 +0000 UTC m=+6359.440930203" observedRunningTime="2026-01-22 07:31:39.299933481 +0000 UTC m=+6360.138571494" watchObservedRunningTime="2026-01-22 07:31:39.317075793 +0000 UTC m=+6360.155713796" Jan 22 07:31:41 crc kubenswrapper[4982]: I0122 07:31:41.307357 4982 generic.go:334] "Generic (PLEG): container finished" podID="28f46de4-fbd7-436f-ab8e-01d03d10f323" containerID="a133e98520d6d4e154b4c2c9d3224fdf982cb968cedb70eb3836cb780639226f" exitCode=0 Jan 22 07:31:41 crc kubenswrapper[4982]: I0122 07:31:41.307462 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-wgf8h" event={"ID":"28f46de4-fbd7-436f-ab8e-01d03d10f323","Type":"ContainerDied","Data":"a133e98520d6d4e154b4c2c9d3224fdf982cb968cedb70eb3836cb780639226f"} Jan 22 07:31:42 crc kubenswrapper[4982]: I0122 07:31:42.768123 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-wgf8h" Jan 22 07:31:42 crc kubenswrapper[4982]: I0122 07:31:42.930369 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28f46de4-fbd7-436f-ab8e-01d03d10f323-combined-ca-bundle\") pod \"28f46de4-fbd7-436f-ab8e-01d03d10f323\" (UID: \"28f46de4-fbd7-436f-ab8e-01d03d10f323\") " Jan 22 07:31:42 crc kubenswrapper[4982]: I0122 07:31:42.930696 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mh484\" (UniqueName: \"kubernetes.io/projected/28f46de4-fbd7-436f-ab8e-01d03d10f323-kube-api-access-mh484\") pod \"28f46de4-fbd7-436f-ab8e-01d03d10f323\" (UID: \"28f46de4-fbd7-436f-ab8e-01d03d10f323\") " Jan 22 07:31:42 crc kubenswrapper[4982]: I0122 07:31:42.930826 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28f46de4-fbd7-436f-ab8e-01d03d10f323-scripts\") pod \"28f46de4-fbd7-436f-ab8e-01d03d10f323\" (UID: \"28f46de4-fbd7-436f-ab8e-01d03d10f323\") " Jan 22 07:31:42 crc kubenswrapper[4982]: I0122 07:31:42.930883 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28f46de4-fbd7-436f-ab8e-01d03d10f323-config-data\") pod \"28f46de4-fbd7-436f-ab8e-01d03d10f323\" (UID: \"28f46de4-fbd7-436f-ab8e-01d03d10f323\") " Jan 22 07:31:42 crc kubenswrapper[4982]: I0122 07:31:42.939041 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28f46de4-fbd7-436f-ab8e-01d03d10f323-scripts" (OuterVolumeSpecName: "scripts") pod "28f46de4-fbd7-436f-ab8e-01d03d10f323" (UID: "28f46de4-fbd7-436f-ab8e-01d03d10f323"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:31:42 crc kubenswrapper[4982]: I0122 07:31:42.939062 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28f46de4-fbd7-436f-ab8e-01d03d10f323-kube-api-access-mh484" (OuterVolumeSpecName: "kube-api-access-mh484") pod "28f46de4-fbd7-436f-ab8e-01d03d10f323" (UID: "28f46de4-fbd7-436f-ab8e-01d03d10f323"). InnerVolumeSpecName "kube-api-access-mh484". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:31:42 crc kubenswrapper[4982]: I0122 07:31:42.972754 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28f46de4-fbd7-436f-ab8e-01d03d10f323-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "28f46de4-fbd7-436f-ab8e-01d03d10f323" (UID: "28f46de4-fbd7-436f-ab8e-01d03d10f323"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:31:42 crc kubenswrapper[4982]: I0122 07:31:42.979257 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28f46de4-fbd7-436f-ab8e-01d03d10f323-config-data" (OuterVolumeSpecName: "config-data") pod "28f46de4-fbd7-436f-ab8e-01d03d10f323" (UID: "28f46de4-fbd7-436f-ab8e-01d03d10f323"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:31:43 crc kubenswrapper[4982]: I0122 07:31:43.033504 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28f46de4-fbd7-436f-ab8e-01d03d10f323-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:31:43 crc kubenswrapper[4982]: I0122 07:31:43.033542 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mh484\" (UniqueName: \"kubernetes.io/projected/28f46de4-fbd7-436f-ab8e-01d03d10f323-kube-api-access-mh484\") on node \"crc\" DevicePath \"\"" Jan 22 07:31:43 crc kubenswrapper[4982]: I0122 07:31:43.033553 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28f46de4-fbd7-436f-ab8e-01d03d10f323-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:31:43 crc kubenswrapper[4982]: I0122 07:31:43.033561 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28f46de4-fbd7-436f-ab8e-01d03d10f323-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:31:43 crc kubenswrapper[4982]: I0122 07:31:43.330473 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-wgf8h" event={"ID":"28f46de4-fbd7-436f-ab8e-01d03d10f323","Type":"ContainerDied","Data":"0642450f2c1fb679a6b6b44890fa9e628e0ab2feb2f6febfe890345005d2d0d1"} Jan 22 07:31:43 crc kubenswrapper[4982]: I0122 07:31:43.330517 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0642450f2c1fb679a6b6b44890fa9e628e0ab2feb2f6febfe890345005d2d0d1" Jan 22 07:31:43 crc kubenswrapper[4982]: I0122 07:31:43.330669 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-wgf8h" Jan 22 07:31:45 crc kubenswrapper[4982]: I0122 07:31:45.138786 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Jan 22 07:31:47 crc kubenswrapper[4982]: I0122 07:31:47.559230 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-0"] Jan 22 07:31:47 crc kubenswrapper[4982]: E0122 07:31:47.560285 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28f46de4-fbd7-436f-ab8e-01d03d10f323" containerName="aodh-db-sync" Jan 22 07:31:47 crc kubenswrapper[4982]: I0122 07:31:47.560303 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="28f46de4-fbd7-436f-ab8e-01d03d10f323" containerName="aodh-db-sync" Jan 22 07:31:47 crc kubenswrapper[4982]: I0122 07:31:47.560593 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="28f46de4-fbd7-436f-ab8e-01d03d10f323" containerName="aodh-db-sync" Jan 22 07:31:47 crc kubenswrapper[4982]: I0122 07:31:47.563010 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Jan 22 07:31:47 crc kubenswrapper[4982]: I0122 07:31:47.566520 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-9md54" Jan 22 07:31:47 crc kubenswrapper[4982]: I0122 07:31:47.566707 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Jan 22 07:31:47 crc kubenswrapper[4982]: I0122 07:31:47.566937 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Jan 22 07:31:47 crc kubenswrapper[4982]: I0122 07:31:47.582154 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Jan 22 07:31:47 crc kubenswrapper[4982]: I0122 07:31:47.639073 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a8a4564-d7d6-4801-8bdd-e4554fcf08e0-combined-ca-bundle\") pod \"aodh-0\" (UID: \"8a8a4564-d7d6-4801-8bdd-e4554fcf08e0\") " pod="openstack/aodh-0" Jan 22 07:31:47 crc kubenswrapper[4982]: I0122 07:31:47.639188 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a8a4564-d7d6-4801-8bdd-e4554fcf08e0-config-data\") pod \"aodh-0\" (UID: \"8a8a4564-d7d6-4801-8bdd-e4554fcf08e0\") " pod="openstack/aodh-0" Jan 22 07:31:47 crc kubenswrapper[4982]: I0122 07:31:47.639234 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vqzf\" (UniqueName: \"kubernetes.io/projected/8a8a4564-d7d6-4801-8bdd-e4554fcf08e0-kube-api-access-9vqzf\") pod \"aodh-0\" (UID: \"8a8a4564-d7d6-4801-8bdd-e4554fcf08e0\") " pod="openstack/aodh-0" Jan 22 07:31:47 crc kubenswrapper[4982]: I0122 07:31:47.639265 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8a8a4564-d7d6-4801-8bdd-e4554fcf08e0-scripts\") pod \"aodh-0\" (UID: \"8a8a4564-d7d6-4801-8bdd-e4554fcf08e0\") " pod="openstack/aodh-0" Jan 22 07:31:47 crc kubenswrapper[4982]: I0122 07:31:47.741228 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a8a4564-d7d6-4801-8bdd-e4554fcf08e0-combined-ca-bundle\") pod \"aodh-0\" (UID: \"8a8a4564-d7d6-4801-8bdd-e4554fcf08e0\") " pod="openstack/aodh-0" Jan 22 07:31:47 crc kubenswrapper[4982]: I0122 07:31:47.741413 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a8a4564-d7d6-4801-8bdd-e4554fcf08e0-config-data\") pod \"aodh-0\" (UID: \"8a8a4564-d7d6-4801-8bdd-e4554fcf08e0\") " pod="openstack/aodh-0" Jan 22 07:31:47 crc kubenswrapper[4982]: I0122 07:31:47.741486 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vqzf\" (UniqueName: \"kubernetes.io/projected/8a8a4564-d7d6-4801-8bdd-e4554fcf08e0-kube-api-access-9vqzf\") pod \"aodh-0\" (UID: \"8a8a4564-d7d6-4801-8bdd-e4554fcf08e0\") " pod="openstack/aodh-0" Jan 22 07:31:47 crc kubenswrapper[4982]: I0122 07:31:47.741522 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8a8a4564-d7d6-4801-8bdd-e4554fcf08e0-scripts\") pod \"aodh-0\" (UID: \"8a8a4564-d7d6-4801-8bdd-e4554fcf08e0\") " pod="openstack/aodh-0" Jan 22 07:31:47 crc kubenswrapper[4982]: 
I0122 07:31:47.746920 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a8a4564-d7d6-4801-8bdd-e4554fcf08e0-config-data\") pod \"aodh-0\" (UID: \"8a8a4564-d7d6-4801-8bdd-e4554fcf08e0\") " pod="openstack/aodh-0" Jan 22 07:31:47 crc kubenswrapper[4982]: I0122 07:31:47.748395 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a8a4564-d7d6-4801-8bdd-e4554fcf08e0-combined-ca-bundle\") pod \"aodh-0\" (UID: \"8a8a4564-d7d6-4801-8bdd-e4554fcf08e0\") " pod="openstack/aodh-0" Jan 22 07:31:47 crc kubenswrapper[4982]: I0122 07:31:47.749208 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8a8a4564-d7d6-4801-8bdd-e4554fcf08e0-scripts\") pod \"aodh-0\" (UID: \"8a8a4564-d7d6-4801-8bdd-e4554fcf08e0\") " pod="openstack/aodh-0" Jan 22 07:31:47 crc kubenswrapper[4982]: I0122 07:31:47.761351 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vqzf\" (UniqueName: \"kubernetes.io/projected/8a8a4564-d7d6-4801-8bdd-e4554fcf08e0-kube-api-access-9vqzf\") pod \"aodh-0\" (UID: \"8a8a4564-d7d6-4801-8bdd-e4554fcf08e0\") " pod="openstack/aodh-0" Jan 22 07:31:47 crc kubenswrapper[4982]: I0122 07:31:47.896682 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Jan 22 07:31:48 crc kubenswrapper[4982]: I0122 07:31:48.433656 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Jan 22 07:31:49 crc kubenswrapper[4982]: I0122 07:31:49.391410 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"8a8a4564-d7d6-4801-8bdd-e4554fcf08e0","Type":"ContainerStarted","Data":"5f16632d0c6270f6f112e25d75962d82891055b28177e0e18ad4bf016c084e64"} Jan 22 07:31:49 crc kubenswrapper[4982]: I0122 07:31:49.391654 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"8a8a4564-d7d6-4801-8bdd-e4554fcf08e0","Type":"ContainerStarted","Data":"16ae83c6809489b8f96167bc5c7459236dd420732c00878b79a21a53433100f4"} Jan 22 07:31:49 crc kubenswrapper[4982]: I0122 07:31:49.876336 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 22 07:31:49 crc kubenswrapper[4982]: I0122 07:31:49.876957 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d92a834e-b7a1-4c75-bad0-7fb0220ef086" containerName="ceilometer-central-agent" containerID="cri-o://5781abecb0862ca1c9624831f81f416fa586a810459327dded876737b48b91e4" gracePeriod=30 Jan 22 07:31:49 crc kubenswrapper[4982]: I0122 07:31:49.877087 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d92a834e-b7a1-4c75-bad0-7fb0220ef086" containerName="proxy-httpd" containerID="cri-o://a25bb6c379aee5d7995e68c2a0760dae4b91d70c2077c66009730708be54b7d2" gracePeriod=30 Jan 22 07:31:49 crc kubenswrapper[4982]: I0122 07:31:49.877145 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d92a834e-b7a1-4c75-bad0-7fb0220ef086" containerName="sg-core" containerID="cri-o://f6ed40ee201a387ed00fa5e6cdf48a1b73e0e18205dbe92ceb6ce2519199ea21" gracePeriod=30 Jan 22 07:31:49 crc kubenswrapper[4982]: I0122 07:31:49.877189 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" 
podUID="d92a834e-b7a1-4c75-bad0-7fb0220ef086" containerName="ceilometer-notification-agent" containerID="cri-o://aa2ef6f99461d8f03fc9b29b52e4774e84c6cb2e116d71852d837879534a4a15" gracePeriod=30 Jan 22 07:31:50 crc kubenswrapper[4982]: I0122 07:31:50.404583 4982 generic.go:334] "Generic (PLEG): container finished" podID="d92a834e-b7a1-4c75-bad0-7fb0220ef086" containerID="a25bb6c379aee5d7995e68c2a0760dae4b91d70c2077c66009730708be54b7d2" exitCode=0 Jan 22 07:31:50 crc kubenswrapper[4982]: I0122 07:31:50.404912 4982 generic.go:334] "Generic (PLEG): container finished" podID="d92a834e-b7a1-4c75-bad0-7fb0220ef086" containerID="f6ed40ee201a387ed00fa5e6cdf48a1b73e0e18205dbe92ceb6ce2519199ea21" exitCode=2 Jan 22 07:31:50 crc kubenswrapper[4982]: I0122 07:31:50.404927 4982 generic.go:334] "Generic (PLEG): container finished" podID="d92a834e-b7a1-4c75-bad0-7fb0220ef086" containerID="5781abecb0862ca1c9624831f81f416fa586a810459327dded876737b48b91e4" exitCode=0 Jan 22 07:31:50 crc kubenswrapper[4982]: I0122 07:31:50.404794 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d92a834e-b7a1-4c75-bad0-7fb0220ef086","Type":"ContainerDied","Data":"a25bb6c379aee5d7995e68c2a0760dae4b91d70c2077c66009730708be54b7d2"} Jan 22 07:31:50 crc kubenswrapper[4982]: I0122 07:31:50.404969 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d92a834e-b7a1-4c75-bad0-7fb0220ef086","Type":"ContainerDied","Data":"f6ed40ee201a387ed00fa5e6cdf48a1b73e0e18205dbe92ceb6ce2519199ea21"} Jan 22 07:31:50 crc kubenswrapper[4982]: I0122 07:31:50.404987 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d92a834e-b7a1-4c75-bad0-7fb0220ef086","Type":"ContainerDied","Data":"5781abecb0862ca1c9624831f81f416fa586a810459327dded876737b48b91e4"} Jan 22 07:31:50 crc kubenswrapper[4982]: I0122 07:31:50.876700 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.014227 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hz94k\" (UniqueName: \"kubernetes.io/projected/d92a834e-b7a1-4c75-bad0-7fb0220ef086-kube-api-access-hz94k\") pod \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\" (UID: \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\") " Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.014271 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d92a834e-b7a1-4c75-bad0-7fb0220ef086-config-data\") pod \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\" (UID: \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\") " Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.014330 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d92a834e-b7a1-4c75-bad0-7fb0220ef086-run-httpd\") pod \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\" (UID: \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\") " Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.014348 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d92a834e-b7a1-4c75-bad0-7fb0220ef086-scripts\") pod \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\" (UID: \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\") " Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.014413 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d92a834e-b7a1-4c75-bad0-7fb0220ef086-log-httpd\") pod \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\" (UID: \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\") " Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.014503 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d92a834e-b7a1-4c75-bad0-7fb0220ef086-sg-core-conf-yaml\") pod \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\" (UID: \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\") " Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.014542 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d92a834e-b7a1-4c75-bad0-7fb0220ef086-combined-ca-bundle\") pod \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\" (UID: \"d92a834e-b7a1-4c75-bad0-7fb0220ef086\") " Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.016362 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d92a834e-b7a1-4c75-bad0-7fb0220ef086-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d92a834e-b7a1-4c75-bad0-7fb0220ef086" (UID: "d92a834e-b7a1-4c75-bad0-7fb0220ef086"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.023203 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d92a834e-b7a1-4c75-bad0-7fb0220ef086-kube-api-access-hz94k" (OuterVolumeSpecName: "kube-api-access-hz94k") pod "d92a834e-b7a1-4c75-bad0-7fb0220ef086" (UID: "d92a834e-b7a1-4c75-bad0-7fb0220ef086"). InnerVolumeSpecName "kube-api-access-hz94k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.026354 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d92a834e-b7a1-4c75-bad0-7fb0220ef086-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d92a834e-b7a1-4c75-bad0-7fb0220ef086" (UID: "d92a834e-b7a1-4c75-bad0-7fb0220ef086"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.032336 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d92a834e-b7a1-4c75-bad0-7fb0220ef086-scripts" (OuterVolumeSpecName: "scripts") pod "d92a834e-b7a1-4c75-bad0-7fb0220ef086" (UID: "d92a834e-b7a1-4c75-bad0-7fb0220ef086"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.056597 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d92a834e-b7a1-4c75-bad0-7fb0220ef086-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "d92a834e-b7a1-4c75-bad0-7fb0220ef086" (UID: "d92a834e-b7a1-4c75-bad0-7fb0220ef086"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.104362 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d92a834e-b7a1-4c75-bad0-7fb0220ef086-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d92a834e-b7a1-4c75-bad0-7fb0220ef086" (UID: "d92a834e-b7a1-4c75-bad0-7fb0220ef086"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.116793 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d92a834e-b7a1-4c75-bad0-7fb0220ef086-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.116838 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hz94k\" (UniqueName: \"kubernetes.io/projected/d92a834e-b7a1-4c75-bad0-7fb0220ef086-kube-api-access-hz94k\") on node \"crc\" DevicePath \"\"" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.116869 4982 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d92a834e-b7a1-4c75-bad0-7fb0220ef086-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.116883 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d92a834e-b7a1-4c75-bad0-7fb0220ef086-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.116895 4982 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d92a834e-b7a1-4c75-bad0-7fb0220ef086-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.116907 4982 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d92a834e-b7a1-4c75-bad0-7fb0220ef086-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.151602 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/d92a834e-b7a1-4c75-bad0-7fb0220ef086-config-data" (OuterVolumeSpecName: "config-data") pod "d92a834e-b7a1-4c75-bad0-7fb0220ef086" (UID: "d92a834e-b7a1-4c75-bad0-7fb0220ef086"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.219152 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d92a834e-b7a1-4c75-bad0-7fb0220ef086-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.417351 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"8a8a4564-d7d6-4801-8bdd-e4554fcf08e0","Type":"ContainerStarted","Data":"7ae0c3f9306f3e0f7506b906bd8f5acdfede2f67d5019f322c833a32f165d78f"} Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.420694 4982 generic.go:334] "Generic (PLEG): container finished" podID="d92a834e-b7a1-4c75-bad0-7fb0220ef086" containerID="aa2ef6f99461d8f03fc9b29b52e4774e84c6cb2e116d71852d837879534a4a15" exitCode=0 Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.420786 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.420782 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d92a834e-b7a1-4c75-bad0-7fb0220ef086","Type":"ContainerDied","Data":"aa2ef6f99461d8f03fc9b29b52e4774e84c6cb2e116d71852d837879534a4a15"} Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.421078 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d92a834e-b7a1-4c75-bad0-7fb0220ef086","Type":"ContainerDied","Data":"532c94a4b0670e7db33d2fb103898364b314e926f11c12c19466a99ddc262acb"} Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.421102 4982 scope.go:117] "RemoveContainer" containerID="a25bb6c379aee5d7995e68c2a0760dae4b91d70c2077c66009730708be54b7d2" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.459313 4982 scope.go:117] "RemoveContainer" containerID="f6ed40ee201a387ed00fa5e6cdf48a1b73e0e18205dbe92ceb6ce2519199ea21" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.474921 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.500739 4982 scope.go:117] "RemoveContainer" containerID="aa2ef6f99461d8f03fc9b29b52e4774e84c6cb2e116d71852d837879534a4a15" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.501923 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.514387 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 22 07:31:51 crc kubenswrapper[4982]: E0122 07:31:51.515078 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d92a834e-b7a1-4c75-bad0-7fb0220ef086" containerName="sg-core" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.515179 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d92a834e-b7a1-4c75-bad0-7fb0220ef086" containerName="sg-core" Jan 22 07:31:51 crc kubenswrapper[4982]: E0122 07:31:51.515264 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d92a834e-b7a1-4c75-bad0-7fb0220ef086" containerName="ceilometer-central-agent" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.515326 4982 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="d92a834e-b7a1-4c75-bad0-7fb0220ef086" containerName="ceilometer-central-agent" Jan 22 07:31:51 crc kubenswrapper[4982]: E0122 07:31:51.515401 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d92a834e-b7a1-4c75-bad0-7fb0220ef086" containerName="ceilometer-notification-agent" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.515457 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d92a834e-b7a1-4c75-bad0-7fb0220ef086" containerName="ceilometer-notification-agent" Jan 22 07:31:51 crc kubenswrapper[4982]: E0122 07:31:51.515530 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d92a834e-b7a1-4c75-bad0-7fb0220ef086" containerName="proxy-httpd" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.515585 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d92a834e-b7a1-4c75-bad0-7fb0220ef086" containerName="proxy-httpd" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.515923 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d92a834e-b7a1-4c75-bad0-7fb0220ef086" containerName="proxy-httpd" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.516011 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d92a834e-b7a1-4c75-bad0-7fb0220ef086" containerName="ceilometer-central-agent" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.516093 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d92a834e-b7a1-4c75-bad0-7fb0220ef086" containerName="sg-core" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.516158 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d92a834e-b7a1-4c75-bad0-7fb0220ef086" containerName="ceilometer-notification-agent" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.518722 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.524569 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.524799 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.530749 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.534927 4982 scope.go:117] "RemoveContainer" containerID="5781abecb0862ca1c9624831f81f416fa586a810459327dded876737b48b91e4" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.567122 4982 scope.go:117] "RemoveContainer" containerID="a25bb6c379aee5d7995e68c2a0760dae4b91d70c2077c66009730708be54b7d2" Jan 22 07:31:51 crc kubenswrapper[4982]: E0122 07:31:51.567521 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a25bb6c379aee5d7995e68c2a0760dae4b91d70c2077c66009730708be54b7d2\": container with ID starting with a25bb6c379aee5d7995e68c2a0760dae4b91d70c2077c66009730708be54b7d2 not found: ID does not exist" containerID="a25bb6c379aee5d7995e68c2a0760dae4b91d70c2077c66009730708be54b7d2" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.567551 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a25bb6c379aee5d7995e68c2a0760dae4b91d70c2077c66009730708be54b7d2"} err="failed to get container status \"a25bb6c379aee5d7995e68c2a0760dae4b91d70c2077c66009730708be54b7d2\": rpc error: code = NotFound desc = could not find container \"a25bb6c379aee5d7995e68c2a0760dae4b91d70c2077c66009730708be54b7d2\": container with ID starting with a25bb6c379aee5d7995e68c2a0760dae4b91d70c2077c66009730708be54b7d2 not found: ID does not exist" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.567572 4982 scope.go:117] "RemoveContainer" containerID="f6ed40ee201a387ed00fa5e6cdf48a1b73e0e18205dbe92ceb6ce2519199ea21" Jan 22 07:31:51 crc kubenswrapper[4982]: E0122 07:31:51.567760 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6ed40ee201a387ed00fa5e6cdf48a1b73e0e18205dbe92ceb6ce2519199ea21\": container with ID starting with f6ed40ee201a387ed00fa5e6cdf48a1b73e0e18205dbe92ceb6ce2519199ea21 not found: ID does not exist" containerID="f6ed40ee201a387ed00fa5e6cdf48a1b73e0e18205dbe92ceb6ce2519199ea21" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.567784 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6ed40ee201a387ed00fa5e6cdf48a1b73e0e18205dbe92ceb6ce2519199ea21"} err="failed to get container status \"f6ed40ee201a387ed00fa5e6cdf48a1b73e0e18205dbe92ceb6ce2519199ea21\": rpc error: code = NotFound desc = could not find container \"f6ed40ee201a387ed00fa5e6cdf48a1b73e0e18205dbe92ceb6ce2519199ea21\": container with ID starting with f6ed40ee201a387ed00fa5e6cdf48a1b73e0e18205dbe92ceb6ce2519199ea21 not found: ID does not exist" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.567798 4982 scope.go:117] "RemoveContainer" containerID="aa2ef6f99461d8f03fc9b29b52e4774e84c6cb2e116d71852d837879534a4a15" Jan 22 07:31:51 crc kubenswrapper[4982]: E0122 07:31:51.568148 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"aa2ef6f99461d8f03fc9b29b52e4774e84c6cb2e116d71852d837879534a4a15\": container with ID starting with aa2ef6f99461d8f03fc9b29b52e4774e84c6cb2e116d71852d837879534a4a15 not found: ID does not exist" containerID="aa2ef6f99461d8f03fc9b29b52e4774e84c6cb2e116d71852d837879534a4a15" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.568193 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa2ef6f99461d8f03fc9b29b52e4774e84c6cb2e116d71852d837879534a4a15"} err="failed to get container status \"aa2ef6f99461d8f03fc9b29b52e4774e84c6cb2e116d71852d837879534a4a15\": rpc error: code = NotFound desc = could not find container \"aa2ef6f99461d8f03fc9b29b52e4774e84c6cb2e116d71852d837879534a4a15\": container with ID starting with aa2ef6f99461d8f03fc9b29b52e4774e84c6cb2e116d71852d837879534a4a15 not found: ID does not exist" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.568219 4982 scope.go:117] "RemoveContainer" containerID="5781abecb0862ca1c9624831f81f416fa586a810459327dded876737b48b91e4" Jan 22 07:31:51 crc kubenswrapper[4982]: E0122 07:31:51.568912 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5781abecb0862ca1c9624831f81f416fa586a810459327dded876737b48b91e4\": container with ID starting with 5781abecb0862ca1c9624831f81f416fa586a810459327dded876737b48b91e4 not found: ID does not exist" containerID="5781abecb0862ca1c9624831f81f416fa586a810459327dded876737b48b91e4" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.568936 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5781abecb0862ca1c9624831f81f416fa586a810459327dded876737b48b91e4"} err="failed to get container status \"5781abecb0862ca1c9624831f81f416fa586a810459327dded876737b48b91e4\": rpc error: code = NotFound desc = could not find container \"5781abecb0862ca1c9624831f81f416fa586a810459327dded876737b48b91e4\": container with ID starting with 5781abecb0862ca1c9624831f81f416fa586a810459327dded876737b48b91e4 not found: ID does not exist" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.626014 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7afd50d-f3ce-4801-9879-294147a9435a-scripts\") pod \"ceilometer-0\" (UID: \"e7afd50d-f3ce-4801-9879-294147a9435a\") " pod="openstack/ceilometer-0" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.626068 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e7afd50d-f3ce-4801-9879-294147a9435a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e7afd50d-f3ce-4801-9879-294147a9435a\") " pod="openstack/ceilometer-0" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.626098 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6b2jq\" (UniqueName: \"kubernetes.io/projected/e7afd50d-f3ce-4801-9879-294147a9435a-kube-api-access-6b2jq\") pod \"ceilometer-0\" (UID: \"e7afd50d-f3ce-4801-9879-294147a9435a\") " pod="openstack/ceilometer-0" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.626141 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7afd50d-f3ce-4801-9879-294147a9435a-config-data\") pod \"ceilometer-0\" (UID: 
\"e7afd50d-f3ce-4801-9879-294147a9435a\") " pod="openstack/ceilometer-0" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.626205 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e7afd50d-f3ce-4801-9879-294147a9435a-log-httpd\") pod \"ceilometer-0\" (UID: \"e7afd50d-f3ce-4801-9879-294147a9435a\") " pod="openstack/ceilometer-0" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.626246 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7afd50d-f3ce-4801-9879-294147a9435a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e7afd50d-f3ce-4801-9879-294147a9435a\") " pod="openstack/ceilometer-0" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.626278 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e7afd50d-f3ce-4801-9879-294147a9435a-run-httpd\") pod \"ceilometer-0\" (UID: \"e7afd50d-f3ce-4801-9879-294147a9435a\") " pod="openstack/ceilometer-0" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.728075 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7afd50d-f3ce-4801-9879-294147a9435a-config-data\") pod \"ceilometer-0\" (UID: \"e7afd50d-f3ce-4801-9879-294147a9435a\") " pod="openstack/ceilometer-0" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.728170 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e7afd50d-f3ce-4801-9879-294147a9435a-log-httpd\") pod \"ceilometer-0\" (UID: \"e7afd50d-f3ce-4801-9879-294147a9435a\") " pod="openstack/ceilometer-0" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.728213 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7afd50d-f3ce-4801-9879-294147a9435a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e7afd50d-f3ce-4801-9879-294147a9435a\") " pod="openstack/ceilometer-0" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.728255 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e7afd50d-f3ce-4801-9879-294147a9435a-run-httpd\") pod \"ceilometer-0\" (UID: \"e7afd50d-f3ce-4801-9879-294147a9435a\") " pod="openstack/ceilometer-0" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.728371 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7afd50d-f3ce-4801-9879-294147a9435a-scripts\") pod \"ceilometer-0\" (UID: \"e7afd50d-f3ce-4801-9879-294147a9435a\") " pod="openstack/ceilometer-0" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.728404 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e7afd50d-f3ce-4801-9879-294147a9435a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e7afd50d-f3ce-4801-9879-294147a9435a\") " pod="openstack/ceilometer-0" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.728427 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6b2jq\" (UniqueName: 
\"kubernetes.io/projected/e7afd50d-f3ce-4801-9879-294147a9435a-kube-api-access-6b2jq\") pod \"ceilometer-0\" (UID: \"e7afd50d-f3ce-4801-9879-294147a9435a\") " pod="openstack/ceilometer-0" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.728923 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e7afd50d-f3ce-4801-9879-294147a9435a-run-httpd\") pod \"ceilometer-0\" (UID: \"e7afd50d-f3ce-4801-9879-294147a9435a\") " pod="openstack/ceilometer-0" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.729035 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e7afd50d-f3ce-4801-9879-294147a9435a-log-httpd\") pod \"ceilometer-0\" (UID: \"e7afd50d-f3ce-4801-9879-294147a9435a\") " pod="openstack/ceilometer-0" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.732577 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d92a834e-b7a1-4c75-bad0-7fb0220ef086" path="/var/lib/kubelet/pods/d92a834e-b7a1-4c75-bad0-7fb0220ef086/volumes" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.733717 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7afd50d-f3ce-4801-9879-294147a9435a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e7afd50d-f3ce-4801-9879-294147a9435a\") " pod="openstack/ceilometer-0" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.733820 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7afd50d-f3ce-4801-9879-294147a9435a-scripts\") pod \"ceilometer-0\" (UID: \"e7afd50d-f3ce-4801-9879-294147a9435a\") " pod="openstack/ceilometer-0" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.733843 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7afd50d-f3ce-4801-9879-294147a9435a-config-data\") pod \"ceilometer-0\" (UID: \"e7afd50d-f3ce-4801-9879-294147a9435a\") " pod="openstack/ceilometer-0" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.734967 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e7afd50d-f3ce-4801-9879-294147a9435a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e7afd50d-f3ce-4801-9879-294147a9435a\") " pod="openstack/ceilometer-0" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.753327 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6b2jq\" (UniqueName: \"kubernetes.io/projected/e7afd50d-f3ce-4801-9879-294147a9435a-kube-api-access-6b2jq\") pod \"ceilometer-0\" (UID: \"e7afd50d-f3ce-4801-9879-294147a9435a\") " pod="openstack/ceilometer-0" Jan 22 07:31:51 crc kubenswrapper[4982]: I0122 07:31:51.848463 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 22 07:31:52 crc kubenswrapper[4982]: I0122 07:31:52.434685 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"8a8a4564-d7d6-4801-8bdd-e4554fcf08e0","Type":"ContainerStarted","Data":"f318218e73ce691a22eaf02ed58f530e9142268a142610bc905c2a38946ec1ae"} Jan 22 07:31:52 crc kubenswrapper[4982]: W0122 07:31:52.444797 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode7afd50d_f3ce_4801_9879_294147a9435a.slice/crio-67c3aefc92fcfd138bb92f9db1d84b2f8c87caed46c9be0af4477fb05d2b5850 WatchSource:0}: Error finding container 67c3aefc92fcfd138bb92f9db1d84b2f8c87caed46c9be0af4477fb05d2b5850: Status 404 returned error can't find the container with id 67c3aefc92fcfd138bb92f9db1d84b2f8c87caed46c9be0af4477fb05d2b5850 Jan 22 07:31:52 crc kubenswrapper[4982]: I0122 07:31:52.453997 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 07:31:53 crc kubenswrapper[4982]: I0122 07:31:53.446217 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e7afd50d-f3ce-4801-9879-294147a9435a","Type":"ContainerStarted","Data":"67c3aefc92fcfd138bb92f9db1d84b2f8c87caed46c9be0af4477fb05d2b5850"} Jan 22 07:31:54 crc kubenswrapper[4982]: I0122 07:31:54.456694 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e7afd50d-f3ce-4801-9879-294147a9435a","Type":"ContainerStarted","Data":"027b9abbb471487b47bd4a64c7c780c738a9c49a48ebcb7723915f3c9827fff7"} Jan 22 07:31:54 crc kubenswrapper[4982]: I0122 07:31:54.459295 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"8a8a4564-d7d6-4801-8bdd-e4554fcf08e0","Type":"ContainerStarted","Data":"d7f00f791a4bbcb6dc0ebd9e744ebcfc454cd7395c349d8939c864802edd1b6c"} Jan 22 07:31:54 crc kubenswrapper[4982]: I0122 07:31:54.482296 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-0" podStartSLOduration=1.807352305 podStartE2EDuration="7.482277829s" podCreationTimestamp="2026-01-22 07:31:47 +0000 UTC" firstStartedPulling="2026-01-22 07:31:48.440696893 +0000 UTC m=+6369.279334896" lastFinishedPulling="2026-01-22 07:31:54.115622417 +0000 UTC m=+6374.954260420" observedRunningTime="2026-01-22 07:31:54.478161027 +0000 UTC m=+6375.316799040" watchObservedRunningTime="2026-01-22 07:31:54.482277829 +0000 UTC m=+6375.320915832" Jan 22 07:31:55 crc kubenswrapper[4982]: I0122 07:31:55.483035 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e7afd50d-f3ce-4801-9879-294147a9435a","Type":"ContainerStarted","Data":"f6679244faadef74edbfc5c365b0641a6783edac75b6f8e27a774e07f135b234"} Jan 22 07:31:55 crc kubenswrapper[4982]: I0122 07:31:55.483612 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e7afd50d-f3ce-4801-9879-294147a9435a","Type":"ContainerStarted","Data":"bff7164ef40af5c143bc68c5d3fe90d2963ac36358f1af3cda42ea6088a05a06"} Jan 22 07:31:57 crc kubenswrapper[4982]: I0122 07:31:57.545889 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e7afd50d-f3ce-4801-9879-294147a9435a","Type":"ContainerStarted","Data":"e626abd434ff551017ec8fea47c08292e265b24e03abfba30d2f934f040656aa"} Jan 22 07:31:57 crc kubenswrapper[4982]: I0122 07:31:57.546581 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openstack/ceilometer-0" Jan 22 07:31:57 crc kubenswrapper[4982]: I0122 07:31:57.576808 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.580700848 podStartE2EDuration="6.576785151s" podCreationTimestamp="2026-01-22 07:31:51 +0000 UTC" firstStartedPulling="2026-01-22 07:31:52.448308878 +0000 UTC m=+6373.286946881" lastFinishedPulling="2026-01-22 07:31:56.444393181 +0000 UTC m=+6377.283031184" observedRunningTime="2026-01-22 07:31:57.568094935 +0000 UTC m=+6378.406732938" watchObservedRunningTime="2026-01-22 07:31:57.576785151 +0000 UTC m=+6378.415423154" Jan 22 07:32:00 crc kubenswrapper[4982]: I0122 07:32:00.448121 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-create-w6vrx"] Jan 22 07:32:00 crc kubenswrapper[4982]: I0122 07:32:00.453091 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-w6vrx" Jan 22 07:32:00 crc kubenswrapper[4982]: I0122 07:32:00.465321 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-w6vrx"] Jan 22 07:32:00 crc kubenswrapper[4982]: I0122 07:32:00.560296 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gqvgl\" (UniqueName: \"kubernetes.io/projected/242a1426-a222-444f-805e-b54b394f4ca1-kube-api-access-gqvgl\") pod \"manila-db-create-w6vrx\" (UID: \"242a1426-a222-444f-805e-b54b394f4ca1\") " pod="openstack/manila-db-create-w6vrx" Jan 22 07:32:00 crc kubenswrapper[4982]: I0122 07:32:00.560417 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/242a1426-a222-444f-805e-b54b394f4ca1-operator-scripts\") pod \"manila-db-create-w6vrx\" (UID: \"242a1426-a222-444f-805e-b54b394f4ca1\") " pod="openstack/manila-db-create-w6vrx" Jan 22 07:32:00 crc kubenswrapper[4982]: I0122 07:32:00.563706 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-2f89-account-create-update-ggr5k"] Jan 22 07:32:00 crc kubenswrapper[4982]: I0122 07:32:00.565128 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-2f89-account-create-update-ggr5k" Jan 22 07:32:00 crc kubenswrapper[4982]: I0122 07:32:00.567706 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-db-secret" Jan 22 07:32:00 crc kubenswrapper[4982]: I0122 07:32:00.575081 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-2f89-account-create-update-ggr5k"] Jan 22 07:32:00 crc kubenswrapper[4982]: I0122 07:32:00.662038 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/777282d9-58f5-4a8f-bd6e-e6e7b8269196-operator-scripts\") pod \"manila-2f89-account-create-update-ggr5k\" (UID: \"777282d9-58f5-4a8f-bd6e-e6e7b8269196\") " pod="openstack/manila-2f89-account-create-update-ggr5k" Jan 22 07:32:00 crc kubenswrapper[4982]: I0122 07:32:00.662192 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xpxvv\" (UniqueName: \"kubernetes.io/projected/777282d9-58f5-4a8f-bd6e-e6e7b8269196-kube-api-access-xpxvv\") pod \"manila-2f89-account-create-update-ggr5k\" (UID: \"777282d9-58f5-4a8f-bd6e-e6e7b8269196\") " pod="openstack/manila-2f89-account-create-update-ggr5k" Jan 22 07:32:00 crc kubenswrapper[4982]: I0122 07:32:00.662260 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gqvgl\" (UniqueName: \"kubernetes.io/projected/242a1426-a222-444f-805e-b54b394f4ca1-kube-api-access-gqvgl\") pod \"manila-db-create-w6vrx\" (UID: \"242a1426-a222-444f-805e-b54b394f4ca1\") " pod="openstack/manila-db-create-w6vrx" Jan 22 07:32:00 crc kubenswrapper[4982]: I0122 07:32:00.662354 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/242a1426-a222-444f-805e-b54b394f4ca1-operator-scripts\") pod \"manila-db-create-w6vrx\" (UID: \"242a1426-a222-444f-805e-b54b394f4ca1\") " pod="openstack/manila-db-create-w6vrx" Jan 22 07:32:00 crc kubenswrapper[4982]: I0122 07:32:00.663183 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/242a1426-a222-444f-805e-b54b394f4ca1-operator-scripts\") pod \"manila-db-create-w6vrx\" (UID: \"242a1426-a222-444f-805e-b54b394f4ca1\") " pod="openstack/manila-db-create-w6vrx" Jan 22 07:32:00 crc kubenswrapper[4982]: I0122 07:32:00.688617 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gqvgl\" (UniqueName: \"kubernetes.io/projected/242a1426-a222-444f-805e-b54b394f4ca1-kube-api-access-gqvgl\") pod \"manila-db-create-w6vrx\" (UID: \"242a1426-a222-444f-805e-b54b394f4ca1\") " pod="openstack/manila-db-create-w6vrx" Jan 22 07:32:00 crc kubenswrapper[4982]: I0122 07:32:00.764617 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xpxvv\" (UniqueName: \"kubernetes.io/projected/777282d9-58f5-4a8f-bd6e-e6e7b8269196-kube-api-access-xpxvv\") pod \"manila-2f89-account-create-update-ggr5k\" (UID: \"777282d9-58f5-4a8f-bd6e-e6e7b8269196\") " pod="openstack/manila-2f89-account-create-update-ggr5k" Jan 22 07:32:00 crc kubenswrapper[4982]: I0122 07:32:00.764800 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/777282d9-58f5-4a8f-bd6e-e6e7b8269196-operator-scripts\") pod 
\"manila-2f89-account-create-update-ggr5k\" (UID: \"777282d9-58f5-4a8f-bd6e-e6e7b8269196\") " pod="openstack/manila-2f89-account-create-update-ggr5k" Jan 22 07:32:00 crc kubenswrapper[4982]: I0122 07:32:00.765826 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/777282d9-58f5-4a8f-bd6e-e6e7b8269196-operator-scripts\") pod \"manila-2f89-account-create-update-ggr5k\" (UID: \"777282d9-58f5-4a8f-bd6e-e6e7b8269196\") " pod="openstack/manila-2f89-account-create-update-ggr5k" Jan 22 07:32:00 crc kubenswrapper[4982]: I0122 07:32:00.786867 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xpxvv\" (UniqueName: \"kubernetes.io/projected/777282d9-58f5-4a8f-bd6e-e6e7b8269196-kube-api-access-xpxvv\") pod \"manila-2f89-account-create-update-ggr5k\" (UID: \"777282d9-58f5-4a8f-bd6e-e6e7b8269196\") " pod="openstack/manila-2f89-account-create-update-ggr5k" Jan 22 07:32:00 crc kubenswrapper[4982]: I0122 07:32:00.788949 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-w6vrx" Jan 22 07:32:00 crc kubenswrapper[4982]: I0122 07:32:00.894178 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-2f89-account-create-update-ggr5k" Jan 22 07:32:01 crc kubenswrapper[4982]: I0122 07:32:01.351323 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-w6vrx"] Jan 22 07:32:01 crc kubenswrapper[4982]: W0122 07:32:01.360693 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod242a1426_a222_444f_805e_b54b394f4ca1.slice/crio-d825727b204f34d40c9475c87a69dbbebd4333098c6694836393308da4f15a3b WatchSource:0}: Error finding container d825727b204f34d40c9475c87a69dbbebd4333098c6694836393308da4f15a3b: Status 404 returned error can't find the container with id d825727b204f34d40c9475c87a69dbbebd4333098c6694836393308da4f15a3b Jan 22 07:32:01 crc kubenswrapper[4982]: I0122 07:32:01.484684 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-2f89-account-create-update-ggr5k"] Jan 22 07:32:01 crc kubenswrapper[4982]: W0122 07:32:01.488358 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod777282d9_58f5_4a8f_bd6e_e6e7b8269196.slice/crio-c1295cc7e6496b4a4742cceee2f0c94ad5d05b34eaf281fa06b5d576660494de WatchSource:0}: Error finding container c1295cc7e6496b4a4742cceee2f0c94ad5d05b34eaf281fa06b5d576660494de: Status 404 returned error can't find the container with id c1295cc7e6496b4a4742cceee2f0c94ad5d05b34eaf281fa06b5d576660494de Jan 22 07:32:01 crc kubenswrapper[4982]: I0122 07:32:01.614476 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-2f89-account-create-update-ggr5k" event={"ID":"777282d9-58f5-4a8f-bd6e-e6e7b8269196","Type":"ContainerStarted","Data":"c1295cc7e6496b4a4742cceee2f0c94ad5d05b34eaf281fa06b5d576660494de"} Jan 22 07:32:01 crc kubenswrapper[4982]: I0122 07:32:01.616605 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-w6vrx" event={"ID":"242a1426-a222-444f-805e-b54b394f4ca1","Type":"ContainerStarted","Data":"99344cfb78435195c75851dca1c58d4b5d23795de0e231465e1a9f60015af407"} Jan 22 07:32:01 crc kubenswrapper[4982]: I0122 07:32:01.616641 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-w6vrx" 
event={"ID":"242a1426-a222-444f-805e-b54b394f4ca1","Type":"ContainerStarted","Data":"d825727b204f34d40c9475c87a69dbbebd4333098c6694836393308da4f15a3b"} Jan 22 07:32:01 crc kubenswrapper[4982]: I0122 07:32:01.635950 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-create-w6vrx" podStartSLOduration=1.635922224 podStartE2EDuration="1.635922224s" podCreationTimestamp="2026-01-22 07:32:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:32:01.629422939 +0000 UTC m=+6382.468060932" watchObservedRunningTime="2026-01-22 07:32:01.635922224 +0000 UTC m=+6382.474560247" Jan 22 07:32:02 crc kubenswrapper[4982]: I0122 07:32:02.628817 4982 generic.go:334] "Generic (PLEG): container finished" podID="777282d9-58f5-4a8f-bd6e-e6e7b8269196" containerID="6d8e3cd212b7aff34a24c956c2b7778da798c9594db1d2d461c73dc52dca16d6" exitCode=0 Jan 22 07:32:02 crc kubenswrapper[4982]: I0122 07:32:02.629168 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-2f89-account-create-update-ggr5k" event={"ID":"777282d9-58f5-4a8f-bd6e-e6e7b8269196","Type":"ContainerDied","Data":"6d8e3cd212b7aff34a24c956c2b7778da798c9594db1d2d461c73dc52dca16d6"} Jan 22 07:32:02 crc kubenswrapper[4982]: I0122 07:32:02.630983 4982 generic.go:334] "Generic (PLEG): container finished" podID="242a1426-a222-444f-805e-b54b394f4ca1" containerID="99344cfb78435195c75851dca1c58d4b5d23795de0e231465e1a9f60015af407" exitCode=0 Jan 22 07:32:02 crc kubenswrapper[4982]: I0122 07:32:02.631013 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-w6vrx" event={"ID":"242a1426-a222-444f-805e-b54b394f4ca1","Type":"ContainerDied","Data":"99344cfb78435195c75851dca1c58d4b5d23795de0e231465e1a9f60015af407"} Jan 22 07:32:04 crc kubenswrapper[4982]: I0122 07:32:04.193073 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-2f89-account-create-update-ggr5k" Jan 22 07:32:04 crc kubenswrapper[4982]: I0122 07:32:04.201067 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-create-w6vrx" Jan 22 07:32:04 crc kubenswrapper[4982]: I0122 07:32:04.334606 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/242a1426-a222-444f-805e-b54b394f4ca1-operator-scripts\") pod \"242a1426-a222-444f-805e-b54b394f4ca1\" (UID: \"242a1426-a222-444f-805e-b54b394f4ca1\") " Jan 22 07:32:04 crc kubenswrapper[4982]: I0122 07:32:04.335048 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gqvgl\" (UniqueName: \"kubernetes.io/projected/242a1426-a222-444f-805e-b54b394f4ca1-kube-api-access-gqvgl\") pod \"242a1426-a222-444f-805e-b54b394f4ca1\" (UID: \"242a1426-a222-444f-805e-b54b394f4ca1\") " Jan 22 07:32:04 crc kubenswrapper[4982]: I0122 07:32:04.335289 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xpxvv\" (UniqueName: \"kubernetes.io/projected/777282d9-58f5-4a8f-bd6e-e6e7b8269196-kube-api-access-xpxvv\") pod \"777282d9-58f5-4a8f-bd6e-e6e7b8269196\" (UID: \"777282d9-58f5-4a8f-bd6e-e6e7b8269196\") " Jan 22 07:32:04 crc kubenswrapper[4982]: I0122 07:32:04.335541 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/242a1426-a222-444f-805e-b54b394f4ca1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "242a1426-a222-444f-805e-b54b394f4ca1" (UID: "242a1426-a222-444f-805e-b54b394f4ca1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:32:04 crc kubenswrapper[4982]: I0122 07:32:04.336199 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/777282d9-58f5-4a8f-bd6e-e6e7b8269196-operator-scripts\") pod \"777282d9-58f5-4a8f-bd6e-e6e7b8269196\" (UID: \"777282d9-58f5-4a8f-bd6e-e6e7b8269196\") " Jan 22 07:32:04 crc kubenswrapper[4982]: I0122 07:32:04.336793 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/777282d9-58f5-4a8f-bd6e-e6e7b8269196-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "777282d9-58f5-4a8f-bd6e-e6e7b8269196" (UID: "777282d9-58f5-4a8f-bd6e-e6e7b8269196"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:32:04 crc kubenswrapper[4982]: I0122 07:32:04.337140 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/777282d9-58f5-4a8f-bd6e-e6e7b8269196-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:32:04 crc kubenswrapper[4982]: I0122 07:32:04.337225 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/242a1426-a222-444f-805e-b54b394f4ca1-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:32:04 crc kubenswrapper[4982]: I0122 07:32:04.342459 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/777282d9-58f5-4a8f-bd6e-e6e7b8269196-kube-api-access-xpxvv" (OuterVolumeSpecName: "kube-api-access-xpxvv") pod "777282d9-58f5-4a8f-bd6e-e6e7b8269196" (UID: "777282d9-58f5-4a8f-bd6e-e6e7b8269196"). InnerVolumeSpecName "kube-api-access-xpxvv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:32:04 crc kubenswrapper[4982]: I0122 07:32:04.343473 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/242a1426-a222-444f-805e-b54b394f4ca1-kube-api-access-gqvgl" (OuterVolumeSpecName: "kube-api-access-gqvgl") pod "242a1426-a222-444f-805e-b54b394f4ca1" (UID: "242a1426-a222-444f-805e-b54b394f4ca1"). InnerVolumeSpecName "kube-api-access-gqvgl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:32:04 crc kubenswrapper[4982]: I0122 07:32:04.439732 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gqvgl\" (UniqueName: \"kubernetes.io/projected/242a1426-a222-444f-805e-b54b394f4ca1-kube-api-access-gqvgl\") on node \"crc\" DevicePath \"\"" Jan 22 07:32:04 crc kubenswrapper[4982]: I0122 07:32:04.439790 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xpxvv\" (UniqueName: \"kubernetes.io/projected/777282d9-58f5-4a8f-bd6e-e6e7b8269196-kube-api-access-xpxvv\") on node \"crc\" DevicePath \"\"" Jan 22 07:32:04 crc kubenswrapper[4982]: I0122 07:32:04.655178 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-w6vrx" event={"ID":"242a1426-a222-444f-805e-b54b394f4ca1","Type":"ContainerDied","Data":"d825727b204f34d40c9475c87a69dbbebd4333098c6694836393308da4f15a3b"} Jan 22 07:32:04 crc kubenswrapper[4982]: I0122 07:32:04.655225 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d825727b204f34d40c9475c87a69dbbebd4333098c6694836393308da4f15a3b" Jan 22 07:32:04 crc kubenswrapper[4982]: I0122 07:32:04.655290 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-w6vrx" Jan 22 07:32:04 crc kubenswrapper[4982]: I0122 07:32:04.657322 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-2f89-account-create-update-ggr5k" event={"ID":"777282d9-58f5-4a8f-bd6e-e6e7b8269196","Type":"ContainerDied","Data":"c1295cc7e6496b4a4742cceee2f0c94ad5d05b34eaf281fa06b5d576660494de"} Jan 22 07:32:04 crc kubenswrapper[4982]: I0122 07:32:04.657350 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c1295cc7e6496b4a4742cceee2f0c94ad5d05b34eaf281fa06b5d576660494de" Jan 22 07:32:04 crc kubenswrapper[4982]: I0122 07:32:04.657399 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-2f89-account-create-update-ggr5k" Jan 22 07:32:05 crc kubenswrapper[4982]: I0122 07:32:05.928751 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-sync-vhkxb"] Jan 22 07:32:05 crc kubenswrapper[4982]: E0122 07:32:05.929641 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="242a1426-a222-444f-805e-b54b394f4ca1" containerName="mariadb-database-create" Jan 22 07:32:05 crc kubenswrapper[4982]: I0122 07:32:05.929652 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="242a1426-a222-444f-805e-b54b394f4ca1" containerName="mariadb-database-create" Jan 22 07:32:05 crc kubenswrapper[4982]: E0122 07:32:05.929682 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="777282d9-58f5-4a8f-bd6e-e6e7b8269196" containerName="mariadb-account-create-update" Jan 22 07:32:05 crc kubenswrapper[4982]: I0122 07:32:05.929689 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="777282d9-58f5-4a8f-bd6e-e6e7b8269196" containerName="mariadb-account-create-update" Jan 22 07:32:05 crc kubenswrapper[4982]: I0122 07:32:05.929934 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="777282d9-58f5-4a8f-bd6e-e6e7b8269196" containerName="mariadb-account-create-update" Jan 22 07:32:05 crc kubenswrapper[4982]: I0122 07:32:05.929946 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="242a1426-a222-444f-805e-b54b394f4ca1" containerName="mariadb-database-create" Jan 22 07:32:05 crc kubenswrapper[4982]: I0122 07:32:05.930612 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-vhkxb" Jan 22 07:32:05 crc kubenswrapper[4982]: I0122 07:32:05.932325 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-z52hn" Jan 22 07:32:05 crc kubenswrapper[4982]: I0122 07:32:05.935699 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Jan 22 07:32:05 crc kubenswrapper[4982]: I0122 07:32:05.955007 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-vhkxb"] Jan 22 07:32:06 crc kubenswrapper[4982]: I0122 07:32:06.070916 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2rhd\" (UniqueName: \"kubernetes.io/projected/cf4cd78e-2b3f-4f15-aefc-850938dbadf6-kube-api-access-v2rhd\") pod \"manila-db-sync-vhkxb\" (UID: \"cf4cd78e-2b3f-4f15-aefc-850938dbadf6\") " pod="openstack/manila-db-sync-vhkxb" Jan 22 07:32:06 crc kubenswrapper[4982]: I0122 07:32:06.071227 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf4cd78e-2b3f-4f15-aefc-850938dbadf6-config-data\") pod \"manila-db-sync-vhkxb\" (UID: \"cf4cd78e-2b3f-4f15-aefc-850938dbadf6\") " pod="openstack/manila-db-sync-vhkxb" Jan 22 07:32:06 crc kubenswrapper[4982]: I0122 07:32:06.071663 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/cf4cd78e-2b3f-4f15-aefc-850938dbadf6-job-config-data\") pod \"manila-db-sync-vhkxb\" (UID: \"cf4cd78e-2b3f-4f15-aefc-850938dbadf6\") " pod="openstack/manila-db-sync-vhkxb" Jan 22 07:32:06 crc kubenswrapper[4982]: I0122 07:32:06.071943 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/cf4cd78e-2b3f-4f15-aefc-850938dbadf6-combined-ca-bundle\") pod \"manila-db-sync-vhkxb\" (UID: \"cf4cd78e-2b3f-4f15-aefc-850938dbadf6\") " pod="openstack/manila-db-sync-vhkxb" Jan 22 07:32:06 crc kubenswrapper[4982]: I0122 07:32:06.174116 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf4cd78e-2b3f-4f15-aefc-850938dbadf6-config-data\") pod \"manila-db-sync-vhkxb\" (UID: \"cf4cd78e-2b3f-4f15-aefc-850938dbadf6\") " pod="openstack/manila-db-sync-vhkxb" Jan 22 07:32:06 crc kubenswrapper[4982]: I0122 07:32:06.174312 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/cf4cd78e-2b3f-4f15-aefc-850938dbadf6-job-config-data\") pod \"manila-db-sync-vhkxb\" (UID: \"cf4cd78e-2b3f-4f15-aefc-850938dbadf6\") " pod="openstack/manila-db-sync-vhkxb" Jan 22 07:32:06 crc kubenswrapper[4982]: I0122 07:32:06.174443 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf4cd78e-2b3f-4f15-aefc-850938dbadf6-combined-ca-bundle\") pod \"manila-db-sync-vhkxb\" (UID: \"cf4cd78e-2b3f-4f15-aefc-850938dbadf6\") " pod="openstack/manila-db-sync-vhkxb" Jan 22 07:32:06 crc kubenswrapper[4982]: I0122 07:32:06.174504 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2rhd\" (UniqueName: \"kubernetes.io/projected/cf4cd78e-2b3f-4f15-aefc-850938dbadf6-kube-api-access-v2rhd\") pod \"manila-db-sync-vhkxb\" (UID: \"cf4cd78e-2b3f-4f15-aefc-850938dbadf6\") " pod="openstack/manila-db-sync-vhkxb" Jan 22 07:32:06 crc kubenswrapper[4982]: I0122 07:32:06.185731 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf4cd78e-2b3f-4f15-aefc-850938dbadf6-combined-ca-bundle\") pod \"manila-db-sync-vhkxb\" (UID: \"cf4cd78e-2b3f-4f15-aefc-850938dbadf6\") " pod="openstack/manila-db-sync-vhkxb" Jan 22 07:32:06 crc kubenswrapper[4982]: I0122 07:32:06.185896 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf4cd78e-2b3f-4f15-aefc-850938dbadf6-config-data\") pod \"manila-db-sync-vhkxb\" (UID: \"cf4cd78e-2b3f-4f15-aefc-850938dbadf6\") " pod="openstack/manila-db-sync-vhkxb" Jan 22 07:32:06 crc kubenswrapper[4982]: I0122 07:32:06.185903 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/cf4cd78e-2b3f-4f15-aefc-850938dbadf6-job-config-data\") pod \"manila-db-sync-vhkxb\" (UID: \"cf4cd78e-2b3f-4f15-aefc-850938dbadf6\") " pod="openstack/manila-db-sync-vhkxb" Jan 22 07:32:06 crc kubenswrapper[4982]: I0122 07:32:06.191163 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2rhd\" (UniqueName: \"kubernetes.io/projected/cf4cd78e-2b3f-4f15-aefc-850938dbadf6-kube-api-access-v2rhd\") pod \"manila-db-sync-vhkxb\" (UID: \"cf4cd78e-2b3f-4f15-aefc-850938dbadf6\") " pod="openstack/manila-db-sync-vhkxb" Jan 22 07:32:06 crc kubenswrapper[4982]: I0122 07:32:06.247833 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-vhkxb" Jan 22 07:32:06 crc kubenswrapper[4982]: I0122 07:32:06.835056 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-vhkxb"] Jan 22 07:32:07 crc kubenswrapper[4982]: I0122 07:32:07.465682 4982 scope.go:117] "RemoveContainer" containerID="f40c7977aa66361b43cce1fb85a0e9640ec417b46b0e1bab1205970879999852" Jan 22 07:32:07 crc kubenswrapper[4982]: I0122 07:32:07.502381 4982 scope.go:117] "RemoveContainer" containerID="2d1a27a8684f46ec867f38116bb0ac87dc1d2ed0dffe13595ecba038acfb46db" Jan 22 07:32:07 crc kubenswrapper[4982]: I0122 07:32:07.550866 4982 scope.go:117] "RemoveContainer" containerID="8744c92df627d0b0ece049b3aaee50058b2a19ca9b71876543b644ff8dfcf280" Jan 22 07:32:07 crc kubenswrapper[4982]: I0122 07:32:07.689796 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-vhkxb" event={"ID":"cf4cd78e-2b3f-4f15-aefc-850938dbadf6","Type":"ContainerStarted","Data":"805b7b67185800e2fb8c399505904df400ee583f6b09d1303f6e014bfe0b13b9"} Jan 22 07:32:13 crc kubenswrapper[4982]: I0122 07:32:13.781040 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-vhkxb" event={"ID":"cf4cd78e-2b3f-4f15-aefc-850938dbadf6","Type":"ContainerStarted","Data":"cbf18aba83b773b2138be0e346472afcaff7b48783a019ba8c74c7ddaa5965a9"} Jan 22 07:32:13 crc kubenswrapper[4982]: I0122 07:32:13.810490 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-sync-vhkxb" podStartSLOduration=3.33587618 podStartE2EDuration="8.81047125s" podCreationTimestamp="2026-01-22 07:32:05 +0000 UTC" firstStartedPulling="2026-01-22 07:32:06.837632142 +0000 UTC m=+6387.676270145" lastFinishedPulling="2026-01-22 07:32:12.312227212 +0000 UTC m=+6393.150865215" observedRunningTime="2026-01-22 07:32:13.797423258 +0000 UTC m=+6394.636061261" watchObservedRunningTime="2026-01-22 07:32:13.81047125 +0000 UTC m=+6394.649109253" Jan 22 07:32:14 crc kubenswrapper[4982]: I0122 07:32:14.792033 4982 generic.go:334] "Generic (PLEG): container finished" podID="cf4cd78e-2b3f-4f15-aefc-850938dbadf6" containerID="cbf18aba83b773b2138be0e346472afcaff7b48783a019ba8c74c7ddaa5965a9" exitCode=0 Jan 22 07:32:14 crc kubenswrapper[4982]: I0122 07:32:14.792118 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-vhkxb" event={"ID":"cf4cd78e-2b3f-4f15-aefc-850938dbadf6","Type":"ContainerDied","Data":"cbf18aba83b773b2138be0e346472afcaff7b48783a019ba8c74c7ddaa5965a9"} Jan 22 07:32:16 crc kubenswrapper[4982]: I0122 07:32:16.357873 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-vhkxb" Jan 22 07:32:16 crc kubenswrapper[4982]: I0122 07:32:16.492413 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf4cd78e-2b3f-4f15-aefc-850938dbadf6-combined-ca-bundle\") pod \"cf4cd78e-2b3f-4f15-aefc-850938dbadf6\" (UID: \"cf4cd78e-2b3f-4f15-aefc-850938dbadf6\") " Jan 22 07:32:16 crc kubenswrapper[4982]: I0122 07:32:16.492499 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/cf4cd78e-2b3f-4f15-aefc-850938dbadf6-job-config-data\") pod \"cf4cd78e-2b3f-4f15-aefc-850938dbadf6\" (UID: \"cf4cd78e-2b3f-4f15-aefc-850938dbadf6\") " Jan 22 07:32:16 crc kubenswrapper[4982]: I0122 07:32:16.492699 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf4cd78e-2b3f-4f15-aefc-850938dbadf6-config-data\") pod \"cf4cd78e-2b3f-4f15-aefc-850938dbadf6\" (UID: \"cf4cd78e-2b3f-4f15-aefc-850938dbadf6\") " Jan 22 07:32:16 crc kubenswrapper[4982]: I0122 07:32:16.492921 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v2rhd\" (UniqueName: \"kubernetes.io/projected/cf4cd78e-2b3f-4f15-aefc-850938dbadf6-kube-api-access-v2rhd\") pod \"cf4cd78e-2b3f-4f15-aefc-850938dbadf6\" (UID: \"cf4cd78e-2b3f-4f15-aefc-850938dbadf6\") " Jan 22 07:32:16 crc kubenswrapper[4982]: I0122 07:32:16.499741 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf4cd78e-2b3f-4f15-aefc-850938dbadf6-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "cf4cd78e-2b3f-4f15-aefc-850938dbadf6" (UID: "cf4cd78e-2b3f-4f15-aefc-850938dbadf6"). InnerVolumeSpecName "job-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:32:16 crc kubenswrapper[4982]: I0122 07:32:16.499957 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf4cd78e-2b3f-4f15-aefc-850938dbadf6-kube-api-access-v2rhd" (OuterVolumeSpecName: "kube-api-access-v2rhd") pod "cf4cd78e-2b3f-4f15-aefc-850938dbadf6" (UID: "cf4cd78e-2b3f-4f15-aefc-850938dbadf6"). InnerVolumeSpecName "kube-api-access-v2rhd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:32:16 crc kubenswrapper[4982]: I0122 07:32:16.502701 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf4cd78e-2b3f-4f15-aefc-850938dbadf6-config-data" (OuterVolumeSpecName: "config-data") pod "cf4cd78e-2b3f-4f15-aefc-850938dbadf6" (UID: "cf4cd78e-2b3f-4f15-aefc-850938dbadf6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:32:16 crc kubenswrapper[4982]: I0122 07:32:16.529669 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf4cd78e-2b3f-4f15-aefc-850938dbadf6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cf4cd78e-2b3f-4f15-aefc-850938dbadf6" (UID: "cf4cd78e-2b3f-4f15-aefc-850938dbadf6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:32:16 crc kubenswrapper[4982]: I0122 07:32:16.595883 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf4cd78e-2b3f-4f15-aefc-850938dbadf6-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:32:16 crc kubenswrapper[4982]: I0122 07:32:16.595928 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v2rhd\" (UniqueName: \"kubernetes.io/projected/cf4cd78e-2b3f-4f15-aefc-850938dbadf6-kube-api-access-v2rhd\") on node \"crc\" DevicePath \"\"" Jan 22 07:32:16 crc kubenswrapper[4982]: I0122 07:32:16.595937 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf4cd78e-2b3f-4f15-aefc-850938dbadf6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:32:16 crc kubenswrapper[4982]: I0122 07:32:16.595951 4982 reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/cf4cd78e-2b3f-4f15-aefc-850938dbadf6-job-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:32:16 crc kubenswrapper[4982]: I0122 07:32:16.818683 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-vhkxb" event={"ID":"cf4cd78e-2b3f-4f15-aefc-850938dbadf6","Type":"ContainerDied","Data":"805b7b67185800e2fb8c399505904df400ee583f6b09d1303f6e014bfe0b13b9"} Jan 22 07:32:16 crc kubenswrapper[4982]: I0122 07:32:16.819089 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="805b7b67185800e2fb8c399505904df400ee583f6b09d1303f6e014bfe0b13b9" Jan 22 07:32:16 crc kubenswrapper[4982]: I0122 07:32:16.818809 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-vhkxb" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.223641 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Jan 22 07:32:17 crc kubenswrapper[4982]: E0122 07:32:17.224097 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf4cd78e-2b3f-4f15-aefc-850938dbadf6" containerName="manila-db-sync" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.224115 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf4cd78e-2b3f-4f15-aefc-850938dbadf6" containerName="manila-db-sync" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.224328 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf4cd78e-2b3f-4f15-aefc-850938dbadf6" containerName="manila-db-sync" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.225416 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.230771 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scripts" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.231133 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.231161 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.232673 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-z52hn" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.243996 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.247408 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.254586 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.266404 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.309901 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.311000 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3\") " pod="openstack/manila-scheduler-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.311069 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3-config-data\") pod \"manila-scheduler-0\" (UID: \"5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3\") " pod="openstack/manila-scheduler-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.311088 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3-scripts\") pod \"manila-scheduler-0\" (UID: \"5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3\") " pod="openstack/manila-scheduler-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.311200 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3\") " pod="openstack/manila-scheduler-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.311224 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3\") " pod="openstack/manila-scheduler-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.311279 4982 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zc62\" (UniqueName: \"kubernetes.io/projected/5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3-kube-api-access-5zc62\") pod \"manila-scheduler-0\" (UID: \"5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3\") " pod="openstack/manila-scheduler-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.333780 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8589d7b9f9-lz752"] Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.364267 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8589d7b9f9-lz752" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.401635 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8589d7b9f9-lz752"] Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.415205 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jd545\" (UniqueName: \"kubernetes.io/projected/39694168-7dd0-449e-b3f7-fdc47ba2689e-kube-api-access-jd545\") pod \"manila-share-share1-0\" (UID: \"39694168-7dd0-449e-b3f7-fdc47ba2689e\") " pod="openstack/manila-share-share1-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.415842 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3-config-data\") pod \"manila-scheduler-0\" (UID: \"5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3\") " pod="openstack/manila-scheduler-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.416424 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3-scripts\") pod \"manila-scheduler-0\" (UID: \"5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3\") " pod="openstack/manila-scheduler-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.417043 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39694168-7dd0-449e-b3f7-fdc47ba2689e-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"39694168-7dd0-449e-b3f7-fdc47ba2689e\") " pod="openstack/manila-share-share1-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.417109 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c5e4058-2c05-4b26-9364-f828e9e01253-dns-svc\") pod \"dnsmasq-dns-8589d7b9f9-lz752\" (UID: \"5c5e4058-2c05-4b26-9364-f828e9e01253\") " pod="openstack/dnsmasq-dns-8589d7b9f9-lz752" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.417365 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3\") " pod="openstack/manila-scheduler-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.417407 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3\") " pod="openstack/manila-scheduler-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 
07:32:17.417603 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/39694168-7dd0-449e-b3f7-fdc47ba2689e-ceph\") pod \"manila-share-share1-0\" (UID: \"39694168-7dd0-449e-b3f7-fdc47ba2689e\") " pod="openstack/manila-share-share1-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.417798 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/39694168-7dd0-449e-b3f7-fdc47ba2689e-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"39694168-7dd0-449e-b3f7-fdc47ba2689e\") " pod="openstack/manila-share-share1-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.417986 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c5e4058-2c05-4b26-9364-f828e9e01253-config\") pod \"dnsmasq-dns-8589d7b9f9-lz752\" (UID: \"5c5e4058-2c05-4b26-9364-f828e9e01253\") " pod="openstack/dnsmasq-dns-8589d7b9f9-lz752" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.418102 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3\") " pod="openstack/manila-scheduler-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.418616 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/39694168-7dd0-449e-b3f7-fdc47ba2689e-scripts\") pod \"manila-share-share1-0\" (UID: \"39694168-7dd0-449e-b3f7-fdc47ba2689e\") " pod="openstack/manila-share-share1-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.418816 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/39694168-7dd0-449e-b3f7-fdc47ba2689e-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"39694168-7dd0-449e-b3f7-fdc47ba2689e\") " pod="openstack/manila-share-share1-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.419022 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/39694168-7dd0-449e-b3f7-fdc47ba2689e-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"39694168-7dd0-449e-b3f7-fdc47ba2689e\") " pod="openstack/manila-share-share1-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.419064 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c5e4058-2c05-4b26-9364-f828e9e01253-ovsdbserver-sb\") pod \"dnsmasq-dns-8589d7b9f9-lz752\" (UID: \"5c5e4058-2c05-4b26-9364-f828e9e01253\") " pod="openstack/dnsmasq-dns-8589d7b9f9-lz752" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.419095 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zc62\" (UniqueName: \"kubernetes.io/projected/5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3-kube-api-access-5zc62\") pod \"manila-scheduler-0\" (UID: \"5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3\") " pod="openstack/manila-scheduler-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.419122 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtnpk\" (UniqueName: \"kubernetes.io/projected/5c5e4058-2c05-4b26-9364-f828e9e01253-kube-api-access-wtnpk\") pod \"dnsmasq-dns-8589d7b9f9-lz752\" (UID: \"5c5e4058-2c05-4b26-9364-f828e9e01253\") " pod="openstack/dnsmasq-dns-8589d7b9f9-lz752" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.419448 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3\") " pod="openstack/manila-scheduler-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.420171 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c5e4058-2c05-4b26-9364-f828e9e01253-ovsdbserver-nb\") pod \"dnsmasq-dns-8589d7b9f9-lz752\" (UID: \"5c5e4058-2c05-4b26-9364-f828e9e01253\") " pod="openstack/dnsmasq-dns-8589d7b9f9-lz752" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.420309 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39694168-7dd0-449e-b3f7-fdc47ba2689e-config-data\") pod \"manila-share-share1-0\" (UID: \"39694168-7dd0-449e-b3f7-fdc47ba2689e\") " pod="openstack/manila-share-share1-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.422532 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3-scripts\") pod \"manila-scheduler-0\" (UID: \"5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3\") " pod="openstack/manila-scheduler-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.422614 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3\") " pod="openstack/manila-scheduler-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.423608 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3-config-data\") pod \"manila-scheduler-0\" (UID: \"5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3\") " pod="openstack/manila-scheduler-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.436654 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3\") " pod="openstack/manila-scheduler-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.436974 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zc62\" (UniqueName: \"kubernetes.io/projected/5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3-kube-api-access-5zc62\") pod \"manila-scheduler-0\" (UID: \"5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3\") " pod="openstack/manila-scheduler-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.522105 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/39694168-7dd0-449e-b3f7-fdc47ba2689e-ceph\") pod 
\"manila-share-share1-0\" (UID: \"39694168-7dd0-449e-b3f7-fdc47ba2689e\") " pod="openstack/manila-share-share1-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.522158 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/39694168-7dd0-449e-b3f7-fdc47ba2689e-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"39694168-7dd0-449e-b3f7-fdc47ba2689e\") " pod="openstack/manila-share-share1-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.522184 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c5e4058-2c05-4b26-9364-f828e9e01253-config\") pod \"dnsmasq-dns-8589d7b9f9-lz752\" (UID: \"5c5e4058-2c05-4b26-9364-f828e9e01253\") " pod="openstack/dnsmasq-dns-8589d7b9f9-lz752" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.522202 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/39694168-7dd0-449e-b3f7-fdc47ba2689e-scripts\") pod \"manila-share-share1-0\" (UID: \"39694168-7dd0-449e-b3f7-fdc47ba2689e\") " pod="openstack/manila-share-share1-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.522231 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/39694168-7dd0-449e-b3f7-fdc47ba2689e-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"39694168-7dd0-449e-b3f7-fdc47ba2689e\") " pod="openstack/manila-share-share1-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.522246 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/39694168-7dd0-449e-b3f7-fdc47ba2689e-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"39694168-7dd0-449e-b3f7-fdc47ba2689e\") " pod="openstack/manila-share-share1-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.522262 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c5e4058-2c05-4b26-9364-f828e9e01253-ovsdbserver-sb\") pod \"dnsmasq-dns-8589d7b9f9-lz752\" (UID: \"5c5e4058-2c05-4b26-9364-f828e9e01253\") " pod="openstack/dnsmasq-dns-8589d7b9f9-lz752" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.522284 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtnpk\" (UniqueName: \"kubernetes.io/projected/5c5e4058-2c05-4b26-9364-f828e9e01253-kube-api-access-wtnpk\") pod \"dnsmasq-dns-8589d7b9f9-lz752\" (UID: \"5c5e4058-2c05-4b26-9364-f828e9e01253\") " pod="openstack/dnsmasq-dns-8589d7b9f9-lz752" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.522336 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c5e4058-2c05-4b26-9364-f828e9e01253-ovsdbserver-nb\") pod \"dnsmasq-dns-8589d7b9f9-lz752\" (UID: \"5c5e4058-2c05-4b26-9364-f828e9e01253\") " pod="openstack/dnsmasq-dns-8589d7b9f9-lz752" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.522357 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39694168-7dd0-449e-b3f7-fdc47ba2689e-config-data\") pod \"manila-share-share1-0\" (UID: \"39694168-7dd0-449e-b3f7-fdc47ba2689e\") " pod="openstack/manila-share-share1-0" Jan 22 07:32:17 crc 
kubenswrapper[4982]: I0122 07:32:17.522378 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jd545\" (UniqueName: \"kubernetes.io/projected/39694168-7dd0-449e-b3f7-fdc47ba2689e-kube-api-access-jd545\") pod \"manila-share-share1-0\" (UID: \"39694168-7dd0-449e-b3f7-fdc47ba2689e\") " pod="openstack/manila-share-share1-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.522423 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/39694168-7dd0-449e-b3f7-fdc47ba2689e-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"39694168-7dd0-449e-b3f7-fdc47ba2689e\") " pod="openstack/manila-share-share1-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.522429 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39694168-7dd0-449e-b3f7-fdc47ba2689e-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"39694168-7dd0-449e-b3f7-fdc47ba2689e\") " pod="openstack/manila-share-share1-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.522479 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c5e4058-2c05-4b26-9364-f828e9e01253-dns-svc\") pod \"dnsmasq-dns-8589d7b9f9-lz752\" (UID: \"5c5e4058-2c05-4b26-9364-f828e9e01253\") " pod="openstack/dnsmasq-dns-8589d7b9f9-lz752" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.523303 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c5e4058-2c05-4b26-9364-f828e9e01253-dns-svc\") pod \"dnsmasq-dns-8589d7b9f9-lz752\" (UID: \"5c5e4058-2c05-4b26-9364-f828e9e01253\") " pod="openstack/dnsmasq-dns-8589d7b9f9-lz752" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.523358 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/39694168-7dd0-449e-b3f7-fdc47ba2689e-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"39694168-7dd0-449e-b3f7-fdc47ba2689e\") " pod="openstack/manila-share-share1-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.524054 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c5e4058-2c05-4b26-9364-f828e9e01253-config\") pod \"dnsmasq-dns-8589d7b9f9-lz752\" (UID: \"5c5e4058-2c05-4b26-9364-f828e9e01253\") " pod="openstack/dnsmasq-dns-8589d7b9f9-lz752" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.524583 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c5e4058-2c05-4b26-9364-f828e9e01253-ovsdbserver-sb\") pod \"dnsmasq-dns-8589d7b9f9-lz752\" (UID: \"5c5e4058-2c05-4b26-9364-f828e9e01253\") " pod="openstack/dnsmasq-dns-8589d7b9f9-lz752" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.524597 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c5e4058-2c05-4b26-9364-f828e9e01253-ovsdbserver-nb\") pod \"dnsmasq-dns-8589d7b9f9-lz752\" (UID: \"5c5e4058-2c05-4b26-9364-f828e9e01253\") " pod="openstack/dnsmasq-dns-8589d7b9f9-lz752" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.528556 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: 
\"kubernetes.io/projected/39694168-7dd0-449e-b3f7-fdc47ba2689e-ceph\") pod \"manila-share-share1-0\" (UID: \"39694168-7dd0-449e-b3f7-fdc47ba2689e\") " pod="openstack/manila-share-share1-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.528813 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/39694168-7dd0-449e-b3f7-fdc47ba2689e-scripts\") pod \"manila-share-share1-0\" (UID: \"39694168-7dd0-449e-b3f7-fdc47ba2689e\") " pod="openstack/manila-share-share1-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.528985 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39694168-7dd0-449e-b3f7-fdc47ba2689e-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"39694168-7dd0-449e-b3f7-fdc47ba2689e\") " pod="openstack/manila-share-share1-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.529028 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39694168-7dd0-449e-b3f7-fdc47ba2689e-config-data\") pod \"manila-share-share1-0\" (UID: \"39694168-7dd0-449e-b3f7-fdc47ba2689e\") " pod="openstack/manila-share-share1-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.529178 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/39694168-7dd0-449e-b3f7-fdc47ba2689e-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"39694168-7dd0-449e-b3f7-fdc47ba2689e\") " pod="openstack/manila-share-share1-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.541010 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wtnpk\" (UniqueName: \"kubernetes.io/projected/5c5e4058-2c05-4b26-9364-f828e9e01253-kube-api-access-wtnpk\") pod \"dnsmasq-dns-8589d7b9f9-lz752\" (UID: \"5c5e4058-2c05-4b26-9364-f828e9e01253\") " pod="openstack/dnsmasq-dns-8589d7b9f9-lz752" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.543369 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jd545\" (UniqueName: \"kubernetes.io/projected/39694168-7dd0-449e-b3f7-fdc47ba2689e-kube-api-access-jd545\") pod \"manila-share-share1-0\" (UID: \"39694168-7dd0-449e-b3f7-fdc47ba2689e\") " pod="openstack/manila-share-share1-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.563601 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.592450 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"] Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.594463 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.598345 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.603583 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.627212 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/630f9011-553f-445e-97a7-35448f3bf607-etc-machine-id\") pod \"manila-api-0\" (UID: \"630f9011-553f-445e-97a7-35448f3bf607\") " pod="openstack/manila-api-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.627249 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/630f9011-553f-445e-97a7-35448f3bf607-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"630f9011-553f-445e-97a7-35448f3bf607\") " pod="openstack/manila-api-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.627267 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/630f9011-553f-445e-97a7-35448f3bf607-config-data\") pod \"manila-api-0\" (UID: \"630f9011-553f-445e-97a7-35448f3bf607\") " pod="openstack/manila-api-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.627298 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/630f9011-553f-445e-97a7-35448f3bf607-config-data-custom\") pod \"manila-api-0\" (UID: \"630f9011-553f-445e-97a7-35448f3bf607\") " pod="openstack/manila-api-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.627365 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpb7q\" (UniqueName: \"kubernetes.io/projected/630f9011-553f-445e-97a7-35448f3bf607-kube-api-access-gpb7q\") pod \"manila-api-0\" (UID: \"630f9011-553f-445e-97a7-35448f3bf607\") " pod="openstack/manila-api-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.627410 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/630f9011-553f-445e-97a7-35448f3bf607-scripts\") pod \"manila-api-0\" (UID: \"630f9011-553f-445e-97a7-35448f3bf607\") " pod="openstack/manila-api-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.627467 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/630f9011-553f-445e-97a7-35448f3bf607-logs\") pod \"manila-api-0\" (UID: \"630f9011-553f-445e-97a7-35448f3bf607\") " pod="openstack/manila-api-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.645824 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.696998 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8589d7b9f9-lz752" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.729487 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/630f9011-553f-445e-97a7-35448f3bf607-logs\") pod \"manila-api-0\" (UID: \"630f9011-553f-445e-97a7-35448f3bf607\") " pod="openstack/manila-api-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.729586 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/630f9011-553f-445e-97a7-35448f3bf607-etc-machine-id\") pod \"manila-api-0\" (UID: \"630f9011-553f-445e-97a7-35448f3bf607\") " pod="openstack/manila-api-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.729606 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/630f9011-553f-445e-97a7-35448f3bf607-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"630f9011-553f-445e-97a7-35448f3bf607\") " pod="openstack/manila-api-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.729623 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/630f9011-553f-445e-97a7-35448f3bf607-config-data\") pod \"manila-api-0\" (UID: \"630f9011-553f-445e-97a7-35448f3bf607\") " pod="openstack/manila-api-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.729655 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/630f9011-553f-445e-97a7-35448f3bf607-config-data-custom\") pod \"manila-api-0\" (UID: \"630f9011-553f-445e-97a7-35448f3bf607\") " pod="openstack/manila-api-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.729715 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpb7q\" (UniqueName: \"kubernetes.io/projected/630f9011-553f-445e-97a7-35448f3bf607-kube-api-access-gpb7q\") pod \"manila-api-0\" (UID: \"630f9011-553f-445e-97a7-35448f3bf607\") " pod="openstack/manila-api-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.729759 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/630f9011-553f-445e-97a7-35448f3bf607-scripts\") pod \"manila-api-0\" (UID: \"630f9011-553f-445e-97a7-35448f3bf607\") " pod="openstack/manila-api-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.730241 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/630f9011-553f-445e-97a7-35448f3bf607-logs\") pod \"manila-api-0\" (UID: \"630f9011-553f-445e-97a7-35448f3bf607\") " pod="openstack/manila-api-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.730295 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/630f9011-553f-445e-97a7-35448f3bf607-etc-machine-id\") pod \"manila-api-0\" (UID: \"630f9011-553f-445e-97a7-35448f3bf607\") " pod="openstack/manila-api-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.734825 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/630f9011-553f-445e-97a7-35448f3bf607-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"630f9011-553f-445e-97a7-35448f3bf607\") " 
pod="openstack/manila-api-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.736263 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/630f9011-553f-445e-97a7-35448f3bf607-scripts\") pod \"manila-api-0\" (UID: \"630f9011-553f-445e-97a7-35448f3bf607\") " pod="openstack/manila-api-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.736612 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/630f9011-553f-445e-97a7-35448f3bf607-config-data\") pod \"manila-api-0\" (UID: \"630f9011-553f-445e-97a7-35448f3bf607\") " pod="openstack/manila-api-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.757703 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/630f9011-553f-445e-97a7-35448f3bf607-config-data-custom\") pod \"manila-api-0\" (UID: \"630f9011-553f-445e-97a7-35448f3bf607\") " pod="openstack/manila-api-0" Jan 22 07:32:17 crc kubenswrapper[4982]: I0122 07:32:17.758255 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpb7q\" (UniqueName: \"kubernetes.io/projected/630f9011-553f-445e-97a7-35448f3bf607-kube-api-access-gpb7q\") pod \"manila-api-0\" (UID: \"630f9011-553f-445e-97a7-35448f3bf607\") " pod="openstack/manila-api-0" Jan 22 07:32:18 crc kubenswrapper[4982]: I0122 07:32:18.017778 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Jan 22 07:32:18 crc kubenswrapper[4982]: W0122 07:32:18.260636 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5e3e77b2_4dfc_4461_a0d9_c3ddc906e4f3.slice/crio-8775b8c91591835913b96cb55f51139cd104251f27bfc34ccbf458c472aec314 WatchSource:0}: Error finding container 8775b8c91591835913b96cb55f51139cd104251f27bfc34ccbf458c472aec314: Status 404 returned error can't find the container with id 8775b8c91591835913b96cb55f51139cd104251f27bfc34ccbf458c472aec314 Jan 22 07:32:18 crc kubenswrapper[4982]: I0122 07:32:18.263399 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Jan 22 07:32:18 crc kubenswrapper[4982]: I0122 07:32:18.434121 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8589d7b9f9-lz752"] Jan 22 07:32:18 crc kubenswrapper[4982]: I0122 07:32:18.558326 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Jan 22 07:32:18 crc kubenswrapper[4982]: I0122 07:32:18.838690 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3","Type":"ContainerStarted","Data":"8775b8c91591835913b96cb55f51139cd104251f27bfc34ccbf458c472aec314"} Jan 22 07:32:18 crc kubenswrapper[4982]: I0122 07:32:18.840141 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8589d7b9f9-lz752" event={"ID":"5c5e4058-2c05-4b26-9364-f828e9e01253","Type":"ContainerStarted","Data":"9dca691a800b5a0e15688d52367554b96ea46b9967c6c7fc3bb742defbeb288c"} Jan 22 07:32:18 crc kubenswrapper[4982]: I0122 07:32:18.841725 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"39694168-7dd0-449e-b3f7-fdc47ba2689e","Type":"ContainerStarted","Data":"e4fae6ad2ecfec574a286c2b019bd9f9e66a89d5c295789ec277a5ff708790ab"} Jan 22 07:32:18 crc 
kubenswrapper[4982]: W0122 07:32:18.885075 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod630f9011_553f_445e_97a7_35448f3bf607.slice/crio-bffe9f8e02643decd1e406b07b66777002ddcf3222e920b06f560fb16af22c3b WatchSource:0}: Error finding container bffe9f8e02643decd1e406b07b66777002ddcf3222e920b06f560fb16af22c3b: Status 404 returned error can't find the container with id bffe9f8e02643decd1e406b07b66777002ddcf3222e920b06f560fb16af22c3b Jan 22 07:32:18 crc kubenswrapper[4982]: I0122 07:32:18.885369 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Jan 22 07:32:19 crc kubenswrapper[4982]: I0122 07:32:19.864367 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"630f9011-553f-445e-97a7-35448f3bf607","Type":"ContainerStarted","Data":"cf8449a3ac6c9458277058775f24688eddd2c4a0576988f7309b0fc9a54ba2d5"} Jan 22 07:32:19 crc kubenswrapper[4982]: I0122 07:32:19.865186 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"630f9011-553f-445e-97a7-35448f3bf607","Type":"ContainerStarted","Data":"bffe9f8e02643decd1e406b07b66777002ddcf3222e920b06f560fb16af22c3b"} Jan 22 07:32:19 crc kubenswrapper[4982]: I0122 07:32:19.871797 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3","Type":"ContainerStarted","Data":"d220aa51412c68d5aa887e301e6dc8598cff01b9261f92909277b233a9e415fb"} Jan 22 07:32:19 crc kubenswrapper[4982]: I0122 07:32:19.875324 4982 generic.go:334] "Generic (PLEG): container finished" podID="5c5e4058-2c05-4b26-9364-f828e9e01253" containerID="0040f63e6c49227c890af705123fda8efc736f42330d5b991bd31624e5b5e7d9" exitCode=0 Jan 22 07:32:19 crc kubenswrapper[4982]: I0122 07:32:19.875364 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8589d7b9f9-lz752" event={"ID":"5c5e4058-2c05-4b26-9364-f828e9e01253","Type":"ContainerDied","Data":"0040f63e6c49227c890af705123fda8efc736f42330d5b991bd31624e5b5e7d9"} Jan 22 07:32:20 crc kubenswrapper[4982]: I0122 07:32:20.889704 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"630f9011-553f-445e-97a7-35448f3bf607","Type":"ContainerStarted","Data":"040285fa71b16d7d72c1ff3cb17cd6abf2ba0f7c69cc4b6261dd6ac88c5eb63c"} Jan 22 07:32:20 crc kubenswrapper[4982]: I0122 07:32:20.891934 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0" Jan 22 07:32:20 crc kubenswrapper[4982]: I0122 07:32:20.899467 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3","Type":"ContainerStarted","Data":"283e20e7cfbc8a06f60ae50d12dc122962e732cf966f186517c0ed3a3112aaf8"} Jan 22 07:32:20 crc kubenswrapper[4982]: I0122 07:32:20.904488 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8589d7b9f9-lz752" event={"ID":"5c5e4058-2c05-4b26-9364-f828e9e01253","Type":"ContainerStarted","Data":"8717ab7e46bcf6964750786b6e3c7b4fff1cca1be458e84aec2fee1d461b7103"} Jan 22 07:32:20 crc kubenswrapper[4982]: I0122 07:32:20.905243 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8589d7b9f9-lz752" Jan 22 07:32:20 crc kubenswrapper[4982]: I0122 07:32:20.954723 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/manila-api-0" podStartSLOduration=3.954703302 podStartE2EDuration="3.954703302s" podCreationTimestamp="2026-01-22 07:32:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:32:20.929244346 +0000 UTC m=+6401.767882349" watchObservedRunningTime="2026-01-22 07:32:20.954703302 +0000 UTC m=+6401.793341305" Jan 22 07:32:20 crc kubenswrapper[4982]: I0122 07:32:20.960301 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=3.063337756 podStartE2EDuration="3.960282693s" podCreationTimestamp="2026-01-22 07:32:17 +0000 UTC" firstStartedPulling="2026-01-22 07:32:18.264178879 +0000 UTC m=+6399.102816882" lastFinishedPulling="2026-01-22 07:32:19.161123816 +0000 UTC m=+6399.999761819" observedRunningTime="2026-01-22 07:32:20.947273571 +0000 UTC m=+6401.785911574" watchObservedRunningTime="2026-01-22 07:32:20.960282693 +0000 UTC m=+6401.798920696" Jan 22 07:32:20 crc kubenswrapper[4982]: I0122 07:32:20.973098 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8589d7b9f9-lz752" podStartSLOduration=3.9730761169999997 podStartE2EDuration="3.973076117s" podCreationTimestamp="2026-01-22 07:32:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:32:20.963267323 +0000 UTC m=+6401.801905336" watchObservedRunningTime="2026-01-22 07:32:20.973076117 +0000 UTC m=+6401.811714120" Jan 22 07:32:21 crc kubenswrapper[4982]: I0122 07:32:21.855453 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Jan 22 07:32:25 crc kubenswrapper[4982]: I0122 07:32:25.980228 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"39694168-7dd0-449e-b3f7-fdc47ba2689e","Type":"ContainerStarted","Data":"ffef69170ae342fc4aa80276d391d9a0344e64a541ea17d4d0a240b311307e52"} Jan 22 07:32:26 crc kubenswrapper[4982]: I0122 07:32:26.994183 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"39694168-7dd0-449e-b3f7-fdc47ba2689e","Type":"ContainerStarted","Data":"743e3c440183fd392e3212322005cb24c7f5cfcacbbbf0607dfff8c9809d5a51"} Jan 22 07:32:27 crc kubenswrapper[4982]: I0122 07:32:27.034187 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=3.340537313 podStartE2EDuration="10.034168599s" podCreationTimestamp="2026-01-22 07:32:17 +0000 UTC" firstStartedPulling="2026-01-22 07:32:18.58152116 +0000 UTC m=+6399.420159163" lastFinishedPulling="2026-01-22 07:32:25.275152446 +0000 UTC m=+6406.113790449" observedRunningTime="2026-01-22 07:32:27.031955599 +0000 UTC m=+6407.870593622" watchObservedRunningTime="2026-01-22 07:32:27.034168599 +0000 UTC m=+6407.872806602" Jan 22 07:32:27 crc kubenswrapper[4982]: I0122 07:32:27.564934 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0" Jan 22 07:32:27 crc kubenswrapper[4982]: I0122 07:32:27.606309 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0" Jan 22 07:32:27 crc kubenswrapper[4982]: I0122 07:32:27.699362 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8589d7b9f9-lz752" Jan 22 
07:32:27 crc kubenswrapper[4982]: I0122 07:32:27.781375 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c599684d7-dhnf7"] Jan 22 07:32:27 crc kubenswrapper[4982]: I0122 07:32:27.781638 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c599684d7-dhnf7" podUID="cfe2c386-c017-4c40-add9-65ba1dc29f41" containerName="dnsmasq-dns" containerID="cri-o://6a3198c8f30a8a2c3b0388bc739fd51dae391960801d0b6e4fdb95a636fcfc5e" gracePeriod=10 Jan 22 07:32:28 crc kubenswrapper[4982]: I0122 07:32:28.854748 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c599684d7-dhnf7" Jan 22 07:32:28 crc kubenswrapper[4982]: I0122 07:32:28.934571 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfe2c386-c017-4c40-add9-65ba1dc29f41-config\") pod \"cfe2c386-c017-4c40-add9-65ba1dc29f41\" (UID: \"cfe2c386-c017-4c40-add9-65ba1dc29f41\") " Jan 22 07:32:28 crc kubenswrapper[4982]: I0122 07:32:28.934683 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-prvbn\" (UniqueName: \"kubernetes.io/projected/cfe2c386-c017-4c40-add9-65ba1dc29f41-kube-api-access-prvbn\") pod \"cfe2c386-c017-4c40-add9-65ba1dc29f41\" (UID: \"cfe2c386-c017-4c40-add9-65ba1dc29f41\") " Jan 22 07:32:28 crc kubenswrapper[4982]: I0122 07:32:28.934929 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cfe2c386-c017-4c40-add9-65ba1dc29f41-ovsdbserver-sb\") pod \"cfe2c386-c017-4c40-add9-65ba1dc29f41\" (UID: \"cfe2c386-c017-4c40-add9-65ba1dc29f41\") " Jan 22 07:32:28 crc kubenswrapper[4982]: I0122 07:32:28.935003 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cfe2c386-c017-4c40-add9-65ba1dc29f41-dns-svc\") pod \"cfe2c386-c017-4c40-add9-65ba1dc29f41\" (UID: \"cfe2c386-c017-4c40-add9-65ba1dc29f41\") " Jan 22 07:32:28 crc kubenswrapper[4982]: I0122 07:32:28.935072 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cfe2c386-c017-4c40-add9-65ba1dc29f41-ovsdbserver-nb\") pod \"cfe2c386-c017-4c40-add9-65ba1dc29f41\" (UID: \"cfe2c386-c017-4c40-add9-65ba1dc29f41\") " Jan 22 07:32:28 crc kubenswrapper[4982]: I0122 07:32:28.965180 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cfe2c386-c017-4c40-add9-65ba1dc29f41-kube-api-access-prvbn" (OuterVolumeSpecName: "kube-api-access-prvbn") pod "cfe2c386-c017-4c40-add9-65ba1dc29f41" (UID: "cfe2c386-c017-4c40-add9-65ba1dc29f41"). InnerVolumeSpecName "kube-api-access-prvbn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:32:29 crc kubenswrapper[4982]: I0122 07:32:28.999952 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cfe2c386-c017-4c40-add9-65ba1dc29f41-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "cfe2c386-c017-4c40-add9-65ba1dc29f41" (UID: "cfe2c386-c017-4c40-add9-65ba1dc29f41"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:32:29 crc kubenswrapper[4982]: I0122 07:32:29.004933 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cfe2c386-c017-4c40-add9-65ba1dc29f41-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "cfe2c386-c017-4c40-add9-65ba1dc29f41" (UID: "cfe2c386-c017-4c40-add9-65ba1dc29f41"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:32:29 crc kubenswrapper[4982]: I0122 07:32:29.016281 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cfe2c386-c017-4c40-add9-65ba1dc29f41-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "cfe2c386-c017-4c40-add9-65ba1dc29f41" (UID: "cfe2c386-c017-4c40-add9-65ba1dc29f41"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:32:29 crc kubenswrapper[4982]: I0122 07:32:29.019143 4982 generic.go:334] "Generic (PLEG): container finished" podID="cfe2c386-c017-4c40-add9-65ba1dc29f41" containerID="6a3198c8f30a8a2c3b0388bc739fd51dae391960801d0b6e4fdb95a636fcfc5e" exitCode=0 Jan 22 07:32:29 crc kubenswrapper[4982]: I0122 07:32:29.019371 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c599684d7-dhnf7" event={"ID":"cfe2c386-c017-4c40-add9-65ba1dc29f41","Type":"ContainerDied","Data":"6a3198c8f30a8a2c3b0388bc739fd51dae391960801d0b6e4fdb95a636fcfc5e"} Jan 22 07:32:29 crc kubenswrapper[4982]: I0122 07:32:29.019433 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c599684d7-dhnf7" event={"ID":"cfe2c386-c017-4c40-add9-65ba1dc29f41","Type":"ContainerDied","Data":"bb5ecb83e1c79d2847f3bfdcce8b6ea3942288b722682373f36e37290655c093"} Jan 22 07:32:29 crc kubenswrapper[4982]: I0122 07:32:29.019453 4982 scope.go:117] "RemoveContainer" containerID="6a3198c8f30a8a2c3b0388bc739fd51dae391960801d0b6e4fdb95a636fcfc5e" Jan 22 07:32:29 crc kubenswrapper[4982]: I0122 07:32:29.019482 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c599684d7-dhnf7" Jan 22 07:32:29 crc kubenswrapper[4982]: I0122 07:32:29.020150 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cfe2c386-c017-4c40-add9-65ba1dc29f41-config" (OuterVolumeSpecName: "config") pod "cfe2c386-c017-4c40-add9-65ba1dc29f41" (UID: "cfe2c386-c017-4c40-add9-65ba1dc29f41"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:32:29 crc kubenswrapper[4982]: I0122 07:32:29.037744 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cfe2c386-c017-4c40-add9-65ba1dc29f41-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 07:32:29 crc kubenswrapper[4982]: I0122 07:32:29.037774 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cfe2c386-c017-4c40-add9-65ba1dc29f41-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 07:32:29 crc kubenswrapper[4982]: I0122 07:32:29.037782 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cfe2c386-c017-4c40-add9-65ba1dc29f41-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 07:32:29 crc kubenswrapper[4982]: I0122 07:32:29.037791 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfe2c386-c017-4c40-add9-65ba1dc29f41-config\") on node \"crc\" DevicePath \"\"" Jan 22 07:32:29 crc kubenswrapper[4982]: I0122 07:32:29.037801 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-prvbn\" (UniqueName: \"kubernetes.io/projected/cfe2c386-c017-4c40-add9-65ba1dc29f41-kube-api-access-prvbn\") on node \"crc\" DevicePath \"\"" Jan 22 07:32:29 crc kubenswrapper[4982]: I0122 07:32:29.117384 4982 scope.go:117] "RemoveContainer" containerID="eff10a39f1b8bb926c647bc6892d13717f734e625a7d295525d9e5cc1c89288d" Jan 22 07:32:29 crc kubenswrapper[4982]: I0122 07:32:29.138750 4982 scope.go:117] "RemoveContainer" containerID="6a3198c8f30a8a2c3b0388bc739fd51dae391960801d0b6e4fdb95a636fcfc5e" Jan 22 07:32:29 crc kubenswrapper[4982]: E0122 07:32:29.140126 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6a3198c8f30a8a2c3b0388bc739fd51dae391960801d0b6e4fdb95a636fcfc5e\": container with ID starting with 6a3198c8f30a8a2c3b0388bc739fd51dae391960801d0b6e4fdb95a636fcfc5e not found: ID does not exist" containerID="6a3198c8f30a8a2c3b0388bc739fd51dae391960801d0b6e4fdb95a636fcfc5e" Jan 22 07:32:29 crc kubenswrapper[4982]: I0122 07:32:29.140168 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a3198c8f30a8a2c3b0388bc739fd51dae391960801d0b6e4fdb95a636fcfc5e"} err="failed to get container status \"6a3198c8f30a8a2c3b0388bc739fd51dae391960801d0b6e4fdb95a636fcfc5e\": rpc error: code = NotFound desc = could not find container \"6a3198c8f30a8a2c3b0388bc739fd51dae391960801d0b6e4fdb95a636fcfc5e\": container with ID starting with 6a3198c8f30a8a2c3b0388bc739fd51dae391960801d0b6e4fdb95a636fcfc5e not found: ID does not exist" Jan 22 07:32:29 crc kubenswrapper[4982]: I0122 07:32:29.140195 4982 scope.go:117] "RemoveContainer" containerID="eff10a39f1b8bb926c647bc6892d13717f734e625a7d295525d9e5cc1c89288d" Jan 22 07:32:29 crc kubenswrapper[4982]: E0122 07:32:29.141121 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eff10a39f1b8bb926c647bc6892d13717f734e625a7d295525d9e5cc1c89288d\": container with ID starting with eff10a39f1b8bb926c647bc6892d13717f734e625a7d295525d9e5cc1c89288d not found: ID does not exist" containerID="eff10a39f1b8bb926c647bc6892d13717f734e625a7d295525d9e5cc1c89288d" Jan 22 07:32:29 crc kubenswrapper[4982]: I0122 07:32:29.141147 4982 pod_container_deletor.go:53] "DeleteContainer 
returned error" containerID={"Type":"cri-o","ID":"eff10a39f1b8bb926c647bc6892d13717f734e625a7d295525d9e5cc1c89288d"} err="failed to get container status \"eff10a39f1b8bb926c647bc6892d13717f734e625a7d295525d9e5cc1c89288d\": rpc error: code = NotFound desc = could not find container \"eff10a39f1b8bb926c647bc6892d13717f734e625a7d295525d9e5cc1c89288d\": container with ID starting with eff10a39f1b8bb926c647bc6892d13717f734e625a7d295525d9e5cc1c89288d not found: ID does not exist" Jan 22 07:32:29 crc kubenswrapper[4982]: I0122 07:32:29.366048 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c599684d7-dhnf7"] Jan 22 07:32:29 crc kubenswrapper[4982]: I0122 07:32:29.378389 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c599684d7-dhnf7"] Jan 22 07:32:29 crc kubenswrapper[4982]: I0122 07:32:29.737659 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cfe2c386-c017-4c40-add9-65ba1dc29f41" path="/var/lib/kubelet/pods/cfe2c386-c017-4c40-add9-65ba1dc29f41/volumes" Jan 22 07:32:30 crc kubenswrapper[4982]: I0122 07:32:30.311629 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 22 07:32:30 crc kubenswrapper[4982]: I0122 07:32:30.312688 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e7afd50d-f3ce-4801-9879-294147a9435a" containerName="ceilometer-central-agent" containerID="cri-o://027b9abbb471487b47bd4a64c7c780c738a9c49a48ebcb7723915f3c9827fff7" gracePeriod=30 Jan 22 07:32:30 crc kubenswrapper[4982]: I0122 07:32:30.313232 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e7afd50d-f3ce-4801-9879-294147a9435a" containerName="proxy-httpd" containerID="cri-o://e626abd434ff551017ec8fea47c08292e265b24e03abfba30d2f934f040656aa" gracePeriod=30 Jan 22 07:32:30 crc kubenswrapper[4982]: I0122 07:32:30.313296 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e7afd50d-f3ce-4801-9879-294147a9435a" containerName="sg-core" containerID="cri-o://f6679244faadef74edbfc5c365b0641a6783edac75b6f8e27a774e07f135b234" gracePeriod=30 Jan 22 07:32:30 crc kubenswrapper[4982]: I0122 07:32:30.313347 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e7afd50d-f3ce-4801-9879-294147a9435a" containerName="ceilometer-notification-agent" containerID="cri-o://bff7164ef40af5c143bc68c5d3fe90d2963ac36358f1af3cda42ea6088a05a06" gracePeriod=30 Jan 22 07:32:31 crc kubenswrapper[4982]: I0122 07:32:31.064993 4982 generic.go:334] "Generic (PLEG): container finished" podID="e7afd50d-f3ce-4801-9879-294147a9435a" containerID="e626abd434ff551017ec8fea47c08292e265b24e03abfba30d2f934f040656aa" exitCode=0 Jan 22 07:32:31 crc kubenswrapper[4982]: I0122 07:32:31.065022 4982 generic.go:334] "Generic (PLEG): container finished" podID="e7afd50d-f3ce-4801-9879-294147a9435a" containerID="f6679244faadef74edbfc5c365b0641a6783edac75b6f8e27a774e07f135b234" exitCode=2 Jan 22 07:32:31 crc kubenswrapper[4982]: I0122 07:32:31.065030 4982 generic.go:334] "Generic (PLEG): container finished" podID="e7afd50d-f3ce-4801-9879-294147a9435a" containerID="027b9abbb471487b47bd4a64c7c780c738a9c49a48ebcb7723915f3c9827fff7" exitCode=0 Jan 22 07:32:31 crc kubenswrapper[4982]: I0122 07:32:31.065048 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"e7afd50d-f3ce-4801-9879-294147a9435a","Type":"ContainerDied","Data":"e626abd434ff551017ec8fea47c08292e265b24e03abfba30d2f934f040656aa"} Jan 22 07:32:31 crc kubenswrapper[4982]: I0122 07:32:31.065074 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e7afd50d-f3ce-4801-9879-294147a9435a","Type":"ContainerDied","Data":"f6679244faadef74edbfc5c365b0641a6783edac75b6f8e27a774e07f135b234"} Jan 22 07:32:31 crc kubenswrapper[4982]: I0122 07:32:31.065085 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e7afd50d-f3ce-4801-9879-294147a9435a","Type":"ContainerDied","Data":"027b9abbb471487b47bd4a64c7c780c738a9c49a48ebcb7723915f3c9827fff7"} Jan 22 07:32:31 crc kubenswrapper[4982]: I0122 07:32:31.635227 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 07:32:31 crc kubenswrapper[4982]: I0122 07:32:31.696688 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7afd50d-f3ce-4801-9879-294147a9435a-scripts\") pod \"e7afd50d-f3ce-4801-9879-294147a9435a\" (UID: \"e7afd50d-f3ce-4801-9879-294147a9435a\") " Jan 22 07:32:31 crc kubenswrapper[4982]: I0122 07:32:31.696737 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e7afd50d-f3ce-4801-9879-294147a9435a-sg-core-conf-yaml\") pod \"e7afd50d-f3ce-4801-9879-294147a9435a\" (UID: \"e7afd50d-f3ce-4801-9879-294147a9435a\") " Jan 22 07:32:31 crc kubenswrapper[4982]: I0122 07:32:31.696804 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7afd50d-f3ce-4801-9879-294147a9435a-combined-ca-bundle\") pod \"e7afd50d-f3ce-4801-9879-294147a9435a\" (UID: \"e7afd50d-f3ce-4801-9879-294147a9435a\") " Jan 22 07:32:31 crc kubenswrapper[4982]: I0122 07:32:31.696907 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7afd50d-f3ce-4801-9879-294147a9435a-config-data\") pod \"e7afd50d-f3ce-4801-9879-294147a9435a\" (UID: \"e7afd50d-f3ce-4801-9879-294147a9435a\") " Jan 22 07:32:31 crc kubenswrapper[4982]: I0122 07:32:31.696997 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6b2jq\" (UniqueName: \"kubernetes.io/projected/e7afd50d-f3ce-4801-9879-294147a9435a-kube-api-access-6b2jq\") pod \"e7afd50d-f3ce-4801-9879-294147a9435a\" (UID: \"e7afd50d-f3ce-4801-9879-294147a9435a\") " Jan 22 07:32:31 crc kubenswrapper[4982]: I0122 07:32:31.697043 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e7afd50d-f3ce-4801-9879-294147a9435a-run-httpd\") pod \"e7afd50d-f3ce-4801-9879-294147a9435a\" (UID: \"e7afd50d-f3ce-4801-9879-294147a9435a\") " Jan 22 07:32:31 crc kubenswrapper[4982]: I0122 07:32:31.697077 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e7afd50d-f3ce-4801-9879-294147a9435a-log-httpd\") pod \"e7afd50d-f3ce-4801-9879-294147a9435a\" (UID: \"e7afd50d-f3ce-4801-9879-294147a9435a\") " Jan 22 07:32:31 crc kubenswrapper[4982]: I0122 07:32:31.697514 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/e7afd50d-f3ce-4801-9879-294147a9435a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e7afd50d-f3ce-4801-9879-294147a9435a" (UID: "e7afd50d-f3ce-4801-9879-294147a9435a"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:32:31 crc kubenswrapper[4982]: I0122 07:32:31.697703 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7afd50d-f3ce-4801-9879-294147a9435a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e7afd50d-f3ce-4801-9879-294147a9435a" (UID: "e7afd50d-f3ce-4801-9879-294147a9435a"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:32:31 crc kubenswrapper[4982]: I0122 07:32:31.704009 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7afd50d-f3ce-4801-9879-294147a9435a-scripts" (OuterVolumeSpecName: "scripts") pod "e7afd50d-f3ce-4801-9879-294147a9435a" (UID: "e7afd50d-f3ce-4801-9879-294147a9435a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:32:31 crc kubenswrapper[4982]: I0122 07:32:31.715201 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7afd50d-f3ce-4801-9879-294147a9435a-kube-api-access-6b2jq" (OuterVolumeSpecName: "kube-api-access-6b2jq") pod "e7afd50d-f3ce-4801-9879-294147a9435a" (UID: "e7afd50d-f3ce-4801-9879-294147a9435a"). InnerVolumeSpecName "kube-api-access-6b2jq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:32:31 crc kubenswrapper[4982]: I0122 07:32:31.762487 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7afd50d-f3ce-4801-9879-294147a9435a-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e7afd50d-f3ce-4801-9879-294147a9435a" (UID: "e7afd50d-f3ce-4801-9879-294147a9435a"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:32:31 crc kubenswrapper[4982]: I0122 07:32:31.777264 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7afd50d-f3ce-4801-9879-294147a9435a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e7afd50d-f3ce-4801-9879-294147a9435a" (UID: "e7afd50d-f3ce-4801-9879-294147a9435a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:32:31 crc kubenswrapper[4982]: I0122 07:32:31.799289 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6b2jq\" (UniqueName: \"kubernetes.io/projected/e7afd50d-f3ce-4801-9879-294147a9435a-kube-api-access-6b2jq\") on node \"crc\" DevicePath \"\"" Jan 22 07:32:31 crc kubenswrapper[4982]: I0122 07:32:31.799317 4982 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e7afd50d-f3ce-4801-9879-294147a9435a-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 07:32:31 crc kubenswrapper[4982]: I0122 07:32:31.799326 4982 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e7afd50d-f3ce-4801-9879-294147a9435a-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 07:32:31 crc kubenswrapper[4982]: I0122 07:32:31.799337 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7afd50d-f3ce-4801-9879-294147a9435a-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 07:32:31 crc kubenswrapper[4982]: I0122 07:32:31.799346 4982 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e7afd50d-f3ce-4801-9879-294147a9435a-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 22 07:32:31 crc kubenswrapper[4982]: I0122 07:32:31.799355 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7afd50d-f3ce-4801-9879-294147a9435a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:32:31 crc kubenswrapper[4982]: I0122 07:32:31.834054 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7afd50d-f3ce-4801-9879-294147a9435a-config-data" (OuterVolumeSpecName: "config-data") pod "e7afd50d-f3ce-4801-9879-294147a9435a" (UID: "e7afd50d-f3ce-4801-9879-294147a9435a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:32:31 crc kubenswrapper[4982]: I0122 07:32:31.901241 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7afd50d-f3ce-4801-9879-294147a9435a-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.077538 4982 generic.go:334] "Generic (PLEG): container finished" podID="e7afd50d-f3ce-4801-9879-294147a9435a" containerID="bff7164ef40af5c143bc68c5d3fe90d2963ac36358f1af3cda42ea6088a05a06" exitCode=0 Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.077616 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e7afd50d-f3ce-4801-9879-294147a9435a","Type":"ContainerDied","Data":"bff7164ef40af5c143bc68c5d3fe90d2963ac36358f1af3cda42ea6088a05a06"} Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.077955 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e7afd50d-f3ce-4801-9879-294147a9435a","Type":"ContainerDied","Data":"67c3aefc92fcfd138bb92f9db1d84b2f8c87caed46c9be0af4477fb05d2b5850"} Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.077979 4982 scope.go:117] "RemoveContainer" containerID="e626abd434ff551017ec8fea47c08292e265b24e03abfba30d2f934f040656aa" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.077641 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.109750 4982 scope.go:117] "RemoveContainer" containerID="f6679244faadef74edbfc5c365b0641a6783edac75b6f8e27a774e07f135b234" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.151253 4982 scope.go:117] "RemoveContainer" containerID="bff7164ef40af5c143bc68c5d3fe90d2963ac36358f1af3cda42ea6088a05a06" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.153164 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.168059 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.181447 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 22 07:32:32 crc kubenswrapper[4982]: E0122 07:32:32.181976 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7afd50d-f3ce-4801-9879-294147a9435a" containerName="proxy-httpd" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.182004 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7afd50d-f3ce-4801-9879-294147a9435a" containerName="proxy-httpd" Jan 22 07:32:32 crc kubenswrapper[4982]: E0122 07:32:32.182027 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7afd50d-f3ce-4801-9879-294147a9435a" containerName="sg-core" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.182035 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7afd50d-f3ce-4801-9879-294147a9435a" containerName="sg-core" Jan 22 07:32:32 crc kubenswrapper[4982]: E0122 07:32:32.182046 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfe2c386-c017-4c40-add9-65ba1dc29f41" containerName="init" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.182052 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfe2c386-c017-4c40-add9-65ba1dc29f41" containerName="init" Jan 22 07:32:32 crc kubenswrapper[4982]: E0122 07:32:32.182062 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7afd50d-f3ce-4801-9879-294147a9435a" containerName="ceilometer-notification-agent" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.182067 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7afd50d-f3ce-4801-9879-294147a9435a" containerName="ceilometer-notification-agent" Jan 22 07:32:32 crc kubenswrapper[4982]: E0122 07:32:32.182084 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfe2c386-c017-4c40-add9-65ba1dc29f41" containerName="dnsmasq-dns" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.182090 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfe2c386-c017-4c40-add9-65ba1dc29f41" containerName="dnsmasq-dns" Jan 22 07:32:32 crc kubenswrapper[4982]: E0122 07:32:32.182105 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7afd50d-f3ce-4801-9879-294147a9435a" containerName="ceilometer-central-agent" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.182111 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7afd50d-f3ce-4801-9879-294147a9435a" containerName="ceilometer-central-agent" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.182295 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7afd50d-f3ce-4801-9879-294147a9435a" containerName="ceilometer-central-agent" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.182306 4982 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="e7afd50d-f3ce-4801-9879-294147a9435a" containerName="ceilometer-notification-agent" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.182316 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7afd50d-f3ce-4801-9879-294147a9435a" containerName="sg-core" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.182334 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7afd50d-f3ce-4801-9879-294147a9435a" containerName="proxy-httpd" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.182343 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfe2c386-c017-4c40-add9-65ba1dc29f41" containerName="dnsmasq-dns" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.182928 4982 scope.go:117] "RemoveContainer" containerID="027b9abbb471487b47bd4a64c7c780c738a9c49a48ebcb7723915f3c9827fff7" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.184916 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.186622 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.186830 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.193587 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.212535 4982 scope.go:117] "RemoveContainer" containerID="e626abd434ff551017ec8fea47c08292e265b24e03abfba30d2f934f040656aa" Jan 22 07:32:32 crc kubenswrapper[4982]: E0122 07:32:32.213003 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e626abd434ff551017ec8fea47c08292e265b24e03abfba30d2f934f040656aa\": container with ID starting with e626abd434ff551017ec8fea47c08292e265b24e03abfba30d2f934f040656aa not found: ID does not exist" containerID="e626abd434ff551017ec8fea47c08292e265b24e03abfba30d2f934f040656aa" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.213334 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e626abd434ff551017ec8fea47c08292e265b24e03abfba30d2f934f040656aa"} err="failed to get container status \"e626abd434ff551017ec8fea47c08292e265b24e03abfba30d2f934f040656aa\": rpc error: code = NotFound desc = could not find container \"e626abd434ff551017ec8fea47c08292e265b24e03abfba30d2f934f040656aa\": container with ID starting with e626abd434ff551017ec8fea47c08292e265b24e03abfba30d2f934f040656aa not found: ID does not exist" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.213420 4982 scope.go:117] "RemoveContainer" containerID="f6679244faadef74edbfc5c365b0641a6783edac75b6f8e27a774e07f135b234" Jan 22 07:32:32 crc kubenswrapper[4982]: E0122 07:32:32.213740 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6679244faadef74edbfc5c365b0641a6783edac75b6f8e27a774e07f135b234\": container with ID starting with f6679244faadef74edbfc5c365b0641a6783edac75b6f8e27a774e07f135b234 not found: ID does not exist" containerID="f6679244faadef74edbfc5c365b0641a6783edac75b6f8e27a774e07f135b234" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.213825 4982 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"f6679244faadef74edbfc5c365b0641a6783edac75b6f8e27a774e07f135b234"} err="failed to get container status \"f6679244faadef74edbfc5c365b0641a6783edac75b6f8e27a774e07f135b234\": rpc error: code = NotFound desc = could not find container \"f6679244faadef74edbfc5c365b0641a6783edac75b6f8e27a774e07f135b234\": container with ID starting with f6679244faadef74edbfc5c365b0641a6783edac75b6f8e27a774e07f135b234 not found: ID does not exist" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.213911 4982 scope.go:117] "RemoveContainer" containerID="bff7164ef40af5c143bc68c5d3fe90d2963ac36358f1af3cda42ea6088a05a06" Jan 22 07:32:32 crc kubenswrapper[4982]: E0122 07:32:32.214365 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bff7164ef40af5c143bc68c5d3fe90d2963ac36358f1af3cda42ea6088a05a06\": container with ID starting with bff7164ef40af5c143bc68c5d3fe90d2963ac36358f1af3cda42ea6088a05a06 not found: ID does not exist" containerID="bff7164ef40af5c143bc68c5d3fe90d2963ac36358f1af3cda42ea6088a05a06" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.214390 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bff7164ef40af5c143bc68c5d3fe90d2963ac36358f1af3cda42ea6088a05a06"} err="failed to get container status \"bff7164ef40af5c143bc68c5d3fe90d2963ac36358f1af3cda42ea6088a05a06\": rpc error: code = NotFound desc = could not find container \"bff7164ef40af5c143bc68c5d3fe90d2963ac36358f1af3cda42ea6088a05a06\": container with ID starting with bff7164ef40af5c143bc68c5d3fe90d2963ac36358f1af3cda42ea6088a05a06 not found: ID does not exist" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.214409 4982 scope.go:117] "RemoveContainer" containerID="027b9abbb471487b47bd4a64c7c780c738a9c49a48ebcb7723915f3c9827fff7" Jan 22 07:32:32 crc kubenswrapper[4982]: E0122 07:32:32.214731 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"027b9abbb471487b47bd4a64c7c780c738a9c49a48ebcb7723915f3c9827fff7\": container with ID starting with 027b9abbb471487b47bd4a64c7c780c738a9c49a48ebcb7723915f3c9827fff7 not found: ID does not exist" containerID="027b9abbb471487b47bd4a64c7c780c738a9c49a48ebcb7723915f3c9827fff7" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.214752 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"027b9abbb471487b47bd4a64c7c780c738a9c49a48ebcb7723915f3c9827fff7"} err="failed to get container status \"027b9abbb471487b47bd4a64c7c780c738a9c49a48ebcb7723915f3c9827fff7\": rpc error: code = NotFound desc = could not find container \"027b9abbb471487b47bd4a64c7c780c738a9c49a48ebcb7723915f3c9827fff7\": container with ID starting with 027b9abbb471487b47bd4a64c7c780c738a9c49a48ebcb7723915f3c9827fff7 not found: ID does not exist" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.232047 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/34fb3c90-dfaf-44b1-a53a-b07bca95e3b3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"34fb3c90-dfaf-44b1-a53a-b07bca95e3b3\") " pod="openstack/ceilometer-0" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.232141 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/34fb3c90-dfaf-44b1-a53a-b07bca95e3b3-log-httpd\") pod \"ceilometer-0\" (UID: \"34fb3c90-dfaf-44b1-a53a-b07bca95e3b3\") " pod="openstack/ceilometer-0" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.232175 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/34fb3c90-dfaf-44b1-a53a-b07bca95e3b3-run-httpd\") pod \"ceilometer-0\" (UID: \"34fb3c90-dfaf-44b1-a53a-b07bca95e3b3\") " pod="openstack/ceilometer-0" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.232247 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/34fb3c90-dfaf-44b1-a53a-b07bca95e3b3-scripts\") pod \"ceilometer-0\" (UID: \"34fb3c90-dfaf-44b1-a53a-b07bca95e3b3\") " pod="openstack/ceilometer-0" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.232325 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxqcr\" (UniqueName: \"kubernetes.io/projected/34fb3c90-dfaf-44b1-a53a-b07bca95e3b3-kube-api-access-jxqcr\") pod \"ceilometer-0\" (UID: \"34fb3c90-dfaf-44b1-a53a-b07bca95e3b3\") " pod="openstack/ceilometer-0" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.232369 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34fb3c90-dfaf-44b1-a53a-b07bca95e3b3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"34fb3c90-dfaf-44b1-a53a-b07bca95e3b3\") " pod="openstack/ceilometer-0" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.232394 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34fb3c90-dfaf-44b1-a53a-b07bca95e3b3-config-data\") pod \"ceilometer-0\" (UID: \"34fb3c90-dfaf-44b1-a53a-b07bca95e3b3\") " pod="openstack/ceilometer-0" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.334157 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/34fb3c90-dfaf-44b1-a53a-b07bca95e3b3-scripts\") pod \"ceilometer-0\" (UID: \"34fb3c90-dfaf-44b1-a53a-b07bca95e3b3\") " pod="openstack/ceilometer-0" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.334276 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxqcr\" (UniqueName: \"kubernetes.io/projected/34fb3c90-dfaf-44b1-a53a-b07bca95e3b3-kube-api-access-jxqcr\") pod \"ceilometer-0\" (UID: \"34fb3c90-dfaf-44b1-a53a-b07bca95e3b3\") " pod="openstack/ceilometer-0" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.334308 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34fb3c90-dfaf-44b1-a53a-b07bca95e3b3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"34fb3c90-dfaf-44b1-a53a-b07bca95e3b3\") " pod="openstack/ceilometer-0" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.334437 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34fb3c90-dfaf-44b1-a53a-b07bca95e3b3-config-data\") pod \"ceilometer-0\" (UID: \"34fb3c90-dfaf-44b1-a53a-b07bca95e3b3\") " pod="openstack/ceilometer-0" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.334491 4982 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/34fb3c90-dfaf-44b1-a53a-b07bca95e3b3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"34fb3c90-dfaf-44b1-a53a-b07bca95e3b3\") " pod="openstack/ceilometer-0" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.334637 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/34fb3c90-dfaf-44b1-a53a-b07bca95e3b3-log-httpd\") pod \"ceilometer-0\" (UID: \"34fb3c90-dfaf-44b1-a53a-b07bca95e3b3\") " pod="openstack/ceilometer-0" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.334794 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/34fb3c90-dfaf-44b1-a53a-b07bca95e3b3-run-httpd\") pod \"ceilometer-0\" (UID: \"34fb3c90-dfaf-44b1-a53a-b07bca95e3b3\") " pod="openstack/ceilometer-0" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.335394 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/34fb3c90-dfaf-44b1-a53a-b07bca95e3b3-log-httpd\") pod \"ceilometer-0\" (UID: \"34fb3c90-dfaf-44b1-a53a-b07bca95e3b3\") " pod="openstack/ceilometer-0" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.335714 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/34fb3c90-dfaf-44b1-a53a-b07bca95e3b3-run-httpd\") pod \"ceilometer-0\" (UID: \"34fb3c90-dfaf-44b1-a53a-b07bca95e3b3\") " pod="openstack/ceilometer-0" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.339927 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/34fb3c90-dfaf-44b1-a53a-b07bca95e3b3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"34fb3c90-dfaf-44b1-a53a-b07bca95e3b3\") " pod="openstack/ceilometer-0" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.341192 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34fb3c90-dfaf-44b1-a53a-b07bca95e3b3-config-data\") pod \"ceilometer-0\" (UID: \"34fb3c90-dfaf-44b1-a53a-b07bca95e3b3\") " pod="openstack/ceilometer-0" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.341357 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/34fb3c90-dfaf-44b1-a53a-b07bca95e3b3-scripts\") pod \"ceilometer-0\" (UID: \"34fb3c90-dfaf-44b1-a53a-b07bca95e3b3\") " pod="openstack/ceilometer-0" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.341945 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34fb3c90-dfaf-44b1-a53a-b07bca95e3b3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"34fb3c90-dfaf-44b1-a53a-b07bca95e3b3\") " pod="openstack/ceilometer-0" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.351758 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxqcr\" (UniqueName: \"kubernetes.io/projected/34fb3c90-dfaf-44b1-a53a-b07bca95e3b3-kube-api-access-jxqcr\") pod \"ceilometer-0\" (UID: \"34fb3c90-dfaf-44b1-a53a-b07bca95e3b3\") " pod="openstack/ceilometer-0" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.505898 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 22 07:32:32 crc kubenswrapper[4982]: I0122 07:32:32.985026 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 07:32:33 crc kubenswrapper[4982]: I0122 07:32:33.087615 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"34fb3c90-dfaf-44b1-a53a-b07bca95e3b3","Type":"ContainerStarted","Data":"88a65b2d22095b9556da35b36867cc04b3e562727b7d4ade9013a4ad5b9cdab3"} Jan 22 07:32:33 crc kubenswrapper[4982]: I0122 07:32:33.739811 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7afd50d-f3ce-4801-9879-294147a9435a" path="/var/lib/kubelet/pods/e7afd50d-f3ce-4801-9879-294147a9435a/volumes" Jan 22 07:32:33 crc kubenswrapper[4982]: I0122 07:32:33.806481 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5c599684d7-dhnf7" podUID="cfe2c386-c017-4c40-add9-65ba1dc29f41" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.1.76:5353: i/o timeout" Jan 22 07:32:34 crc kubenswrapper[4982]: I0122 07:32:34.100234 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"34fb3c90-dfaf-44b1-a53a-b07bca95e3b3","Type":"ContainerStarted","Data":"8cbe28995cc1f8bf1cde9c5927c86c7a2ca5e3b03daae6f1cca1d69ce2876a5c"} Jan 22 07:32:35 crc kubenswrapper[4982]: I0122 07:32:35.132953 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"34fb3c90-dfaf-44b1-a53a-b07bca95e3b3","Type":"ContainerStarted","Data":"2671731f507fd6d454e3b43d4cf7a78ee28b8c16bb452616a50af325a8d5d01b"} Jan 22 07:32:36 crc kubenswrapper[4982]: I0122 07:32:36.147013 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"34fb3c90-dfaf-44b1-a53a-b07bca95e3b3","Type":"ContainerStarted","Data":"354059546f9ad5fc69989c311ff2155c57221529e9abefc332f7a8967c77eefb"} Jan 22 07:32:37 crc kubenswrapper[4982]: I0122 07:32:37.157330 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"34fb3c90-dfaf-44b1-a53a-b07bca95e3b3","Type":"ContainerStarted","Data":"7ef9f9abb6cf0a7d50fc84d1071855fed2e01b7890947b2cef7e4f085b8a1b39"} Jan 22 07:32:37 crc kubenswrapper[4982]: I0122 07:32:37.157721 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 22 07:32:37 crc kubenswrapper[4982]: I0122 07:32:37.181453 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.72796896 podStartE2EDuration="5.181437574s" podCreationTimestamp="2026-01-22 07:32:32 +0000 UTC" firstStartedPulling="2026-01-22 07:32:32.994885433 +0000 UTC m=+6413.833523446" lastFinishedPulling="2026-01-22 07:32:36.448354057 +0000 UTC m=+6417.286992060" observedRunningTime="2026-01-22 07:32:37.177612752 +0000 UTC m=+6418.016250755" watchObservedRunningTime="2026-01-22 07:32:37.181437574 +0000 UTC m=+6418.020075577" Jan 22 07:32:39 crc kubenswrapper[4982]: I0122 07:32:39.134406 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Jan 22 07:32:39 crc kubenswrapper[4982]: I0122 07:32:39.163643 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0" Jan 22 07:32:39 crc kubenswrapper[4982]: I0122 07:32:39.446567 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/manila-api-0" Jan 22 07:33:02 
crc kubenswrapper[4982]: I0122 07:33:02.512756 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Jan 22 07:33:18 crc kubenswrapper[4982]: I0122 07:33:18.973963 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 07:33:18 crc kubenswrapper[4982]: I0122 07:33:18.974398 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 07:33:23 crc kubenswrapper[4982]: I0122 07:33:23.890640 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6d47b8c857-227ct"] Jan 22 07:33:23 crc kubenswrapper[4982]: I0122 07:33:23.893622 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d47b8c857-227ct" Jan 22 07:33:23 crc kubenswrapper[4982]: I0122 07:33:23.909041 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d47b8c857-227ct"] Jan 22 07:33:23 crc kubenswrapper[4982]: I0122 07:33:23.936152 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1" Jan 22 07:33:24 crc kubenswrapper[4982]: I0122 07:33:24.052462 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/050ac04b-1e81-48fb-85e0-adb14f4effb1-ovsdbserver-sb\") pod \"dnsmasq-dns-6d47b8c857-227ct\" (UID: \"050ac04b-1e81-48fb-85e0-adb14f4effb1\") " pod="openstack/dnsmasq-dns-6d47b8c857-227ct" Jan 22 07:33:24 crc kubenswrapper[4982]: I0122 07:33:24.052971 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/050ac04b-1e81-48fb-85e0-adb14f4effb1-openstack-cell1\") pod \"dnsmasq-dns-6d47b8c857-227ct\" (UID: \"050ac04b-1e81-48fb-85e0-adb14f4effb1\") " pod="openstack/dnsmasq-dns-6d47b8c857-227ct" Jan 22 07:33:24 crc kubenswrapper[4982]: I0122 07:33:24.053138 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/050ac04b-1e81-48fb-85e0-adb14f4effb1-ovsdbserver-nb\") pod \"dnsmasq-dns-6d47b8c857-227ct\" (UID: \"050ac04b-1e81-48fb-85e0-adb14f4effb1\") " pod="openstack/dnsmasq-dns-6d47b8c857-227ct" Jan 22 07:33:24 crc kubenswrapper[4982]: I0122 07:33:24.053257 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/050ac04b-1e81-48fb-85e0-adb14f4effb1-config\") pod \"dnsmasq-dns-6d47b8c857-227ct\" (UID: \"050ac04b-1e81-48fb-85e0-adb14f4effb1\") " pod="openstack/dnsmasq-dns-6d47b8c857-227ct" Jan 22 07:33:24 crc kubenswrapper[4982]: I0122 07:33:24.053638 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tdm49\" (UniqueName: \"kubernetes.io/projected/050ac04b-1e81-48fb-85e0-adb14f4effb1-kube-api-access-tdm49\") pod \"dnsmasq-dns-6d47b8c857-227ct\" (UID: 
\"050ac04b-1e81-48fb-85e0-adb14f4effb1\") " pod="openstack/dnsmasq-dns-6d47b8c857-227ct" Jan 22 07:33:24 crc kubenswrapper[4982]: I0122 07:33:24.053751 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/050ac04b-1e81-48fb-85e0-adb14f4effb1-dns-svc\") pod \"dnsmasq-dns-6d47b8c857-227ct\" (UID: \"050ac04b-1e81-48fb-85e0-adb14f4effb1\") " pod="openstack/dnsmasq-dns-6d47b8c857-227ct" Jan 22 07:33:24 crc kubenswrapper[4982]: I0122 07:33:24.155539 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/050ac04b-1e81-48fb-85e0-adb14f4effb1-ovsdbserver-nb\") pod \"dnsmasq-dns-6d47b8c857-227ct\" (UID: \"050ac04b-1e81-48fb-85e0-adb14f4effb1\") " pod="openstack/dnsmasq-dns-6d47b8c857-227ct" Jan 22 07:33:24 crc kubenswrapper[4982]: I0122 07:33:24.155620 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/050ac04b-1e81-48fb-85e0-adb14f4effb1-config\") pod \"dnsmasq-dns-6d47b8c857-227ct\" (UID: \"050ac04b-1e81-48fb-85e0-adb14f4effb1\") " pod="openstack/dnsmasq-dns-6d47b8c857-227ct" Jan 22 07:33:24 crc kubenswrapper[4982]: I0122 07:33:24.155735 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tdm49\" (UniqueName: \"kubernetes.io/projected/050ac04b-1e81-48fb-85e0-adb14f4effb1-kube-api-access-tdm49\") pod \"dnsmasq-dns-6d47b8c857-227ct\" (UID: \"050ac04b-1e81-48fb-85e0-adb14f4effb1\") " pod="openstack/dnsmasq-dns-6d47b8c857-227ct" Jan 22 07:33:24 crc kubenswrapper[4982]: I0122 07:33:24.155769 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/050ac04b-1e81-48fb-85e0-adb14f4effb1-dns-svc\") pod \"dnsmasq-dns-6d47b8c857-227ct\" (UID: \"050ac04b-1e81-48fb-85e0-adb14f4effb1\") " pod="openstack/dnsmasq-dns-6d47b8c857-227ct" Jan 22 07:33:24 crc kubenswrapper[4982]: I0122 07:33:24.155809 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/050ac04b-1e81-48fb-85e0-adb14f4effb1-ovsdbserver-sb\") pod \"dnsmasq-dns-6d47b8c857-227ct\" (UID: \"050ac04b-1e81-48fb-85e0-adb14f4effb1\") " pod="openstack/dnsmasq-dns-6d47b8c857-227ct" Jan 22 07:33:24 crc kubenswrapper[4982]: I0122 07:33:24.156092 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/050ac04b-1e81-48fb-85e0-adb14f4effb1-openstack-cell1\") pod \"dnsmasq-dns-6d47b8c857-227ct\" (UID: \"050ac04b-1e81-48fb-85e0-adb14f4effb1\") " pod="openstack/dnsmasq-dns-6d47b8c857-227ct" Jan 22 07:33:24 crc kubenswrapper[4982]: I0122 07:33:24.156700 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/050ac04b-1e81-48fb-85e0-adb14f4effb1-config\") pod \"dnsmasq-dns-6d47b8c857-227ct\" (UID: \"050ac04b-1e81-48fb-85e0-adb14f4effb1\") " pod="openstack/dnsmasq-dns-6d47b8c857-227ct" Jan 22 07:33:24 crc kubenswrapper[4982]: I0122 07:33:24.156925 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/050ac04b-1e81-48fb-85e0-adb14f4effb1-openstack-cell1\") pod \"dnsmasq-dns-6d47b8c857-227ct\" (UID: \"050ac04b-1e81-48fb-85e0-adb14f4effb1\") " pod="openstack/dnsmasq-dns-6d47b8c857-227ct" Jan 
22 07:33:24 crc kubenswrapper[4982]: I0122 07:33:24.156932 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/050ac04b-1e81-48fb-85e0-adb14f4effb1-dns-svc\") pod \"dnsmasq-dns-6d47b8c857-227ct\" (UID: \"050ac04b-1e81-48fb-85e0-adb14f4effb1\") " pod="openstack/dnsmasq-dns-6d47b8c857-227ct" Jan 22 07:33:24 crc kubenswrapper[4982]: I0122 07:33:24.157134 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/050ac04b-1e81-48fb-85e0-adb14f4effb1-ovsdbserver-sb\") pod \"dnsmasq-dns-6d47b8c857-227ct\" (UID: \"050ac04b-1e81-48fb-85e0-adb14f4effb1\") " pod="openstack/dnsmasq-dns-6d47b8c857-227ct" Jan 22 07:33:24 crc kubenswrapper[4982]: I0122 07:33:24.157446 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/050ac04b-1e81-48fb-85e0-adb14f4effb1-ovsdbserver-nb\") pod \"dnsmasq-dns-6d47b8c857-227ct\" (UID: \"050ac04b-1e81-48fb-85e0-adb14f4effb1\") " pod="openstack/dnsmasq-dns-6d47b8c857-227ct" Jan 22 07:33:24 crc kubenswrapper[4982]: I0122 07:33:24.182107 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tdm49\" (UniqueName: \"kubernetes.io/projected/050ac04b-1e81-48fb-85e0-adb14f4effb1-kube-api-access-tdm49\") pod \"dnsmasq-dns-6d47b8c857-227ct\" (UID: \"050ac04b-1e81-48fb-85e0-adb14f4effb1\") " pod="openstack/dnsmasq-dns-6d47b8c857-227ct" Jan 22 07:33:24 crc kubenswrapper[4982]: I0122 07:33:24.254624 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d47b8c857-227ct" Jan 22 07:33:24 crc kubenswrapper[4982]: I0122 07:33:24.738513 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d47b8c857-227ct"] Jan 22 07:33:25 crc kubenswrapper[4982]: I0122 07:33:25.674994 4982 generic.go:334] "Generic (PLEG): container finished" podID="050ac04b-1e81-48fb-85e0-adb14f4effb1" containerID="e08003d6ecef4671d297ca04b6086765591c9e6c0066c9b00d39ee2bbba09a74" exitCode=0 Jan 22 07:33:25 crc kubenswrapper[4982]: I0122 07:33:25.675545 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d47b8c857-227ct" event={"ID":"050ac04b-1e81-48fb-85e0-adb14f4effb1","Type":"ContainerDied","Data":"e08003d6ecef4671d297ca04b6086765591c9e6c0066c9b00d39ee2bbba09a74"} Jan 22 07:33:25 crc kubenswrapper[4982]: I0122 07:33:25.675593 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d47b8c857-227ct" event={"ID":"050ac04b-1e81-48fb-85e0-adb14f4effb1","Type":"ContainerStarted","Data":"af52c98c8a6f723b794b249c8f17fe9735912a789c7a129a07dc821974fc44e3"} Jan 22 07:33:26 crc kubenswrapper[4982]: I0122 07:33:26.688399 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d47b8c857-227ct" event={"ID":"050ac04b-1e81-48fb-85e0-adb14f4effb1","Type":"ContainerStarted","Data":"755713527c757aab4b3f35bece527f41eaa99108f52e968188131ed1f19eda1b"} Jan 22 07:33:26 crc kubenswrapper[4982]: I0122 07:33:26.689393 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6d47b8c857-227ct" Jan 22 07:33:26 crc kubenswrapper[4982]: I0122 07:33:26.710716 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6d47b8c857-227ct" podStartSLOduration=3.710692849 podStartE2EDuration="3.710692849s" podCreationTimestamp="2026-01-22 07:33:23 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:33:26.706104376 +0000 UTC m=+6467.544742399" watchObservedRunningTime="2026-01-22 07:33:26.710692849 +0000 UTC m=+6467.549330852" Jan 22 07:33:34 crc kubenswrapper[4982]: I0122 07:33:34.256411 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6d47b8c857-227ct" Jan 22 07:33:34 crc kubenswrapper[4982]: I0122 07:33:34.373131 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8589d7b9f9-lz752"] Jan 22 07:33:34 crc kubenswrapper[4982]: I0122 07:33:34.373736 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8589d7b9f9-lz752" podUID="5c5e4058-2c05-4b26-9364-f828e9e01253" containerName="dnsmasq-dns" containerID="cri-o://8717ab7e46bcf6964750786b6e3c7b4fff1cca1be458e84aec2fee1d461b7103" gracePeriod=10 Jan 22 07:33:34 crc kubenswrapper[4982]: I0122 07:33:34.552630 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-79c985c4bf-l49md"] Jan 22 07:33:34 crc kubenswrapper[4982]: I0122 07:33:34.555092 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79c985c4bf-l49md" Jan 22 07:33:34 crc kubenswrapper[4982]: I0122 07:33:34.587938 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79c985c4bf-l49md"] Jan 22 07:33:34 crc kubenswrapper[4982]: I0122 07:33:34.629297 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00dfc991-c4f4-48d0-8cba-e45257f5fbf9-config\") pod \"dnsmasq-dns-79c985c4bf-l49md\" (UID: \"00dfc991-c4f4-48d0-8cba-e45257f5fbf9\") " pod="openstack/dnsmasq-dns-79c985c4bf-l49md" Jan 22 07:33:34 crc kubenswrapper[4982]: I0122 07:33:34.629398 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/00dfc991-c4f4-48d0-8cba-e45257f5fbf9-ovsdbserver-sb\") pod \"dnsmasq-dns-79c985c4bf-l49md\" (UID: \"00dfc991-c4f4-48d0-8cba-e45257f5fbf9\") " pod="openstack/dnsmasq-dns-79c985c4bf-l49md" Jan 22 07:33:34 crc kubenswrapper[4982]: I0122 07:33:34.629508 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmlrp\" (UniqueName: \"kubernetes.io/projected/00dfc991-c4f4-48d0-8cba-e45257f5fbf9-kube-api-access-zmlrp\") pod \"dnsmasq-dns-79c985c4bf-l49md\" (UID: \"00dfc991-c4f4-48d0-8cba-e45257f5fbf9\") " pod="openstack/dnsmasq-dns-79c985c4bf-l49md" Jan 22 07:33:34 crc kubenswrapper[4982]: I0122 07:33:34.629536 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/00dfc991-c4f4-48d0-8cba-e45257f5fbf9-openstack-cell1\") pod \"dnsmasq-dns-79c985c4bf-l49md\" (UID: \"00dfc991-c4f4-48d0-8cba-e45257f5fbf9\") " pod="openstack/dnsmasq-dns-79c985c4bf-l49md" Jan 22 07:33:34 crc kubenswrapper[4982]: I0122 07:33:34.629812 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/00dfc991-c4f4-48d0-8cba-e45257f5fbf9-ovsdbserver-nb\") pod \"dnsmasq-dns-79c985c4bf-l49md\" (UID: \"00dfc991-c4f4-48d0-8cba-e45257f5fbf9\") " pod="openstack/dnsmasq-dns-79c985c4bf-l49md" Jan 22 07:33:34 crc 
kubenswrapper[4982]: I0122 07:33:34.629898 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/00dfc991-c4f4-48d0-8cba-e45257f5fbf9-dns-svc\") pod \"dnsmasq-dns-79c985c4bf-l49md\" (UID: \"00dfc991-c4f4-48d0-8cba-e45257f5fbf9\") " pod="openstack/dnsmasq-dns-79c985c4bf-l49md" Jan 22 07:33:34 crc kubenswrapper[4982]: I0122 07:33:34.733233 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmlrp\" (UniqueName: \"kubernetes.io/projected/00dfc991-c4f4-48d0-8cba-e45257f5fbf9-kube-api-access-zmlrp\") pod \"dnsmasq-dns-79c985c4bf-l49md\" (UID: \"00dfc991-c4f4-48d0-8cba-e45257f5fbf9\") " pod="openstack/dnsmasq-dns-79c985c4bf-l49md" Jan 22 07:33:34 crc kubenswrapper[4982]: I0122 07:33:34.733303 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/00dfc991-c4f4-48d0-8cba-e45257f5fbf9-openstack-cell1\") pod \"dnsmasq-dns-79c985c4bf-l49md\" (UID: \"00dfc991-c4f4-48d0-8cba-e45257f5fbf9\") " pod="openstack/dnsmasq-dns-79c985c4bf-l49md" Jan 22 07:33:34 crc kubenswrapper[4982]: I0122 07:33:34.733907 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/00dfc991-c4f4-48d0-8cba-e45257f5fbf9-ovsdbserver-nb\") pod \"dnsmasq-dns-79c985c4bf-l49md\" (UID: \"00dfc991-c4f4-48d0-8cba-e45257f5fbf9\") " pod="openstack/dnsmasq-dns-79c985c4bf-l49md" Jan 22 07:33:34 crc kubenswrapper[4982]: I0122 07:33:34.733985 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/00dfc991-c4f4-48d0-8cba-e45257f5fbf9-dns-svc\") pod \"dnsmasq-dns-79c985c4bf-l49md\" (UID: \"00dfc991-c4f4-48d0-8cba-e45257f5fbf9\") " pod="openstack/dnsmasq-dns-79c985c4bf-l49md" Jan 22 07:33:34 crc kubenswrapper[4982]: I0122 07:33:34.734119 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00dfc991-c4f4-48d0-8cba-e45257f5fbf9-config\") pod \"dnsmasq-dns-79c985c4bf-l49md\" (UID: \"00dfc991-c4f4-48d0-8cba-e45257f5fbf9\") " pod="openstack/dnsmasq-dns-79c985c4bf-l49md" Jan 22 07:33:34 crc kubenswrapper[4982]: I0122 07:33:34.734168 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/00dfc991-c4f4-48d0-8cba-e45257f5fbf9-ovsdbserver-sb\") pod \"dnsmasq-dns-79c985c4bf-l49md\" (UID: \"00dfc991-c4f4-48d0-8cba-e45257f5fbf9\") " pod="openstack/dnsmasq-dns-79c985c4bf-l49md" Jan 22 07:33:34 crc kubenswrapper[4982]: I0122 07:33:34.734289 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/00dfc991-c4f4-48d0-8cba-e45257f5fbf9-openstack-cell1\") pod \"dnsmasq-dns-79c985c4bf-l49md\" (UID: \"00dfc991-c4f4-48d0-8cba-e45257f5fbf9\") " pod="openstack/dnsmasq-dns-79c985c4bf-l49md" Jan 22 07:33:34 crc kubenswrapper[4982]: I0122 07:33:34.734766 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/00dfc991-c4f4-48d0-8cba-e45257f5fbf9-ovsdbserver-nb\") pod \"dnsmasq-dns-79c985c4bf-l49md\" (UID: \"00dfc991-c4f4-48d0-8cba-e45257f5fbf9\") " pod="openstack/dnsmasq-dns-79c985c4bf-l49md" Jan 22 07:33:34 crc kubenswrapper[4982]: I0122 07:33:34.735044 4982 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/00dfc991-c4f4-48d0-8cba-e45257f5fbf9-dns-svc\") pod \"dnsmasq-dns-79c985c4bf-l49md\" (UID: \"00dfc991-c4f4-48d0-8cba-e45257f5fbf9\") " pod="openstack/dnsmasq-dns-79c985c4bf-l49md" Jan 22 07:33:34 crc kubenswrapper[4982]: I0122 07:33:34.735136 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00dfc991-c4f4-48d0-8cba-e45257f5fbf9-config\") pod \"dnsmasq-dns-79c985c4bf-l49md\" (UID: \"00dfc991-c4f4-48d0-8cba-e45257f5fbf9\") " pod="openstack/dnsmasq-dns-79c985c4bf-l49md" Jan 22 07:33:34 crc kubenswrapper[4982]: I0122 07:33:34.735630 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/00dfc991-c4f4-48d0-8cba-e45257f5fbf9-ovsdbserver-sb\") pod \"dnsmasq-dns-79c985c4bf-l49md\" (UID: \"00dfc991-c4f4-48d0-8cba-e45257f5fbf9\") " pod="openstack/dnsmasq-dns-79c985c4bf-l49md" Jan 22 07:33:34 crc kubenswrapper[4982]: I0122 07:33:34.758811 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmlrp\" (UniqueName: \"kubernetes.io/projected/00dfc991-c4f4-48d0-8cba-e45257f5fbf9-kube-api-access-zmlrp\") pod \"dnsmasq-dns-79c985c4bf-l49md\" (UID: \"00dfc991-c4f4-48d0-8cba-e45257f5fbf9\") " pod="openstack/dnsmasq-dns-79c985c4bf-l49md" Jan 22 07:33:34 crc kubenswrapper[4982]: I0122 07:33:34.771915 4982 generic.go:334] "Generic (PLEG): container finished" podID="5c5e4058-2c05-4b26-9364-f828e9e01253" containerID="8717ab7e46bcf6964750786b6e3c7b4fff1cca1be458e84aec2fee1d461b7103" exitCode=0 Jan 22 07:33:34 crc kubenswrapper[4982]: I0122 07:33:34.771981 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8589d7b9f9-lz752" event={"ID":"5c5e4058-2c05-4b26-9364-f828e9e01253","Type":"ContainerDied","Data":"8717ab7e46bcf6964750786b6e3c7b4fff1cca1be458e84aec2fee1d461b7103"} Jan 22 07:33:34 crc kubenswrapper[4982]: I0122 07:33:34.882208 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79c985c4bf-l49md" Jan 22 07:33:34 crc kubenswrapper[4982]: I0122 07:33:34.997967 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8589d7b9f9-lz752" Jan 22 07:33:35 crc kubenswrapper[4982]: I0122 07:33:35.041378 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wtnpk\" (UniqueName: \"kubernetes.io/projected/5c5e4058-2c05-4b26-9364-f828e9e01253-kube-api-access-wtnpk\") pod \"5c5e4058-2c05-4b26-9364-f828e9e01253\" (UID: \"5c5e4058-2c05-4b26-9364-f828e9e01253\") " Jan 22 07:33:35 crc kubenswrapper[4982]: I0122 07:33:35.041788 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c5e4058-2c05-4b26-9364-f828e9e01253-ovsdbserver-nb\") pod \"5c5e4058-2c05-4b26-9364-f828e9e01253\" (UID: \"5c5e4058-2c05-4b26-9364-f828e9e01253\") " Jan 22 07:33:35 crc kubenswrapper[4982]: I0122 07:33:35.041982 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c5e4058-2c05-4b26-9364-f828e9e01253-config\") pod \"5c5e4058-2c05-4b26-9364-f828e9e01253\" (UID: \"5c5e4058-2c05-4b26-9364-f828e9e01253\") " Jan 22 07:33:35 crc kubenswrapper[4982]: I0122 07:33:35.042008 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c5e4058-2c05-4b26-9364-f828e9e01253-ovsdbserver-sb\") pod \"5c5e4058-2c05-4b26-9364-f828e9e01253\" (UID: \"5c5e4058-2c05-4b26-9364-f828e9e01253\") " Jan 22 07:33:35 crc kubenswrapper[4982]: I0122 07:33:35.042060 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c5e4058-2c05-4b26-9364-f828e9e01253-dns-svc\") pod \"5c5e4058-2c05-4b26-9364-f828e9e01253\" (UID: \"5c5e4058-2c05-4b26-9364-f828e9e01253\") " Jan 22 07:33:35 crc kubenswrapper[4982]: I0122 07:33:35.048709 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c5e4058-2c05-4b26-9364-f828e9e01253-kube-api-access-wtnpk" (OuterVolumeSpecName: "kube-api-access-wtnpk") pod "5c5e4058-2c05-4b26-9364-f828e9e01253" (UID: "5c5e4058-2c05-4b26-9364-f828e9e01253"). InnerVolumeSpecName "kube-api-access-wtnpk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:33:35 crc kubenswrapper[4982]: I0122 07:33:35.100715 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c5e4058-2c05-4b26-9364-f828e9e01253-config" (OuterVolumeSpecName: "config") pod "5c5e4058-2c05-4b26-9364-f828e9e01253" (UID: "5c5e4058-2c05-4b26-9364-f828e9e01253"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:33:35 crc kubenswrapper[4982]: I0122 07:33:35.104771 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c5e4058-2c05-4b26-9364-f828e9e01253-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5c5e4058-2c05-4b26-9364-f828e9e01253" (UID: "5c5e4058-2c05-4b26-9364-f828e9e01253"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:33:35 crc kubenswrapper[4982]: I0122 07:33:35.105780 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c5e4058-2c05-4b26-9364-f828e9e01253-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5c5e4058-2c05-4b26-9364-f828e9e01253" (UID: "5c5e4058-2c05-4b26-9364-f828e9e01253"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:33:35 crc kubenswrapper[4982]: I0122 07:33:35.115095 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c5e4058-2c05-4b26-9364-f828e9e01253-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5c5e4058-2c05-4b26-9364-f828e9e01253" (UID: "5c5e4058-2c05-4b26-9364-f828e9e01253"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:33:35 crc kubenswrapper[4982]: I0122 07:33:35.148935 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wtnpk\" (UniqueName: \"kubernetes.io/projected/5c5e4058-2c05-4b26-9364-f828e9e01253-kube-api-access-wtnpk\") on node \"crc\" DevicePath \"\"" Jan 22 07:33:35 crc kubenswrapper[4982]: I0122 07:33:35.148963 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c5e4058-2c05-4b26-9364-f828e9e01253-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 07:33:35 crc kubenswrapper[4982]: I0122 07:33:35.148973 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c5e4058-2c05-4b26-9364-f828e9e01253-config\") on node \"crc\" DevicePath \"\"" Jan 22 07:33:35 crc kubenswrapper[4982]: I0122 07:33:35.148982 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c5e4058-2c05-4b26-9364-f828e9e01253-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 07:33:35 crc kubenswrapper[4982]: I0122 07:33:35.148991 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c5e4058-2c05-4b26-9364-f828e9e01253-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 07:33:35 crc kubenswrapper[4982]: I0122 07:33:35.418966 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79c985c4bf-l49md"] Jan 22 07:33:35 crc kubenswrapper[4982]: I0122 07:33:35.784845 4982 generic.go:334] "Generic (PLEG): container finished" podID="00dfc991-c4f4-48d0-8cba-e45257f5fbf9" containerID="e1355976516e743676e14138a29fffe908d02fe7b42c23191047b42406ec6989" exitCode=0 Jan 22 07:33:35 crc kubenswrapper[4982]: I0122 07:33:35.784956 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79c985c4bf-l49md" event={"ID":"00dfc991-c4f4-48d0-8cba-e45257f5fbf9","Type":"ContainerDied","Data":"e1355976516e743676e14138a29fffe908d02fe7b42c23191047b42406ec6989"} Jan 22 07:33:35 crc kubenswrapper[4982]: I0122 07:33:35.785513 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79c985c4bf-l49md" event={"ID":"00dfc991-c4f4-48d0-8cba-e45257f5fbf9","Type":"ContainerStarted","Data":"4442d6ffff302b4615667e9fecc3c7d2553b484a551e20ac7c9553bbcfe674fc"} Jan 22 07:33:35 crc kubenswrapper[4982]: I0122 07:33:35.788844 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8589d7b9f9-lz752" event={"ID":"5c5e4058-2c05-4b26-9364-f828e9e01253","Type":"ContainerDied","Data":"9dca691a800b5a0e15688d52367554b96ea46b9967c6c7fc3bb742defbeb288c"} Jan 22 07:33:35 crc kubenswrapper[4982]: I0122 07:33:35.788963 4982 scope.go:117] "RemoveContainer" containerID="8717ab7e46bcf6964750786b6e3c7b4fff1cca1be458e84aec2fee1d461b7103" Jan 22 07:33:35 crc kubenswrapper[4982]: I0122 07:33:35.789090 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8589d7b9f9-lz752" Jan 22 07:33:35 crc kubenswrapper[4982]: I0122 07:33:35.948393 4982 scope.go:117] "RemoveContainer" containerID="0040f63e6c49227c890af705123fda8efc736f42330d5b991bd31624e5b5e7d9" Jan 22 07:33:35 crc kubenswrapper[4982]: I0122 07:33:35.970649 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8589d7b9f9-lz752"] Jan 22 07:33:35 crc kubenswrapper[4982]: I0122 07:33:35.986069 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8589d7b9f9-lz752"] Jan 22 07:33:36 crc kubenswrapper[4982]: E0122 07:33:36.049678 4982 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c5e4058_2c05_4b26_9364_f828e9e01253.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c5e4058_2c05_4b26_9364_f828e9e01253.slice/crio-9dca691a800b5a0e15688d52367554b96ea46b9967c6c7fc3bb742defbeb288c\": RecentStats: unable to find data in memory cache]" Jan 22 07:33:36 crc kubenswrapper[4982]: I0122 07:33:36.818241 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79c985c4bf-l49md" event={"ID":"00dfc991-c4f4-48d0-8cba-e45257f5fbf9","Type":"ContainerStarted","Data":"7a4732cb4cbde5b67a0a7a92260f95be648d46abc1e683b158699599f8ef5a8c"} Jan 22 07:33:36 crc kubenswrapper[4982]: I0122 07:33:36.818697 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-79c985c4bf-l49md" Jan 22 07:33:36 crc kubenswrapper[4982]: I0122 07:33:36.851468 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-79c985c4bf-l49md" podStartSLOduration=2.8514372 podStartE2EDuration="2.8514372s" podCreationTimestamp="2026-01-22 07:33:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:33:36.837410271 +0000 UTC m=+6477.676048284" watchObservedRunningTime="2026-01-22 07:33:36.8514372 +0000 UTC m=+6477.690075223" Jan 22 07:33:37 crc kubenswrapper[4982]: I0122 07:33:37.732131 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c5e4058-2c05-4b26-9364-f828e9e01253" path="/var/lib/kubelet/pods/5c5e4058-2c05-4b26-9364-f828e9e01253/volumes" Jan 22 07:33:44 crc kubenswrapper[4982]: I0122 07:33:44.884420 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-79c985c4bf-l49md" Jan 22 07:33:44 crc kubenswrapper[4982]: I0122 07:33:44.961341 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d47b8c857-227ct"] Jan 22 07:33:44 crc kubenswrapper[4982]: I0122 07:33:44.961572 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6d47b8c857-227ct" podUID="050ac04b-1e81-48fb-85e0-adb14f4effb1" containerName="dnsmasq-dns" containerID="cri-o://755713527c757aab4b3f35bece527f41eaa99108f52e968188131ed1f19eda1b" gracePeriod=10 Jan 22 07:33:45 crc kubenswrapper[4982]: I0122 07:33:45.499492 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d47b8c857-227ct" Jan 22 07:33:45 crc kubenswrapper[4982]: I0122 07:33:45.694258 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/050ac04b-1e81-48fb-85e0-adb14f4effb1-ovsdbserver-nb\") pod \"050ac04b-1e81-48fb-85e0-adb14f4effb1\" (UID: \"050ac04b-1e81-48fb-85e0-adb14f4effb1\") " Jan 22 07:33:45 crc kubenswrapper[4982]: I0122 07:33:45.694632 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/050ac04b-1e81-48fb-85e0-adb14f4effb1-dns-svc\") pod \"050ac04b-1e81-48fb-85e0-adb14f4effb1\" (UID: \"050ac04b-1e81-48fb-85e0-adb14f4effb1\") " Jan 22 07:33:45 crc kubenswrapper[4982]: I0122 07:33:45.694694 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/050ac04b-1e81-48fb-85e0-adb14f4effb1-openstack-cell1\") pod \"050ac04b-1e81-48fb-85e0-adb14f4effb1\" (UID: \"050ac04b-1e81-48fb-85e0-adb14f4effb1\") " Jan 22 07:33:45 crc kubenswrapper[4982]: I0122 07:33:45.694799 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/050ac04b-1e81-48fb-85e0-adb14f4effb1-ovsdbserver-sb\") pod \"050ac04b-1e81-48fb-85e0-adb14f4effb1\" (UID: \"050ac04b-1e81-48fb-85e0-adb14f4effb1\") " Jan 22 07:33:45 crc kubenswrapper[4982]: I0122 07:33:45.694836 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/050ac04b-1e81-48fb-85e0-adb14f4effb1-config\") pod \"050ac04b-1e81-48fb-85e0-adb14f4effb1\" (UID: \"050ac04b-1e81-48fb-85e0-adb14f4effb1\") " Jan 22 07:33:45 crc kubenswrapper[4982]: I0122 07:33:45.694888 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tdm49\" (UniqueName: \"kubernetes.io/projected/050ac04b-1e81-48fb-85e0-adb14f4effb1-kube-api-access-tdm49\") pod \"050ac04b-1e81-48fb-85e0-adb14f4effb1\" (UID: \"050ac04b-1e81-48fb-85e0-adb14f4effb1\") " Jan 22 07:33:45 crc kubenswrapper[4982]: I0122 07:33:45.715270 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/050ac04b-1e81-48fb-85e0-adb14f4effb1-kube-api-access-tdm49" (OuterVolumeSpecName: "kube-api-access-tdm49") pod "050ac04b-1e81-48fb-85e0-adb14f4effb1" (UID: "050ac04b-1e81-48fb-85e0-adb14f4effb1"). InnerVolumeSpecName "kube-api-access-tdm49". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:33:45 crc kubenswrapper[4982]: I0122 07:33:45.759630 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/050ac04b-1e81-48fb-85e0-adb14f4effb1-config" (OuterVolumeSpecName: "config") pod "050ac04b-1e81-48fb-85e0-adb14f4effb1" (UID: "050ac04b-1e81-48fb-85e0-adb14f4effb1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:33:45 crc kubenswrapper[4982]: I0122 07:33:45.784336 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/050ac04b-1e81-48fb-85e0-adb14f4effb1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "050ac04b-1e81-48fb-85e0-adb14f4effb1" (UID: "050ac04b-1e81-48fb-85e0-adb14f4effb1"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:33:45 crc kubenswrapper[4982]: I0122 07:33:45.784363 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/050ac04b-1e81-48fb-85e0-adb14f4effb1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "050ac04b-1e81-48fb-85e0-adb14f4effb1" (UID: "050ac04b-1e81-48fb-85e0-adb14f4effb1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:33:45 crc kubenswrapper[4982]: I0122 07:33:45.786022 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/050ac04b-1e81-48fb-85e0-adb14f4effb1-openstack-cell1" (OuterVolumeSpecName: "openstack-cell1") pod "050ac04b-1e81-48fb-85e0-adb14f4effb1" (UID: "050ac04b-1e81-48fb-85e0-adb14f4effb1"). InnerVolumeSpecName "openstack-cell1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:33:45 crc kubenswrapper[4982]: I0122 07:33:45.791155 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/050ac04b-1e81-48fb-85e0-adb14f4effb1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "050ac04b-1e81-48fb-85e0-adb14f4effb1" (UID: "050ac04b-1e81-48fb-85e0-adb14f4effb1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 07:33:45 crc kubenswrapper[4982]: I0122 07:33:45.797656 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/050ac04b-1e81-48fb-85e0-adb14f4effb1-config\") on node \"crc\" DevicePath \"\"" Jan 22 07:33:45 crc kubenswrapper[4982]: I0122 07:33:45.797694 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tdm49\" (UniqueName: \"kubernetes.io/projected/050ac04b-1e81-48fb-85e0-adb14f4effb1-kube-api-access-tdm49\") on node \"crc\" DevicePath \"\"" Jan 22 07:33:45 crc kubenswrapper[4982]: I0122 07:33:45.797709 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/050ac04b-1e81-48fb-85e0-adb14f4effb1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 07:33:45 crc kubenswrapper[4982]: I0122 07:33:45.797723 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/050ac04b-1e81-48fb-85e0-adb14f4effb1-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 07:33:45 crc kubenswrapper[4982]: I0122 07:33:45.797734 4982 reconciler_common.go:293] "Volume detached for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/050ac04b-1e81-48fb-85e0-adb14f4effb1-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 22 07:33:45 crc kubenswrapper[4982]: I0122 07:33:45.797745 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/050ac04b-1e81-48fb-85e0-adb14f4effb1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 07:33:45 crc kubenswrapper[4982]: I0122 07:33:45.948805 4982 generic.go:334] "Generic (PLEG): container finished" podID="050ac04b-1e81-48fb-85e0-adb14f4effb1" containerID="755713527c757aab4b3f35bece527f41eaa99108f52e968188131ed1f19eda1b" exitCode=0 Jan 22 07:33:45 crc kubenswrapper[4982]: I0122 07:33:45.948901 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d47b8c857-227ct" Jan 22 07:33:45 crc kubenswrapper[4982]: I0122 07:33:45.948916 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d47b8c857-227ct" event={"ID":"050ac04b-1e81-48fb-85e0-adb14f4effb1","Type":"ContainerDied","Data":"755713527c757aab4b3f35bece527f41eaa99108f52e968188131ed1f19eda1b"} Jan 22 07:33:45 crc kubenswrapper[4982]: I0122 07:33:45.950145 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d47b8c857-227ct" event={"ID":"050ac04b-1e81-48fb-85e0-adb14f4effb1","Type":"ContainerDied","Data":"af52c98c8a6f723b794b249c8f17fe9735912a789c7a129a07dc821974fc44e3"} Jan 22 07:33:45 crc kubenswrapper[4982]: I0122 07:33:45.950196 4982 scope.go:117] "RemoveContainer" containerID="755713527c757aab4b3f35bece527f41eaa99108f52e968188131ed1f19eda1b" Jan 22 07:33:45 crc kubenswrapper[4982]: I0122 07:33:45.972616 4982 scope.go:117] "RemoveContainer" containerID="e08003d6ecef4671d297ca04b6086765591c9e6c0066c9b00d39ee2bbba09a74" Jan 22 07:33:45 crc kubenswrapper[4982]: I0122 07:33:45.989727 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d47b8c857-227ct"] Jan 22 07:33:46 crc kubenswrapper[4982]: I0122 07:33:46.003428 4982 scope.go:117] "RemoveContainer" containerID="755713527c757aab4b3f35bece527f41eaa99108f52e968188131ed1f19eda1b" Jan 22 07:33:46 crc kubenswrapper[4982]: E0122 07:33:46.003992 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"755713527c757aab4b3f35bece527f41eaa99108f52e968188131ed1f19eda1b\": container with ID starting with 755713527c757aab4b3f35bece527f41eaa99108f52e968188131ed1f19eda1b not found: ID does not exist" containerID="755713527c757aab4b3f35bece527f41eaa99108f52e968188131ed1f19eda1b" Jan 22 07:33:46 crc kubenswrapper[4982]: I0122 07:33:46.004036 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"755713527c757aab4b3f35bece527f41eaa99108f52e968188131ed1f19eda1b"} err="failed to get container status \"755713527c757aab4b3f35bece527f41eaa99108f52e968188131ed1f19eda1b\": rpc error: code = NotFound desc = could not find container \"755713527c757aab4b3f35bece527f41eaa99108f52e968188131ed1f19eda1b\": container with ID starting with 755713527c757aab4b3f35bece527f41eaa99108f52e968188131ed1f19eda1b not found: ID does not exist" Jan 22 07:33:46 crc kubenswrapper[4982]: I0122 07:33:46.004062 4982 scope.go:117] "RemoveContainer" containerID="e08003d6ecef4671d297ca04b6086765591c9e6c0066c9b00d39ee2bbba09a74" Jan 22 07:33:46 crc kubenswrapper[4982]: I0122 07:33:46.004515 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6d47b8c857-227ct"] Jan 22 07:33:46 crc kubenswrapper[4982]: E0122 07:33:46.004561 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e08003d6ecef4671d297ca04b6086765591c9e6c0066c9b00d39ee2bbba09a74\": container with ID starting with e08003d6ecef4671d297ca04b6086765591c9e6c0066c9b00d39ee2bbba09a74 not found: ID does not exist" containerID="e08003d6ecef4671d297ca04b6086765591c9e6c0066c9b00d39ee2bbba09a74" Jan 22 07:33:46 crc kubenswrapper[4982]: I0122 07:33:46.004593 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e08003d6ecef4671d297ca04b6086765591c9e6c0066c9b00d39ee2bbba09a74"} err="failed to get container status 
\"e08003d6ecef4671d297ca04b6086765591c9e6c0066c9b00d39ee2bbba09a74\": rpc error: code = NotFound desc = could not find container \"e08003d6ecef4671d297ca04b6086765591c9e6c0066c9b00d39ee2bbba09a74\": container with ID starting with e08003d6ecef4671d297ca04b6086765591c9e6c0066c9b00d39ee2bbba09a74 not found: ID does not exist" Jan 22 07:33:47 crc kubenswrapper[4982]: I0122 07:33:47.743559 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="050ac04b-1e81-48fb-85e0-adb14f4effb1" path="/var/lib/kubelet/pods/050ac04b-1e81-48fb-85e0-adb14f4effb1/volumes" Jan 22 07:33:48 crc kubenswrapper[4982]: I0122 07:33:48.976416 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 07:33:48 crc kubenswrapper[4982]: I0122 07:33:48.976476 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 07:33:56 crc kubenswrapper[4982]: I0122 07:33:56.179795 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f"] Jan 22 07:33:56 crc kubenswrapper[4982]: E0122 07:33:56.180931 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="050ac04b-1e81-48fb-85e0-adb14f4effb1" containerName="init" Jan 22 07:33:56 crc kubenswrapper[4982]: I0122 07:33:56.180949 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="050ac04b-1e81-48fb-85e0-adb14f4effb1" containerName="init" Jan 22 07:33:56 crc kubenswrapper[4982]: E0122 07:33:56.180960 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c5e4058-2c05-4b26-9364-f828e9e01253" containerName="init" Jan 22 07:33:56 crc kubenswrapper[4982]: I0122 07:33:56.180966 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c5e4058-2c05-4b26-9364-f828e9e01253" containerName="init" Jan 22 07:33:56 crc kubenswrapper[4982]: E0122 07:33:56.180999 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c5e4058-2c05-4b26-9364-f828e9e01253" containerName="dnsmasq-dns" Jan 22 07:33:56 crc kubenswrapper[4982]: I0122 07:33:56.181005 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c5e4058-2c05-4b26-9364-f828e9e01253" containerName="dnsmasq-dns" Jan 22 07:33:56 crc kubenswrapper[4982]: E0122 07:33:56.181013 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="050ac04b-1e81-48fb-85e0-adb14f4effb1" containerName="dnsmasq-dns" Jan 22 07:33:56 crc kubenswrapper[4982]: I0122 07:33:56.181019 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="050ac04b-1e81-48fb-85e0-adb14f4effb1" containerName="dnsmasq-dns" Jan 22 07:33:56 crc kubenswrapper[4982]: I0122 07:33:56.181202 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c5e4058-2c05-4b26-9364-f828e9e01253" containerName="dnsmasq-dns" Jan 22 07:33:56 crc kubenswrapper[4982]: I0122 07:33:56.181222 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="050ac04b-1e81-48fb-85e0-adb14f4effb1" containerName="dnsmasq-dns" Jan 22 07:33:56 crc kubenswrapper[4982]: I0122 07:33:56.182000 4982 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f" Jan 22 07:33:56 crc kubenswrapper[4982]: I0122 07:33:56.187598 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 22 07:33:56 crc kubenswrapper[4982]: I0122 07:33:56.187695 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 22 07:33:56 crc kubenswrapper[4982]: I0122 07:33:56.187812 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 22 07:33:56 crc kubenswrapper[4982]: I0122 07:33:56.188513 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-mjzt9" Jan 22 07:33:56 crc kubenswrapper[4982]: I0122 07:33:56.215967 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f"] Jan 22 07:33:56 crc kubenswrapper[4982]: I0122 07:33:56.335095 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvwtg\" (UniqueName: \"kubernetes.io/projected/d2cafe5d-fd32-4e7b-befb-77bb775043da-kube-api-access-lvwtg\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f\" (UID: \"d2cafe5d-fd32-4e7b-befb-77bb775043da\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f" Jan 22 07:33:56 crc kubenswrapper[4982]: I0122 07:33:56.335321 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/d2cafe5d-fd32-4e7b-befb-77bb775043da-ssh-key-openstack-cell1\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f\" (UID: \"d2cafe5d-fd32-4e7b-befb-77bb775043da\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f" Jan 22 07:33:56 crc kubenswrapper[4982]: I0122 07:33:56.335424 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d2cafe5d-fd32-4e7b-befb-77bb775043da-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f\" (UID: \"d2cafe5d-fd32-4e7b-befb-77bb775043da\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f" Jan 22 07:33:56 crc kubenswrapper[4982]: I0122 07:33:56.335577 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2cafe5d-fd32-4e7b-befb-77bb775043da-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f\" (UID: \"d2cafe5d-fd32-4e7b-befb-77bb775043da\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f" Jan 22 07:33:56 crc kubenswrapper[4982]: I0122 07:33:56.335645 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d2cafe5d-fd32-4e7b-befb-77bb775043da-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f\" (UID: \"d2cafe5d-fd32-4e7b-befb-77bb775043da\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f" Jan 22 07:33:56 crc kubenswrapper[4982]: I0122 07:33:56.437164 4982 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d2cafe5d-fd32-4e7b-befb-77bb775043da-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f\" (UID: \"d2cafe5d-fd32-4e7b-befb-77bb775043da\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f" Jan 22 07:33:56 crc kubenswrapper[4982]: I0122 07:33:56.437986 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2cafe5d-fd32-4e7b-befb-77bb775043da-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f\" (UID: \"d2cafe5d-fd32-4e7b-befb-77bb775043da\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f" Jan 22 07:33:56 crc kubenswrapper[4982]: I0122 07:33:56.438038 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d2cafe5d-fd32-4e7b-befb-77bb775043da-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f\" (UID: \"d2cafe5d-fd32-4e7b-befb-77bb775043da\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f" Jan 22 07:33:56 crc kubenswrapper[4982]: I0122 07:33:56.438110 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvwtg\" (UniqueName: \"kubernetes.io/projected/d2cafe5d-fd32-4e7b-befb-77bb775043da-kube-api-access-lvwtg\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f\" (UID: \"d2cafe5d-fd32-4e7b-befb-77bb775043da\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f" Jan 22 07:33:56 crc kubenswrapper[4982]: I0122 07:33:56.438243 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/d2cafe5d-fd32-4e7b-befb-77bb775043da-ssh-key-openstack-cell1\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f\" (UID: \"d2cafe5d-fd32-4e7b-befb-77bb775043da\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f" Jan 22 07:33:56 crc kubenswrapper[4982]: I0122 07:33:56.443590 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d2cafe5d-fd32-4e7b-befb-77bb775043da-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f\" (UID: \"d2cafe5d-fd32-4e7b-befb-77bb775043da\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f" Jan 22 07:33:56 crc kubenswrapper[4982]: I0122 07:33:56.446731 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/d2cafe5d-fd32-4e7b-befb-77bb775043da-ssh-key-openstack-cell1\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f\" (UID: \"d2cafe5d-fd32-4e7b-befb-77bb775043da\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f" Jan 22 07:33:56 crc kubenswrapper[4982]: I0122 07:33:56.450253 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d2cafe5d-fd32-4e7b-befb-77bb775043da-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f\" (UID: \"d2cafe5d-fd32-4e7b-befb-77bb775043da\") " 
pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f" Jan 22 07:33:56 crc kubenswrapper[4982]: I0122 07:33:56.452342 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2cafe5d-fd32-4e7b-befb-77bb775043da-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f\" (UID: \"d2cafe5d-fd32-4e7b-befb-77bb775043da\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f" Jan 22 07:33:56 crc kubenswrapper[4982]: I0122 07:33:56.459330 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvwtg\" (UniqueName: \"kubernetes.io/projected/d2cafe5d-fd32-4e7b-befb-77bb775043da-kube-api-access-lvwtg\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f\" (UID: \"d2cafe5d-fd32-4e7b-befb-77bb775043da\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f" Jan 22 07:33:56 crc kubenswrapper[4982]: I0122 07:33:56.506555 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f" Jan 22 07:33:57 crc kubenswrapper[4982]: I0122 07:33:57.060092 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f"] Jan 22 07:33:57 crc kubenswrapper[4982]: W0122 07:33:57.067503 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd2cafe5d_fd32_4e7b_befb_77bb775043da.slice/crio-0b8dea971161be1ed5b6154d6e2e3d323e48b5761b705221ccc833d66dc31b00 WatchSource:0}: Error finding container 0b8dea971161be1ed5b6154d6e2e3d323e48b5761b705221ccc833d66dc31b00: Status 404 returned error can't find the container with id 0b8dea971161be1ed5b6154d6e2e3d323e48b5761b705221ccc833d66dc31b00 Jan 22 07:33:57 crc kubenswrapper[4982]: I0122 07:33:57.082635 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f" event={"ID":"d2cafe5d-fd32-4e7b-befb-77bb775043da","Type":"ContainerStarted","Data":"0b8dea971161be1ed5b6154d6e2e3d323e48b5761b705221ccc833d66dc31b00"} Jan 22 07:34:05 crc kubenswrapper[4982]: I0122 07:34:05.924698 4982 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod5c5e4058-2c05-4b26-9364-f828e9e01253"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod5c5e4058-2c05-4b26-9364-f828e9e01253] : Timed out while waiting for systemd to remove kubepods-besteffort-pod5c5e4058_2c05_4b26_9364_f828e9e01253.slice" Jan 22 07:34:07 crc kubenswrapper[4982]: I0122 07:34:07.187492 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f" event={"ID":"d2cafe5d-fd32-4e7b-befb-77bb775043da","Type":"ContainerStarted","Data":"956122aa1f6a9a604a00a1d5e1366c236c3a1bb7cd4f124e49e45a0993aec423"} Jan 22 07:34:18 crc kubenswrapper[4982]: I0122 07:34:18.059246 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f" podStartSLOduration=12.31498222 podStartE2EDuration="22.059216622s" podCreationTimestamp="2026-01-22 07:33:56 +0000 UTC" firstStartedPulling="2026-01-22 07:33:57.070370901 +0000 UTC 
m=+6497.909008904" lastFinishedPulling="2026-01-22 07:34:06.814605303 +0000 UTC m=+6507.653243306" observedRunningTime="2026-01-22 07:34:07.208009816 +0000 UTC m=+6508.046647839" watchObservedRunningTime="2026-01-22 07:34:18.059216622 +0000 UTC m=+6518.897854645" Jan 22 07:34:18 crc kubenswrapper[4982]: I0122 07:34:18.073463 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-db-create-l5lqp"] Jan 22 07:34:18 crc kubenswrapper[4982]: I0122 07:34:18.086655 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-db-create-l5lqp"] Jan 22 07:34:18 crc kubenswrapper[4982]: I0122 07:34:18.973960 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 07:34:18 crc kubenswrapper[4982]: I0122 07:34:18.974330 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 07:34:18 crc kubenswrapper[4982]: I0122 07:34:18.974382 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 07:34:18 crc kubenswrapper[4982]: I0122 07:34:18.975240 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"19d585995a1a34566da5efb1814b5c4cba5ac6e44bd9e57fd395fd3d6c801d5a"} pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 07:34:18 crc kubenswrapper[4982]: I0122 07:34:18.975302 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" containerID="cri-o://19d585995a1a34566da5efb1814b5c4cba5ac6e44bd9e57fd395fd3d6c801d5a" gracePeriod=600 Jan 22 07:34:19 crc kubenswrapper[4982]: I0122 07:34:19.322165 4982 generic.go:334] "Generic (PLEG): container finished" podID="d2cafe5d-fd32-4e7b-befb-77bb775043da" containerID="956122aa1f6a9a604a00a1d5e1366c236c3a1bb7cd4f124e49e45a0993aec423" exitCode=0 Jan 22 07:34:19 crc kubenswrapper[4982]: I0122 07:34:19.322258 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f" event={"ID":"d2cafe5d-fd32-4e7b-befb-77bb775043da","Type":"ContainerDied","Data":"956122aa1f6a9a604a00a1d5e1366c236c3a1bb7cd4f124e49e45a0993aec423"} Jan 22 07:34:19 crc kubenswrapper[4982]: I0122 07:34:19.326906 4982 generic.go:334] "Generic (PLEG): container finished" podID="2829369e-72ba-4637-853b-88f5cf242a0e" containerID="19d585995a1a34566da5efb1814b5c4cba5ac6e44bd9e57fd395fd3d6c801d5a" exitCode=0 Jan 22 07:34:19 crc kubenswrapper[4982]: I0122 07:34:19.326957 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" 
event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerDied","Data":"19d585995a1a34566da5efb1814b5c4cba5ac6e44bd9e57fd395fd3d6c801d5a"} Jan 22 07:34:19 crc kubenswrapper[4982]: I0122 07:34:19.326993 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"25737d7b74f4bc6c3c49867f5bcf2f1f76afa70bd21d73f1b66fd2997bf62736"} Jan 22 07:34:19 crc kubenswrapper[4982]: I0122 07:34:19.327021 4982 scope.go:117] "RemoveContainer" containerID="e86d7de3d5541f26c30d5e3150518e2a21550ebe9adbca61610c9190dc552078" Jan 22 07:34:19 crc kubenswrapper[4982]: I0122 07:34:19.733746 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68979bb0-36e1-4068-9700-f3eba7180048" path="/var/lib/kubelet/pods/68979bb0-36e1-4068-9700-f3eba7180048/volumes" Jan 22 07:34:20 crc kubenswrapper[4982]: I0122 07:34:20.040790 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-8c4e-account-create-update-kbxq5"] Jan 22 07:34:20 crc kubenswrapper[4982]: I0122 07:34:20.056070 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-8c4e-account-create-update-kbxq5"] Jan 22 07:34:20 crc kubenswrapper[4982]: I0122 07:34:20.864957 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f" Jan 22 07:34:20 crc kubenswrapper[4982]: I0122 07:34:20.994396 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lvwtg\" (UniqueName: \"kubernetes.io/projected/d2cafe5d-fd32-4e7b-befb-77bb775043da-kube-api-access-lvwtg\") pod \"d2cafe5d-fd32-4e7b-befb-77bb775043da\" (UID: \"d2cafe5d-fd32-4e7b-befb-77bb775043da\") " Jan 22 07:34:20 crc kubenswrapper[4982]: I0122 07:34:20.994542 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d2cafe5d-fd32-4e7b-befb-77bb775043da-ceph\") pod \"d2cafe5d-fd32-4e7b-befb-77bb775043da\" (UID: \"d2cafe5d-fd32-4e7b-befb-77bb775043da\") " Jan 22 07:34:20 crc kubenswrapper[4982]: I0122 07:34:20.994643 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d2cafe5d-fd32-4e7b-befb-77bb775043da-inventory\") pod \"d2cafe5d-fd32-4e7b-befb-77bb775043da\" (UID: \"d2cafe5d-fd32-4e7b-befb-77bb775043da\") " Jan 22 07:34:20 crc kubenswrapper[4982]: I0122 07:34:20.994806 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/d2cafe5d-fd32-4e7b-befb-77bb775043da-ssh-key-openstack-cell1\") pod \"d2cafe5d-fd32-4e7b-befb-77bb775043da\" (UID: \"d2cafe5d-fd32-4e7b-befb-77bb775043da\") " Jan 22 07:34:20 crc kubenswrapper[4982]: I0122 07:34:20.994918 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2cafe5d-fd32-4e7b-befb-77bb775043da-pre-adoption-validation-combined-ca-bundle\") pod \"d2cafe5d-fd32-4e7b-befb-77bb775043da\" (UID: \"d2cafe5d-fd32-4e7b-befb-77bb775043da\") " Jan 22 07:34:21 crc kubenswrapper[4982]: I0122 07:34:21.000280 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2cafe5d-fd32-4e7b-befb-77bb775043da-pre-adoption-validation-combined-ca-bundle" 
(OuterVolumeSpecName: "pre-adoption-validation-combined-ca-bundle") pod "d2cafe5d-fd32-4e7b-befb-77bb775043da" (UID: "d2cafe5d-fd32-4e7b-befb-77bb775043da"). InnerVolumeSpecName "pre-adoption-validation-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:34:21 crc kubenswrapper[4982]: I0122 07:34:21.003068 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2cafe5d-fd32-4e7b-befb-77bb775043da-kube-api-access-lvwtg" (OuterVolumeSpecName: "kube-api-access-lvwtg") pod "d2cafe5d-fd32-4e7b-befb-77bb775043da" (UID: "d2cafe5d-fd32-4e7b-befb-77bb775043da"). InnerVolumeSpecName "kube-api-access-lvwtg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:34:21 crc kubenswrapper[4982]: I0122 07:34:21.003206 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2cafe5d-fd32-4e7b-befb-77bb775043da-ceph" (OuterVolumeSpecName: "ceph") pod "d2cafe5d-fd32-4e7b-befb-77bb775043da" (UID: "d2cafe5d-fd32-4e7b-befb-77bb775043da"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:34:21 crc kubenswrapper[4982]: I0122 07:34:21.037424 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2cafe5d-fd32-4e7b-befb-77bb775043da-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "d2cafe5d-fd32-4e7b-befb-77bb775043da" (UID: "d2cafe5d-fd32-4e7b-befb-77bb775043da"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:34:21 crc kubenswrapper[4982]: I0122 07:34:21.042100 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2cafe5d-fd32-4e7b-befb-77bb775043da-inventory" (OuterVolumeSpecName: "inventory") pod "d2cafe5d-fd32-4e7b-befb-77bb775043da" (UID: "d2cafe5d-fd32-4e7b-befb-77bb775043da"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:34:21 crc kubenswrapper[4982]: I0122 07:34:21.097916 4982 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d2cafe5d-fd32-4e7b-befb-77bb775043da-ceph\") on node \"crc\" DevicePath \"\"" Jan 22 07:34:21 crc kubenswrapper[4982]: I0122 07:34:21.097949 4982 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d2cafe5d-fd32-4e7b-befb-77bb775043da-inventory\") on node \"crc\" DevicePath \"\"" Jan 22 07:34:21 crc kubenswrapper[4982]: I0122 07:34:21.097959 4982 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/d2cafe5d-fd32-4e7b-befb-77bb775043da-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 22 07:34:21 crc kubenswrapper[4982]: I0122 07:34:21.097969 4982 reconciler_common.go:293] "Volume detached for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2cafe5d-fd32-4e7b-befb-77bb775043da-pre-adoption-validation-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:34:21 crc kubenswrapper[4982]: I0122 07:34:21.097981 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lvwtg\" (UniqueName: \"kubernetes.io/projected/d2cafe5d-fd32-4e7b-befb-77bb775043da-kube-api-access-lvwtg\") on node \"crc\" DevicePath \"\"" Jan 22 07:34:21 crc kubenswrapper[4982]: I0122 07:34:21.354272 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f" event={"ID":"d2cafe5d-fd32-4e7b-befb-77bb775043da","Type":"ContainerDied","Data":"0b8dea971161be1ed5b6154d6e2e3d323e48b5761b705221ccc833d66dc31b00"} Jan 22 07:34:21 crc kubenswrapper[4982]: I0122 07:34:21.354321 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0b8dea971161be1ed5b6154d6e2e3d323e48b5761b705221ccc833d66dc31b00" Jan 22 07:34:21 crc kubenswrapper[4982]: I0122 07:34:21.354357 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f" Jan 22 07:34:21 crc kubenswrapper[4982]: I0122 07:34:21.738994 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="063d2222-26e8-4ffb-8210-e198da9b286b" path="/var/lib/kubelet/pods/063d2222-26e8-4ffb-8210-e198da9b286b/volumes" Jan 22 07:34:26 crc kubenswrapper[4982]: I0122 07:34:26.041909 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-persistence-db-create-kxk2n"] Jan 22 07:34:26 crc kubenswrapper[4982]: I0122 07:34:26.056032 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-persistence-db-create-kxk2n"] Jan 22 07:34:27 crc kubenswrapper[4982]: I0122 07:34:27.077906 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-fc79-account-create-update-s6n7r"] Jan 22 07:34:27 crc kubenswrapper[4982]: I0122 07:34:27.091514 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-fc79-account-create-update-s6n7r"] Jan 22 07:34:27 crc kubenswrapper[4982]: I0122 07:34:27.740127 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="198a37c2-63fa-4483-8874-af94a9f1f5de" path="/var/lib/kubelet/pods/198a37c2-63fa-4483-8874-af94a9f1f5de/volumes" Jan 22 07:34:27 crc kubenswrapper[4982]: I0122 07:34:27.741492 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72af17c0-2e69-455f-8418-bff72b08046b" path="/var/lib/kubelet/pods/72af17c0-2e69-455f-8418-bff72b08046b/volumes" Jan 22 07:34:29 crc kubenswrapper[4982]: I0122 07:34:29.299559 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp"] Jan 22 07:34:29 crc kubenswrapper[4982]: E0122 07:34:29.300639 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2cafe5d-fd32-4e7b-befb-77bb775043da" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Jan 22 07:34:29 crc kubenswrapper[4982]: I0122 07:34:29.300665 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2cafe5d-fd32-4e7b-befb-77bb775043da" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Jan 22 07:34:29 crc kubenswrapper[4982]: I0122 07:34:29.301112 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2cafe5d-fd32-4e7b-befb-77bb775043da" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Jan 22 07:34:29 crc kubenswrapper[4982]: I0122 07:34:29.302181 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp" Jan 22 07:34:29 crc kubenswrapper[4982]: I0122 07:34:29.305880 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 22 07:34:29 crc kubenswrapper[4982]: I0122 07:34:29.305986 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-mjzt9" Jan 22 07:34:29 crc kubenswrapper[4982]: I0122 07:34:29.306107 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 22 07:34:29 crc kubenswrapper[4982]: I0122 07:34:29.306213 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 22 07:34:29 crc kubenswrapper[4982]: I0122 07:34:29.315102 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp"] Jan 22 07:34:29 crc kubenswrapper[4982]: I0122 07:34:29.412957 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0001918-c9c5-4f77-8d8a-c0021c280883-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp\" (UID: \"a0001918-c9c5-4f77-8d8a-c0021c280883\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp" Jan 22 07:34:29 crc kubenswrapper[4982]: I0122 07:34:29.413097 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a0001918-c9c5-4f77-8d8a-c0021c280883-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp\" (UID: \"a0001918-c9c5-4f77-8d8a-c0021c280883\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp" Jan 22 07:34:29 crc kubenswrapper[4982]: I0122 07:34:29.413148 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a0001918-c9c5-4f77-8d8a-c0021c280883-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp\" (UID: \"a0001918-c9c5-4f77-8d8a-c0021c280883\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp" Jan 22 07:34:29 crc kubenswrapper[4982]: I0122 07:34:29.413266 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/a0001918-c9c5-4f77-8d8a-c0021c280883-ssh-key-openstack-cell1\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp\" (UID: \"a0001918-c9c5-4f77-8d8a-c0021c280883\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp" Jan 22 07:34:29 crc kubenswrapper[4982]: I0122 07:34:29.413409 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8kxx2\" (UniqueName: \"kubernetes.io/projected/a0001918-c9c5-4f77-8d8a-c0021c280883-kube-api-access-8kxx2\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp\" (UID: \"a0001918-c9c5-4f77-8d8a-c0021c280883\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp" Jan 22 07:34:29 crc kubenswrapper[4982]: I0122 07:34:29.515675 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8kxx2\" (UniqueName: 
\"kubernetes.io/projected/a0001918-c9c5-4f77-8d8a-c0021c280883-kube-api-access-8kxx2\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp\" (UID: \"a0001918-c9c5-4f77-8d8a-c0021c280883\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp" Jan 22 07:34:29 crc kubenswrapper[4982]: I0122 07:34:29.515981 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0001918-c9c5-4f77-8d8a-c0021c280883-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp\" (UID: \"a0001918-c9c5-4f77-8d8a-c0021c280883\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp" Jan 22 07:34:29 crc kubenswrapper[4982]: I0122 07:34:29.516097 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a0001918-c9c5-4f77-8d8a-c0021c280883-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp\" (UID: \"a0001918-c9c5-4f77-8d8a-c0021c280883\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp" Jan 22 07:34:29 crc kubenswrapper[4982]: I0122 07:34:29.516146 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a0001918-c9c5-4f77-8d8a-c0021c280883-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp\" (UID: \"a0001918-c9c5-4f77-8d8a-c0021c280883\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp" Jan 22 07:34:29 crc kubenswrapper[4982]: I0122 07:34:29.516212 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/a0001918-c9c5-4f77-8d8a-c0021c280883-ssh-key-openstack-cell1\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp\" (UID: \"a0001918-c9c5-4f77-8d8a-c0021c280883\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp" Jan 22 07:34:29 crc kubenswrapper[4982]: I0122 07:34:29.521413 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a0001918-c9c5-4f77-8d8a-c0021c280883-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp\" (UID: \"a0001918-c9c5-4f77-8d8a-c0021c280883\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp" Jan 22 07:34:29 crc kubenswrapper[4982]: I0122 07:34:29.523833 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a0001918-c9c5-4f77-8d8a-c0021c280883-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp\" (UID: \"a0001918-c9c5-4f77-8d8a-c0021c280883\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp" Jan 22 07:34:29 crc kubenswrapper[4982]: I0122 07:34:29.524142 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0001918-c9c5-4f77-8d8a-c0021c280883-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp\" (UID: \"a0001918-c9c5-4f77-8d8a-c0021c280883\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp" Jan 22 07:34:29 crc kubenswrapper[4982]: I0122 07:34:29.524314 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: 
\"kubernetes.io/secret/a0001918-c9c5-4f77-8d8a-c0021c280883-ssh-key-openstack-cell1\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp\" (UID: \"a0001918-c9c5-4f77-8d8a-c0021c280883\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp" Jan 22 07:34:29 crc kubenswrapper[4982]: I0122 07:34:29.532259 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8kxx2\" (UniqueName: \"kubernetes.io/projected/a0001918-c9c5-4f77-8d8a-c0021c280883-kube-api-access-8kxx2\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp\" (UID: \"a0001918-c9c5-4f77-8d8a-c0021c280883\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp" Jan 22 07:34:29 crc kubenswrapper[4982]: I0122 07:34:29.640056 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp" Jan 22 07:34:30 crc kubenswrapper[4982]: I0122 07:34:30.239034 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp"] Jan 22 07:34:30 crc kubenswrapper[4982]: W0122 07:34:30.239316 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda0001918_c9c5_4f77_8d8a_c0021c280883.slice/crio-796ebf9322927ae7f7a6ff2e3245327b668a04c995a560b8e1edf88f217a23bc WatchSource:0}: Error finding container 796ebf9322927ae7f7a6ff2e3245327b668a04c995a560b8e1edf88f217a23bc: Status 404 returned error can't find the container with id 796ebf9322927ae7f7a6ff2e3245327b668a04c995a560b8e1edf88f217a23bc Jan 22 07:34:30 crc kubenswrapper[4982]: I0122 07:34:30.449442 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp" event={"ID":"a0001918-c9c5-4f77-8d8a-c0021c280883","Type":"ContainerStarted","Data":"796ebf9322927ae7f7a6ff2e3245327b668a04c995a560b8e1edf88f217a23bc"} Jan 22 07:34:31 crc kubenswrapper[4982]: I0122 07:34:31.463618 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp" event={"ID":"a0001918-c9c5-4f77-8d8a-c0021c280883","Type":"ContainerStarted","Data":"eb185425c3833e7a38bd9fff14a33a682ecfd9fdce511289db5bb0e68c2b160d"} Jan 22 07:34:31 crc kubenswrapper[4982]: I0122 07:34:31.485734 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp" podStartSLOduration=1.8353405569999999 podStartE2EDuration="2.485708552s" podCreationTimestamp="2026-01-22 07:34:29 +0000 UTC" firstStartedPulling="2026-01-22 07:34:30.244654801 +0000 UTC m=+6531.083292804" lastFinishedPulling="2026-01-22 07:34:30.895022756 +0000 UTC m=+6531.733660799" observedRunningTime="2026-01-22 07:34:31.483289016 +0000 UTC m=+6532.321927049" watchObservedRunningTime="2026-01-22 07:34:31.485708552 +0000 UTC m=+6532.324346585" Jan 22 07:35:07 crc kubenswrapper[4982]: I0122 07:35:07.814021 4982 scope.go:117] "RemoveContainer" containerID="8d017b3574f9aa120fe25a5c340b06c59625793b2ba25362425a54699db1d65a" Jan 22 07:35:08 crc kubenswrapper[4982]: I0122 07:35:08.014796 4982 scope.go:117] "RemoveContainer" containerID="2c38061cee1e806912955dd7af5909ad2b4d966df02f1c26d1c36cd41ba34912" Jan 22 07:35:08 crc kubenswrapper[4982]: I0122 07:35:08.048224 4982 scope.go:117] "RemoveContainer" containerID="cef9c0b9e5f33e5e1998a211a59e1e86cf6e7d633445bc7eab5813f495745768" Jan 22 07:35:08 crc 
kubenswrapper[4982]: I0122 07:35:08.107892 4982 scope.go:117] "RemoveContainer" containerID="2dc0d63afa106ab83e3d91c78e49bc6f2e76b08fd10599993c145d2acf78598c" Jan 22 07:35:08 crc kubenswrapper[4982]: I0122 07:35:08.128260 4982 scope.go:117] "RemoveContainer" containerID="e5832f192dd1ff64fc934156e7f28e9c048c261a0b1fd685deb2d242782c1dff" Jan 22 07:35:08 crc kubenswrapper[4982]: I0122 07:35:08.153962 4982 scope.go:117] "RemoveContainer" containerID="f66ba423e5388a3467b72f78b4b8628e57c3f8d5a2fa65e7cd01073a180011cc" Jan 22 07:35:08 crc kubenswrapper[4982]: I0122 07:35:08.388893 4982 scope.go:117] "RemoveContainer" containerID="98326b12ee68bb959a6916ffaba5f40ea6eb980e0c8a1e34f426a5e0ae051def" Jan 22 07:35:08 crc kubenswrapper[4982]: I0122 07:35:08.413436 4982 scope.go:117] "RemoveContainer" containerID="df77a0301e1bd55f81631cddda760c76ee044991b3bf82befd39ad5962baa23e" Jan 22 07:36:09 crc kubenswrapper[4982]: I0122 07:36:09.056275 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-db-sync-9w225"] Jan 22 07:36:09 crc kubenswrapper[4982]: I0122 07:36:09.069401 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-db-sync-9w225"] Jan 22 07:36:09 crc kubenswrapper[4982]: I0122 07:36:09.741311 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71ed9098-7666-4407-9354-f68f4a17c806" path="/var/lib/kubelet/pods/71ed9098-7666-4407-9354-f68f4a17c806/volumes" Jan 22 07:36:48 crc kubenswrapper[4982]: I0122 07:36:48.973956 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 07:36:48 crc kubenswrapper[4982]: I0122 07:36:48.974559 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 07:37:08 crc kubenswrapper[4982]: I0122 07:37:08.627509 4982 scope.go:117] "RemoveContainer" containerID="53be74d469bd1c665b5496d3cb1e5e4bd75fd376564b297ff340710fa9123148" Jan 22 07:37:08 crc kubenswrapper[4982]: I0122 07:37:08.703148 4982 scope.go:117] "RemoveContainer" containerID="7134a9b0e4cb6aa9206ce780dd792f364b72e51246cee8f83586e641de5b75cb" Jan 22 07:37:10 crc kubenswrapper[4982]: I0122 07:37:10.467410 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ckn2g"] Jan 22 07:37:10 crc kubenswrapper[4982]: I0122 07:37:10.472268 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ckn2g" Jan 22 07:37:10 crc kubenswrapper[4982]: I0122 07:37:10.484575 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ckn2g"] Jan 22 07:37:10 crc kubenswrapper[4982]: I0122 07:37:10.485818 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92-catalog-content\") pod \"community-operators-ckn2g\" (UID: \"a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92\") " pod="openshift-marketplace/community-operators-ckn2g" Jan 22 07:37:10 crc kubenswrapper[4982]: I0122 07:37:10.486106 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7p7j\" (UniqueName: \"kubernetes.io/projected/a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92-kube-api-access-l7p7j\") pod \"community-operators-ckn2g\" (UID: \"a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92\") " pod="openshift-marketplace/community-operators-ckn2g" Jan 22 07:37:10 crc kubenswrapper[4982]: I0122 07:37:10.486572 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92-utilities\") pod \"community-operators-ckn2g\" (UID: \"a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92\") " pod="openshift-marketplace/community-operators-ckn2g" Jan 22 07:37:10 crc kubenswrapper[4982]: I0122 07:37:10.588563 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7p7j\" (UniqueName: \"kubernetes.io/projected/a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92-kube-api-access-l7p7j\") pod \"community-operators-ckn2g\" (UID: \"a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92\") " pod="openshift-marketplace/community-operators-ckn2g" Jan 22 07:37:10 crc kubenswrapper[4982]: I0122 07:37:10.588721 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92-utilities\") pod \"community-operators-ckn2g\" (UID: \"a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92\") " pod="openshift-marketplace/community-operators-ckn2g" Jan 22 07:37:10 crc kubenswrapper[4982]: I0122 07:37:10.588804 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92-catalog-content\") pod \"community-operators-ckn2g\" (UID: \"a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92\") " pod="openshift-marketplace/community-operators-ckn2g" Jan 22 07:37:10 crc kubenswrapper[4982]: I0122 07:37:10.589332 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92-catalog-content\") pod \"community-operators-ckn2g\" (UID: \"a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92\") " pod="openshift-marketplace/community-operators-ckn2g" Jan 22 07:37:10 crc kubenswrapper[4982]: I0122 07:37:10.589455 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92-utilities\") pod \"community-operators-ckn2g\" (UID: \"a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92\") " pod="openshift-marketplace/community-operators-ckn2g" Jan 22 07:37:10 crc kubenswrapper[4982]: I0122 07:37:10.611316 4982 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-l7p7j\" (UniqueName: \"kubernetes.io/projected/a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92-kube-api-access-l7p7j\") pod \"community-operators-ckn2g\" (UID: \"a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92\") " pod="openshift-marketplace/community-operators-ckn2g" Jan 22 07:37:10 crc kubenswrapper[4982]: I0122 07:37:10.811068 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ckn2g" Jan 22 07:37:11 crc kubenswrapper[4982]: I0122 07:37:11.427699 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ckn2g"] Jan 22 07:37:12 crc kubenswrapper[4982]: I0122 07:37:12.303928 4982 generic.go:334] "Generic (PLEG): container finished" podID="a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92" containerID="dbca7fb3d7110d4024683308a49f25a3d8de63d3971c9ff80325ddbce46ed46f" exitCode=0 Jan 22 07:37:12 crc kubenswrapper[4982]: I0122 07:37:12.304035 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ckn2g" event={"ID":"a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92","Type":"ContainerDied","Data":"dbca7fb3d7110d4024683308a49f25a3d8de63d3971c9ff80325ddbce46ed46f"} Jan 22 07:37:12 crc kubenswrapper[4982]: I0122 07:37:12.304578 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ckn2g" event={"ID":"a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92","Type":"ContainerStarted","Data":"4a6f0f5163b64113f206bbac828ec11ec0a1d91ce7f381e43ce64e67cb91cb42"} Jan 22 07:37:12 crc kubenswrapper[4982]: I0122 07:37:12.309224 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 07:37:13 crc kubenswrapper[4982]: I0122 07:37:13.316938 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ckn2g" event={"ID":"a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92","Type":"ContainerStarted","Data":"bb911c98c3b3cd8d5df22a138c46eb3ad0b6976b919cf7fcf627577bb8c98859"} Jan 22 07:37:15 crc kubenswrapper[4982]: I0122 07:37:15.336958 4982 generic.go:334] "Generic (PLEG): container finished" podID="a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92" containerID="bb911c98c3b3cd8d5df22a138c46eb3ad0b6976b919cf7fcf627577bb8c98859" exitCode=0 Jan 22 07:37:15 crc kubenswrapper[4982]: I0122 07:37:15.337055 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ckn2g" event={"ID":"a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92","Type":"ContainerDied","Data":"bb911c98c3b3cd8d5df22a138c46eb3ad0b6976b919cf7fcf627577bb8c98859"} Jan 22 07:37:16 crc kubenswrapper[4982]: I0122 07:37:16.347409 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ckn2g" event={"ID":"a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92","Type":"ContainerStarted","Data":"4edababc845b976b0bf051c40d4a7116917c10c788d470987d8dae98c86fd225"} Jan 22 07:37:16 crc kubenswrapper[4982]: I0122 07:37:16.368697 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ckn2g" podStartSLOduration=2.922720842 podStartE2EDuration="6.368680384s" podCreationTimestamp="2026-01-22 07:37:10 +0000 UTC" firstStartedPulling="2026-01-22 07:37:12.309007975 +0000 UTC m=+6693.147645978" lastFinishedPulling="2026-01-22 07:37:15.754967497 +0000 UTC m=+6696.593605520" observedRunningTime="2026-01-22 07:37:16.36409176 +0000 UTC m=+6697.202729763" watchObservedRunningTime="2026-01-22 
07:37:16.368680384 +0000 UTC m=+6697.207318377" Jan 22 07:37:18 crc kubenswrapper[4982]: I0122 07:37:18.974390 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 07:37:18 crc kubenswrapper[4982]: I0122 07:37:18.975037 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 07:37:20 crc kubenswrapper[4982]: I0122 07:37:20.812229 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ckn2g" Jan 22 07:37:20 crc kubenswrapper[4982]: I0122 07:37:20.812680 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ckn2g" Jan 22 07:37:20 crc kubenswrapper[4982]: I0122 07:37:20.889483 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ckn2g" Jan 22 07:37:21 crc kubenswrapper[4982]: I0122 07:37:21.456915 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ckn2g" Jan 22 07:37:21 crc kubenswrapper[4982]: I0122 07:37:21.534680 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ckn2g"] Jan 22 07:37:23 crc kubenswrapper[4982]: I0122 07:37:23.414741 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ckn2g" podUID="a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92" containerName="registry-server" containerID="cri-o://4edababc845b976b0bf051c40d4a7116917c10c788d470987d8dae98c86fd225" gracePeriod=2 Jan 22 07:37:23 crc kubenswrapper[4982]: I0122 07:37:23.938296 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ckn2g" Jan 22 07:37:24 crc kubenswrapper[4982]: I0122 07:37:24.086378 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92-utilities\") pod \"a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92\" (UID: \"a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92\") " Jan 22 07:37:24 crc kubenswrapper[4982]: I0122 07:37:24.086494 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l7p7j\" (UniqueName: \"kubernetes.io/projected/a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92-kube-api-access-l7p7j\") pod \"a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92\" (UID: \"a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92\") " Jan 22 07:37:24 crc kubenswrapper[4982]: I0122 07:37:24.086637 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92-catalog-content\") pod \"a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92\" (UID: \"a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92\") " Jan 22 07:37:24 crc kubenswrapper[4982]: I0122 07:37:24.087332 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92-utilities" (OuterVolumeSpecName: "utilities") pod "a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92" (UID: "a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:37:24 crc kubenswrapper[4982]: I0122 07:37:24.087621 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 07:37:24 crc kubenswrapper[4982]: I0122 07:37:24.098157 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92-kube-api-access-l7p7j" (OuterVolumeSpecName: "kube-api-access-l7p7j") pod "a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92" (UID: "a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92"). InnerVolumeSpecName "kube-api-access-l7p7j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:37:24 crc kubenswrapper[4982]: I0122 07:37:24.143112 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92" (UID: "a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:37:24 crc kubenswrapper[4982]: I0122 07:37:24.190310 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 07:37:24 crc kubenswrapper[4982]: I0122 07:37:24.190355 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l7p7j\" (UniqueName: \"kubernetes.io/projected/a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92-kube-api-access-l7p7j\") on node \"crc\" DevicePath \"\"" Jan 22 07:37:24 crc kubenswrapper[4982]: I0122 07:37:24.434636 4982 generic.go:334] "Generic (PLEG): container finished" podID="a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92" containerID="4edababc845b976b0bf051c40d4a7116917c10c788d470987d8dae98c86fd225" exitCode=0 Jan 22 07:37:24 crc kubenswrapper[4982]: I0122 07:37:24.434712 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ckn2g" event={"ID":"a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92","Type":"ContainerDied","Data":"4edababc845b976b0bf051c40d4a7116917c10c788d470987d8dae98c86fd225"} Jan 22 07:37:24 crc kubenswrapper[4982]: I0122 07:37:24.434769 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ckn2g" event={"ID":"a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92","Type":"ContainerDied","Data":"4a6f0f5163b64113f206bbac828ec11ec0a1d91ce7f381e43ce64e67cb91cb42"} Jan 22 07:37:24 crc kubenswrapper[4982]: I0122 07:37:24.434770 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ckn2g" Jan 22 07:37:24 crc kubenswrapper[4982]: I0122 07:37:24.434795 4982 scope.go:117] "RemoveContainer" containerID="4edababc845b976b0bf051c40d4a7116917c10c788d470987d8dae98c86fd225" Jan 22 07:37:24 crc kubenswrapper[4982]: I0122 07:37:24.461045 4982 scope.go:117] "RemoveContainer" containerID="bb911c98c3b3cd8d5df22a138c46eb3ad0b6976b919cf7fcf627577bb8c98859" Jan 22 07:37:24 crc kubenswrapper[4982]: I0122 07:37:24.494408 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ckn2g"] Jan 22 07:37:24 crc kubenswrapper[4982]: I0122 07:37:24.503094 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ckn2g"] Jan 22 07:37:24 crc kubenswrapper[4982]: I0122 07:37:24.513484 4982 scope.go:117] "RemoveContainer" containerID="dbca7fb3d7110d4024683308a49f25a3d8de63d3971c9ff80325ddbce46ed46f" Jan 22 07:37:24 crc kubenswrapper[4982]: I0122 07:37:24.546883 4982 scope.go:117] "RemoveContainer" containerID="4edababc845b976b0bf051c40d4a7116917c10c788d470987d8dae98c86fd225" Jan 22 07:37:24 crc kubenswrapper[4982]: E0122 07:37:24.547524 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4edababc845b976b0bf051c40d4a7116917c10c788d470987d8dae98c86fd225\": container with ID starting with 4edababc845b976b0bf051c40d4a7116917c10c788d470987d8dae98c86fd225 not found: ID does not exist" containerID="4edababc845b976b0bf051c40d4a7116917c10c788d470987d8dae98c86fd225" Jan 22 07:37:24 crc kubenswrapper[4982]: I0122 07:37:24.547559 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4edababc845b976b0bf051c40d4a7116917c10c788d470987d8dae98c86fd225"} err="failed to get container status 
\"4edababc845b976b0bf051c40d4a7116917c10c788d470987d8dae98c86fd225\": rpc error: code = NotFound desc = could not find container \"4edababc845b976b0bf051c40d4a7116917c10c788d470987d8dae98c86fd225\": container with ID starting with 4edababc845b976b0bf051c40d4a7116917c10c788d470987d8dae98c86fd225 not found: ID does not exist" Jan 22 07:37:24 crc kubenswrapper[4982]: I0122 07:37:24.547584 4982 scope.go:117] "RemoveContainer" containerID="bb911c98c3b3cd8d5df22a138c46eb3ad0b6976b919cf7fcf627577bb8c98859" Jan 22 07:37:24 crc kubenswrapper[4982]: E0122 07:37:24.547883 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb911c98c3b3cd8d5df22a138c46eb3ad0b6976b919cf7fcf627577bb8c98859\": container with ID starting with bb911c98c3b3cd8d5df22a138c46eb3ad0b6976b919cf7fcf627577bb8c98859 not found: ID does not exist" containerID="bb911c98c3b3cd8d5df22a138c46eb3ad0b6976b919cf7fcf627577bb8c98859" Jan 22 07:37:24 crc kubenswrapper[4982]: I0122 07:37:24.547904 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb911c98c3b3cd8d5df22a138c46eb3ad0b6976b919cf7fcf627577bb8c98859"} err="failed to get container status \"bb911c98c3b3cd8d5df22a138c46eb3ad0b6976b919cf7fcf627577bb8c98859\": rpc error: code = NotFound desc = could not find container \"bb911c98c3b3cd8d5df22a138c46eb3ad0b6976b919cf7fcf627577bb8c98859\": container with ID starting with bb911c98c3b3cd8d5df22a138c46eb3ad0b6976b919cf7fcf627577bb8c98859 not found: ID does not exist" Jan 22 07:37:24 crc kubenswrapper[4982]: I0122 07:37:24.547921 4982 scope.go:117] "RemoveContainer" containerID="dbca7fb3d7110d4024683308a49f25a3d8de63d3971c9ff80325ddbce46ed46f" Jan 22 07:37:24 crc kubenswrapper[4982]: E0122 07:37:24.548159 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dbca7fb3d7110d4024683308a49f25a3d8de63d3971c9ff80325ddbce46ed46f\": container with ID starting with dbca7fb3d7110d4024683308a49f25a3d8de63d3971c9ff80325ddbce46ed46f not found: ID does not exist" containerID="dbca7fb3d7110d4024683308a49f25a3d8de63d3971c9ff80325ddbce46ed46f" Jan 22 07:37:24 crc kubenswrapper[4982]: I0122 07:37:24.548202 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dbca7fb3d7110d4024683308a49f25a3d8de63d3971c9ff80325ddbce46ed46f"} err="failed to get container status \"dbca7fb3d7110d4024683308a49f25a3d8de63d3971c9ff80325ddbce46ed46f\": rpc error: code = NotFound desc = could not find container \"dbca7fb3d7110d4024683308a49f25a3d8de63d3971c9ff80325ddbce46ed46f\": container with ID starting with dbca7fb3d7110d4024683308a49f25a3d8de63d3971c9ff80325ddbce46ed46f not found: ID does not exist" Jan 22 07:37:25 crc kubenswrapper[4982]: I0122 07:37:25.739603 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92" path="/var/lib/kubelet/pods/a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92/volumes" Jan 22 07:37:48 crc kubenswrapper[4982]: I0122 07:37:48.973497 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 07:37:48 crc kubenswrapper[4982]: I0122 07:37:48.974091 4982 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 07:37:48 crc kubenswrapper[4982]: I0122 07:37:48.974138 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 07:37:48 crc kubenswrapper[4982]: I0122 07:37:48.974823 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"25737d7b74f4bc6c3c49867f5bcf2f1f76afa70bd21d73f1b66fd2997bf62736"} pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 07:37:48 crc kubenswrapper[4982]: I0122 07:37:48.974904 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" containerID="cri-o://25737d7b74f4bc6c3c49867f5bcf2f1f76afa70bd21d73f1b66fd2997bf62736" gracePeriod=600 Jan 22 07:37:49 crc kubenswrapper[4982]: E0122 07:37:49.604913 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:37:49 crc kubenswrapper[4982]: I0122 07:37:49.700554 4982 generic.go:334] "Generic (PLEG): container finished" podID="2829369e-72ba-4637-853b-88f5cf242a0e" containerID="25737d7b74f4bc6c3c49867f5bcf2f1f76afa70bd21d73f1b66fd2997bf62736" exitCode=0 Jan 22 07:37:49 crc kubenswrapper[4982]: I0122 07:37:49.700599 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerDied","Data":"25737d7b74f4bc6c3c49867f5bcf2f1f76afa70bd21d73f1b66fd2997bf62736"} Jan 22 07:37:49 crc kubenswrapper[4982]: I0122 07:37:49.700632 4982 scope.go:117] "RemoveContainer" containerID="19d585995a1a34566da5efb1814b5c4cba5ac6e44bd9e57fd395fd3d6c801d5a" Jan 22 07:37:49 crc kubenswrapper[4982]: I0122 07:37:49.702144 4982 scope.go:117] "RemoveContainer" containerID="25737d7b74f4bc6c3c49867f5bcf2f1f76afa70bd21d73f1b66fd2997bf62736" Jan 22 07:37:49 crc kubenswrapper[4982]: E0122 07:37:49.702768 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:38:02 crc kubenswrapper[4982]: I0122 07:38:02.720584 4982 scope.go:117] "RemoveContainer" containerID="25737d7b74f4bc6c3c49867f5bcf2f1f76afa70bd21d73f1b66fd2997bf62736" Jan 22 07:38:02 crc kubenswrapper[4982]: E0122 07:38:02.721908 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed 
to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:38:12 crc kubenswrapper[4982]: I0122 07:38:12.258993 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-l6767"] Jan 22 07:38:12 crc kubenswrapper[4982]: E0122 07:38:12.260614 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92" containerName="extract-utilities" Jan 22 07:38:12 crc kubenswrapper[4982]: I0122 07:38:12.260644 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92" containerName="extract-utilities" Jan 22 07:38:12 crc kubenswrapper[4982]: E0122 07:38:12.260685 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92" containerName="registry-server" Jan 22 07:38:12 crc kubenswrapper[4982]: I0122 07:38:12.260701 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92" containerName="registry-server" Jan 22 07:38:12 crc kubenswrapper[4982]: E0122 07:38:12.260736 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92" containerName="extract-content" Jan 22 07:38:12 crc kubenswrapper[4982]: I0122 07:38:12.260749 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92" containerName="extract-content" Jan 22 07:38:12 crc kubenswrapper[4982]: I0122 07:38:12.261212 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9a19c82-f3c8-4ab7-bfa1-9fd304a28c92" containerName="registry-server" Jan 22 07:38:12 crc kubenswrapper[4982]: I0122 07:38:12.264640 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-l6767" Jan 22 07:38:12 crc kubenswrapper[4982]: I0122 07:38:12.280497 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-l6767"] Jan 22 07:38:12 crc kubenswrapper[4982]: I0122 07:38:12.313904 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1616f39-ef16-4468-b28c-a9b9691e7e02-utilities\") pod \"redhat-operators-l6767\" (UID: \"c1616f39-ef16-4468-b28c-a9b9691e7e02\") " pod="openshift-marketplace/redhat-operators-l6767" Jan 22 07:38:12 crc kubenswrapper[4982]: I0122 07:38:12.314393 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1616f39-ef16-4468-b28c-a9b9691e7e02-catalog-content\") pod \"redhat-operators-l6767\" (UID: \"c1616f39-ef16-4468-b28c-a9b9691e7e02\") " pod="openshift-marketplace/redhat-operators-l6767" Jan 22 07:38:12 crc kubenswrapper[4982]: I0122 07:38:12.314495 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-784jm\" (UniqueName: \"kubernetes.io/projected/c1616f39-ef16-4468-b28c-a9b9691e7e02-kube-api-access-784jm\") pod \"redhat-operators-l6767\" (UID: \"c1616f39-ef16-4468-b28c-a9b9691e7e02\") " pod="openshift-marketplace/redhat-operators-l6767" Jan 22 07:38:12 crc kubenswrapper[4982]: I0122 07:38:12.417182 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1616f39-ef16-4468-b28c-a9b9691e7e02-catalog-content\") pod \"redhat-operators-l6767\" (UID: \"c1616f39-ef16-4468-b28c-a9b9691e7e02\") " pod="openshift-marketplace/redhat-operators-l6767" Jan 22 07:38:12 crc kubenswrapper[4982]: I0122 07:38:12.417382 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-784jm\" (UniqueName: \"kubernetes.io/projected/c1616f39-ef16-4468-b28c-a9b9691e7e02-kube-api-access-784jm\") pod \"redhat-operators-l6767\" (UID: \"c1616f39-ef16-4468-b28c-a9b9691e7e02\") " pod="openshift-marketplace/redhat-operators-l6767" Jan 22 07:38:12 crc kubenswrapper[4982]: I0122 07:38:12.417590 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1616f39-ef16-4468-b28c-a9b9691e7e02-utilities\") pod \"redhat-operators-l6767\" (UID: \"c1616f39-ef16-4468-b28c-a9b9691e7e02\") " pod="openshift-marketplace/redhat-operators-l6767" Jan 22 07:38:12 crc kubenswrapper[4982]: I0122 07:38:12.417661 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1616f39-ef16-4468-b28c-a9b9691e7e02-catalog-content\") pod \"redhat-operators-l6767\" (UID: \"c1616f39-ef16-4468-b28c-a9b9691e7e02\") " pod="openshift-marketplace/redhat-operators-l6767" Jan 22 07:38:12 crc kubenswrapper[4982]: I0122 07:38:12.418245 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1616f39-ef16-4468-b28c-a9b9691e7e02-utilities\") pod \"redhat-operators-l6767\" (UID: \"c1616f39-ef16-4468-b28c-a9b9691e7e02\") " pod="openshift-marketplace/redhat-operators-l6767" Jan 22 07:38:12 crc kubenswrapper[4982]: I0122 07:38:12.440019 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-784jm\" (UniqueName: \"kubernetes.io/projected/c1616f39-ef16-4468-b28c-a9b9691e7e02-kube-api-access-784jm\") pod \"redhat-operators-l6767\" (UID: \"c1616f39-ef16-4468-b28c-a9b9691e7e02\") " pod="openshift-marketplace/redhat-operators-l6767" Jan 22 07:38:12 crc kubenswrapper[4982]: I0122 07:38:12.598437 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l6767" Jan 22 07:38:13 crc kubenswrapper[4982]: I0122 07:38:13.072667 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-l6767"] Jan 22 07:38:13 crc kubenswrapper[4982]: I0122 07:38:13.950099 4982 generic.go:334] "Generic (PLEG): container finished" podID="c1616f39-ef16-4468-b28c-a9b9691e7e02" containerID="15400b2110385a8ca5997785a10acded73ba9634829dbbe31c54c97f9b7e2211" exitCode=0 Jan 22 07:38:13 crc kubenswrapper[4982]: I0122 07:38:13.950186 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6767" event={"ID":"c1616f39-ef16-4468-b28c-a9b9691e7e02","Type":"ContainerDied","Data":"15400b2110385a8ca5997785a10acded73ba9634829dbbe31c54c97f9b7e2211"} Jan 22 07:38:13 crc kubenswrapper[4982]: I0122 07:38:13.950538 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6767" event={"ID":"c1616f39-ef16-4468-b28c-a9b9691e7e02","Type":"ContainerStarted","Data":"c2f8d11065c7b1a953d43815424e70910ae3027d2eceb5a03b7f039af9d265c0"} Jan 22 07:38:14 crc kubenswrapper[4982]: I0122 07:38:14.971619 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6767" event={"ID":"c1616f39-ef16-4468-b28c-a9b9691e7e02","Type":"ContainerStarted","Data":"d3393e5936cb4041abbb8b92266210885282363ef5e545eca849a5b5bd896dd5"} Jan 22 07:38:17 crc kubenswrapper[4982]: I0122 07:38:17.728229 4982 scope.go:117] "RemoveContainer" containerID="25737d7b74f4bc6c3c49867f5bcf2f1f76afa70bd21d73f1b66fd2997bf62736" Jan 22 07:38:17 crc kubenswrapper[4982]: E0122 07:38:17.728936 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:38:19 crc kubenswrapper[4982]: I0122 07:38:19.011383 4982 generic.go:334] "Generic (PLEG): container finished" podID="c1616f39-ef16-4468-b28c-a9b9691e7e02" containerID="d3393e5936cb4041abbb8b92266210885282363ef5e545eca849a5b5bd896dd5" exitCode=0 Jan 22 07:38:19 crc kubenswrapper[4982]: I0122 07:38:19.011619 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6767" event={"ID":"c1616f39-ef16-4468-b28c-a9b9691e7e02","Type":"ContainerDied","Data":"d3393e5936cb4041abbb8b92266210885282363ef5e545eca849a5b5bd896dd5"} Jan 22 07:38:20 crc kubenswrapper[4982]: I0122 07:38:20.023245 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6767" event={"ID":"c1616f39-ef16-4468-b28c-a9b9691e7e02","Type":"ContainerStarted","Data":"e74f42ac9bac0705332995b2bef81f2221616f44dd6a73fe3b237ced776b56c6"} Jan 22 07:38:20 crc kubenswrapper[4982]: I0122 07:38:20.044538 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-operators-l6767" podStartSLOduration=2.261671303 podStartE2EDuration="8.044516988s" podCreationTimestamp="2026-01-22 07:38:12 +0000 UTC" firstStartedPulling="2026-01-22 07:38:13.954575558 +0000 UTC m=+6754.793213561" lastFinishedPulling="2026-01-22 07:38:19.737421203 +0000 UTC m=+6760.576059246" observedRunningTime="2026-01-22 07:38:20.042313228 +0000 UTC m=+6760.880951251" watchObservedRunningTime="2026-01-22 07:38:20.044516988 +0000 UTC m=+6760.883154991" Jan 22 07:38:22 crc kubenswrapper[4982]: I0122 07:38:22.599375 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-l6767" Jan 22 07:38:22 crc kubenswrapper[4982]: I0122 07:38:22.600129 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-l6767" Jan 22 07:38:23 crc kubenswrapper[4982]: I0122 07:38:23.658257 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-l6767" podUID="c1616f39-ef16-4468-b28c-a9b9691e7e02" containerName="registry-server" probeResult="failure" output=< Jan 22 07:38:23 crc kubenswrapper[4982]: timeout: failed to connect service ":50051" within 1s Jan 22 07:38:23 crc kubenswrapper[4982]: > Jan 22 07:38:29 crc kubenswrapper[4982]: I0122 07:38:29.727691 4982 scope.go:117] "RemoveContainer" containerID="25737d7b74f4bc6c3c49867f5bcf2f1f76afa70bd21d73f1b66fd2997bf62736" Jan 22 07:38:29 crc kubenswrapper[4982]: E0122 07:38:29.728561 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:38:32 crc kubenswrapper[4982]: I0122 07:38:32.663913 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-l6767" Jan 22 07:38:32 crc kubenswrapper[4982]: I0122 07:38:32.714840 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-l6767" Jan 22 07:38:32 crc kubenswrapper[4982]: I0122 07:38:32.897700 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-l6767"] Jan 22 07:38:34 crc kubenswrapper[4982]: I0122 07:38:34.182047 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-l6767" podUID="c1616f39-ef16-4468-b28c-a9b9691e7e02" containerName="registry-server" containerID="cri-o://e74f42ac9bac0705332995b2bef81f2221616f44dd6a73fe3b237ced776b56c6" gracePeriod=2 Jan 22 07:38:34 crc kubenswrapper[4982]: I0122 07:38:34.715272 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-l6767" Jan 22 07:38:34 crc kubenswrapper[4982]: I0122 07:38:34.816570 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1616f39-ef16-4468-b28c-a9b9691e7e02-catalog-content\") pod \"c1616f39-ef16-4468-b28c-a9b9691e7e02\" (UID: \"c1616f39-ef16-4468-b28c-a9b9691e7e02\") " Jan 22 07:38:34 crc kubenswrapper[4982]: I0122 07:38:34.816703 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1616f39-ef16-4468-b28c-a9b9691e7e02-utilities\") pod \"c1616f39-ef16-4468-b28c-a9b9691e7e02\" (UID: \"c1616f39-ef16-4468-b28c-a9b9691e7e02\") " Jan 22 07:38:34 crc kubenswrapper[4982]: I0122 07:38:34.817685 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1616f39-ef16-4468-b28c-a9b9691e7e02-utilities" (OuterVolumeSpecName: "utilities") pod "c1616f39-ef16-4468-b28c-a9b9691e7e02" (UID: "c1616f39-ef16-4468-b28c-a9b9691e7e02"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:38:34 crc kubenswrapper[4982]: I0122 07:38:34.817814 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-784jm\" (UniqueName: \"kubernetes.io/projected/c1616f39-ef16-4468-b28c-a9b9691e7e02-kube-api-access-784jm\") pod \"c1616f39-ef16-4468-b28c-a9b9691e7e02\" (UID: \"c1616f39-ef16-4468-b28c-a9b9691e7e02\") " Jan 22 07:38:34 crc kubenswrapper[4982]: I0122 07:38:34.820686 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1616f39-ef16-4468-b28c-a9b9691e7e02-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 07:38:34 crc kubenswrapper[4982]: I0122 07:38:34.823308 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1616f39-ef16-4468-b28c-a9b9691e7e02-kube-api-access-784jm" (OuterVolumeSpecName: "kube-api-access-784jm") pod "c1616f39-ef16-4468-b28c-a9b9691e7e02" (UID: "c1616f39-ef16-4468-b28c-a9b9691e7e02"). InnerVolumeSpecName "kube-api-access-784jm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:38:34 crc kubenswrapper[4982]: I0122 07:38:34.923267 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-784jm\" (UniqueName: \"kubernetes.io/projected/c1616f39-ef16-4468-b28c-a9b9691e7e02-kube-api-access-784jm\") on node \"crc\" DevicePath \"\"" Jan 22 07:38:34 crc kubenswrapper[4982]: I0122 07:38:34.936731 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1616f39-ef16-4468-b28c-a9b9691e7e02-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c1616f39-ef16-4468-b28c-a9b9691e7e02" (UID: "c1616f39-ef16-4468-b28c-a9b9691e7e02"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:38:35 crc kubenswrapper[4982]: I0122 07:38:35.024988 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1616f39-ef16-4468-b28c-a9b9691e7e02-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 07:38:35 crc kubenswrapper[4982]: I0122 07:38:35.194700 4982 generic.go:334] "Generic (PLEG): container finished" podID="c1616f39-ef16-4468-b28c-a9b9691e7e02" containerID="e74f42ac9bac0705332995b2bef81f2221616f44dd6a73fe3b237ced776b56c6" exitCode=0 Jan 22 07:38:35 crc kubenswrapper[4982]: I0122 07:38:35.194769 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l6767" Jan 22 07:38:35 crc kubenswrapper[4982]: I0122 07:38:35.194804 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6767" event={"ID":"c1616f39-ef16-4468-b28c-a9b9691e7e02","Type":"ContainerDied","Data":"e74f42ac9bac0705332995b2bef81f2221616f44dd6a73fe3b237ced776b56c6"} Jan 22 07:38:35 crc kubenswrapper[4982]: I0122 07:38:35.195096 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6767" event={"ID":"c1616f39-ef16-4468-b28c-a9b9691e7e02","Type":"ContainerDied","Data":"c2f8d11065c7b1a953d43815424e70910ae3027d2eceb5a03b7f039af9d265c0"} Jan 22 07:38:35 crc kubenswrapper[4982]: I0122 07:38:35.195123 4982 scope.go:117] "RemoveContainer" containerID="e74f42ac9bac0705332995b2bef81f2221616f44dd6a73fe3b237ced776b56c6" Jan 22 07:38:35 crc kubenswrapper[4982]: I0122 07:38:35.222141 4982 scope.go:117] "RemoveContainer" containerID="d3393e5936cb4041abbb8b92266210885282363ef5e545eca849a5b5bd896dd5" Jan 22 07:38:35 crc kubenswrapper[4982]: I0122 07:38:35.224268 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-l6767"] Jan 22 07:38:35 crc kubenswrapper[4982]: I0122 07:38:35.234725 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-l6767"] Jan 22 07:38:35 crc kubenswrapper[4982]: I0122 07:38:35.255552 4982 scope.go:117] "RemoveContainer" containerID="15400b2110385a8ca5997785a10acded73ba9634829dbbe31c54c97f9b7e2211" Jan 22 07:38:35 crc kubenswrapper[4982]: I0122 07:38:35.296305 4982 scope.go:117] "RemoveContainer" containerID="e74f42ac9bac0705332995b2bef81f2221616f44dd6a73fe3b237ced776b56c6" Jan 22 07:38:35 crc kubenswrapper[4982]: E0122 07:38:35.296804 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e74f42ac9bac0705332995b2bef81f2221616f44dd6a73fe3b237ced776b56c6\": container with ID starting with e74f42ac9bac0705332995b2bef81f2221616f44dd6a73fe3b237ced776b56c6 not found: ID does not exist" containerID="e74f42ac9bac0705332995b2bef81f2221616f44dd6a73fe3b237ced776b56c6" Jan 22 07:38:35 crc kubenswrapper[4982]: I0122 07:38:35.296919 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e74f42ac9bac0705332995b2bef81f2221616f44dd6a73fe3b237ced776b56c6"} err="failed to get container status \"e74f42ac9bac0705332995b2bef81f2221616f44dd6a73fe3b237ced776b56c6\": rpc error: code = NotFound desc = could not find container \"e74f42ac9bac0705332995b2bef81f2221616f44dd6a73fe3b237ced776b56c6\": container with ID starting with e74f42ac9bac0705332995b2bef81f2221616f44dd6a73fe3b237ced776b56c6 not found: ID does not exist" Jan 22 07:38:35 crc 
kubenswrapper[4982]: I0122 07:38:35.297002 4982 scope.go:117] "RemoveContainer" containerID="d3393e5936cb4041abbb8b92266210885282363ef5e545eca849a5b5bd896dd5" Jan 22 07:38:35 crc kubenswrapper[4982]: E0122 07:38:35.297633 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3393e5936cb4041abbb8b92266210885282363ef5e545eca849a5b5bd896dd5\": container with ID starting with d3393e5936cb4041abbb8b92266210885282363ef5e545eca849a5b5bd896dd5 not found: ID does not exist" containerID="d3393e5936cb4041abbb8b92266210885282363ef5e545eca849a5b5bd896dd5" Jan 22 07:38:35 crc kubenswrapper[4982]: I0122 07:38:35.297662 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3393e5936cb4041abbb8b92266210885282363ef5e545eca849a5b5bd896dd5"} err="failed to get container status \"d3393e5936cb4041abbb8b92266210885282363ef5e545eca849a5b5bd896dd5\": rpc error: code = NotFound desc = could not find container \"d3393e5936cb4041abbb8b92266210885282363ef5e545eca849a5b5bd896dd5\": container with ID starting with d3393e5936cb4041abbb8b92266210885282363ef5e545eca849a5b5bd896dd5 not found: ID does not exist" Jan 22 07:38:35 crc kubenswrapper[4982]: I0122 07:38:35.297685 4982 scope.go:117] "RemoveContainer" containerID="15400b2110385a8ca5997785a10acded73ba9634829dbbe31c54c97f9b7e2211" Jan 22 07:38:35 crc kubenswrapper[4982]: E0122 07:38:35.298070 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15400b2110385a8ca5997785a10acded73ba9634829dbbe31c54c97f9b7e2211\": container with ID starting with 15400b2110385a8ca5997785a10acded73ba9634829dbbe31c54c97f9b7e2211 not found: ID does not exist" containerID="15400b2110385a8ca5997785a10acded73ba9634829dbbe31c54c97f9b7e2211" Jan 22 07:38:35 crc kubenswrapper[4982]: I0122 07:38:35.298093 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15400b2110385a8ca5997785a10acded73ba9634829dbbe31c54c97f9b7e2211"} err="failed to get container status \"15400b2110385a8ca5997785a10acded73ba9634829dbbe31c54c97f9b7e2211\": rpc error: code = NotFound desc = could not find container \"15400b2110385a8ca5997785a10acded73ba9634829dbbe31c54c97f9b7e2211\": container with ID starting with 15400b2110385a8ca5997785a10acded73ba9634829dbbe31c54c97f9b7e2211 not found: ID does not exist" Jan 22 07:38:35 crc kubenswrapper[4982]: I0122 07:38:35.747007 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1616f39-ef16-4468-b28c-a9b9691e7e02" path="/var/lib/kubelet/pods/c1616f39-ef16-4468-b28c-a9b9691e7e02/volumes" Jan 22 07:38:42 crc kubenswrapper[4982]: I0122 07:38:42.720161 4982 scope.go:117] "RemoveContainer" containerID="25737d7b74f4bc6c3c49867f5bcf2f1f76afa70bd21d73f1b66fd2997bf62736" Jan 22 07:38:42 crc kubenswrapper[4982]: E0122 07:38:42.721207 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:38:55 crc kubenswrapper[4982]: I0122 07:38:55.719477 4982 scope.go:117] "RemoveContainer" containerID="25737d7b74f4bc6c3c49867f5bcf2f1f76afa70bd21d73f1b66fd2997bf62736" 
Jan 22 07:38:55 crc kubenswrapper[4982]: E0122 07:38:55.720161 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:39:07 crc kubenswrapper[4982]: I0122 07:39:07.720062 4982 scope.go:117] "RemoveContainer" containerID="25737d7b74f4bc6c3c49867f5bcf2f1f76afa70bd21d73f1b66fd2997bf62736" Jan 22 07:39:07 crc kubenswrapper[4982]: E0122 07:39:07.720935 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:39:10 crc kubenswrapper[4982]: I0122 07:39:10.039811 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-create-9rx9z"] Jan 22 07:39:10 crc kubenswrapper[4982]: I0122 07:39:10.050301 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-3c01-account-create-update-lwkdv"] Jan 22 07:39:10 crc kubenswrapper[4982]: I0122 07:39:10.058416 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-3c01-account-create-update-lwkdv"] Jan 22 07:39:10 crc kubenswrapper[4982]: I0122 07:39:10.065970 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-create-9rx9z"] Jan 22 07:39:11 crc kubenswrapper[4982]: I0122 07:39:11.734193 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56983423-387d-434d-a31a-44ea32d0d01a" path="/var/lib/kubelet/pods/56983423-387d-434d-a31a-44ea32d0d01a/volumes" Jan 22 07:39:11 crc kubenswrapper[4982]: I0122 07:39:11.735558 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8bd94f5c-8bb0-4974-925e-e4b48d664221" path="/var/lib/kubelet/pods/8bd94f5c-8bb0-4974-925e-e4b48d664221/volumes" Jan 22 07:39:20 crc kubenswrapper[4982]: I0122 07:39:20.719607 4982 scope.go:117] "RemoveContainer" containerID="25737d7b74f4bc6c3c49867f5bcf2f1f76afa70bd21d73f1b66fd2997bf62736" Jan 22 07:39:20 crc kubenswrapper[4982]: E0122 07:39:20.720428 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:39:26 crc kubenswrapper[4982]: I0122 07:39:26.052091 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-sync-bq959"] Jan 22 07:39:26 crc kubenswrapper[4982]: I0122 07:39:26.064408 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-sync-bq959"] Jan 22 07:39:27 crc kubenswrapper[4982]: I0122 07:39:27.738481 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df9743d7-745a-45f6-91f0-564f7e4811df" path="/var/lib/kubelet/pods/df9743d7-745a-45f6-91f0-564f7e4811df/volumes" 
Jan 22 07:39:35 crc kubenswrapper[4982]: I0122 07:39:35.720187 4982 scope.go:117] "RemoveContainer" containerID="25737d7b74f4bc6c3c49867f5bcf2f1f76afa70bd21d73f1b66fd2997bf62736" Jan 22 07:39:35 crc kubenswrapper[4982]: E0122 07:39:35.721413 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:39:49 crc kubenswrapper[4982]: I0122 07:39:49.728420 4982 scope.go:117] "RemoveContainer" containerID="25737d7b74f4bc6c3c49867f5bcf2f1f76afa70bd21d73f1b66fd2997bf62736" Jan 22 07:39:49 crc kubenswrapper[4982]: E0122 07:39:49.729423 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:40:01 crc kubenswrapper[4982]: I0122 07:40:01.719804 4982 scope.go:117] "RemoveContainer" containerID="25737d7b74f4bc6c3c49867f5bcf2f1f76afa70bd21d73f1b66fd2997bf62736" Jan 22 07:40:01 crc kubenswrapper[4982]: E0122 07:40:01.720959 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:40:08 crc kubenswrapper[4982]: I0122 07:40:08.889781 4982 scope.go:117] "RemoveContainer" containerID="c1afb082fedd7ea8afbe222d864b044d9d7891146609b0fbf695ff894e938216" Jan 22 07:40:08 crc kubenswrapper[4982]: I0122 07:40:08.925720 4982 scope.go:117] "RemoveContainer" containerID="5a3b8ac743fb97c8f64a0a8abf8b7c3aafd9b54fd6f13f9f8b4f9aeac1c4e7a7" Jan 22 07:40:08 crc kubenswrapper[4982]: I0122 07:40:08.968769 4982 scope.go:117] "RemoveContainer" containerID="722ac6b65734b895ac457d4167627ac54d52dea61dd6e589d66116aed112ce8e" Jan 22 07:40:16 crc kubenswrapper[4982]: I0122 07:40:16.720911 4982 scope.go:117] "RemoveContainer" containerID="25737d7b74f4bc6c3c49867f5bcf2f1f76afa70bd21d73f1b66fd2997bf62736" Jan 22 07:40:16 crc kubenswrapper[4982]: E0122 07:40:16.721707 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:40:26 crc kubenswrapper[4982]: I0122 07:40:26.084588 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-dfm9n"] Jan 22 07:40:26 crc kubenswrapper[4982]: E0122 07:40:26.086528 4982 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="c1616f39-ef16-4468-b28c-a9b9691e7e02" containerName="extract-content" Jan 22 07:40:26 crc kubenswrapper[4982]: I0122 07:40:26.086545 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1616f39-ef16-4468-b28c-a9b9691e7e02" containerName="extract-content" Jan 22 07:40:26 crc kubenswrapper[4982]: E0122 07:40:26.086571 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1616f39-ef16-4468-b28c-a9b9691e7e02" containerName="registry-server" Jan 22 07:40:26 crc kubenswrapper[4982]: I0122 07:40:26.086578 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1616f39-ef16-4468-b28c-a9b9691e7e02" containerName="registry-server" Jan 22 07:40:26 crc kubenswrapper[4982]: E0122 07:40:26.086596 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1616f39-ef16-4468-b28c-a9b9691e7e02" containerName="extract-utilities" Jan 22 07:40:26 crc kubenswrapper[4982]: I0122 07:40:26.086605 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1616f39-ef16-4468-b28c-a9b9691e7e02" containerName="extract-utilities" Jan 22 07:40:26 crc kubenswrapper[4982]: I0122 07:40:26.086983 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1616f39-ef16-4468-b28c-a9b9691e7e02" containerName="registry-server" Jan 22 07:40:26 crc kubenswrapper[4982]: I0122 07:40:26.089033 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dfm9n" Jan 22 07:40:26 crc kubenswrapper[4982]: I0122 07:40:26.121357 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dfm9n"] Jan 22 07:40:26 crc kubenswrapper[4982]: I0122 07:40:26.233377 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6bcc99d-11fe-4729-a6b0-f7dfb254c73d-catalog-content\") pod \"certified-operators-dfm9n\" (UID: \"e6bcc99d-11fe-4729-a6b0-f7dfb254c73d\") " pod="openshift-marketplace/certified-operators-dfm9n" Jan 22 07:40:26 crc kubenswrapper[4982]: I0122 07:40:26.233546 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxvct\" (UniqueName: \"kubernetes.io/projected/e6bcc99d-11fe-4729-a6b0-f7dfb254c73d-kube-api-access-fxvct\") pod \"certified-operators-dfm9n\" (UID: \"e6bcc99d-11fe-4729-a6b0-f7dfb254c73d\") " pod="openshift-marketplace/certified-operators-dfm9n" Jan 22 07:40:26 crc kubenswrapper[4982]: I0122 07:40:26.233669 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6bcc99d-11fe-4729-a6b0-f7dfb254c73d-utilities\") pod \"certified-operators-dfm9n\" (UID: \"e6bcc99d-11fe-4729-a6b0-f7dfb254c73d\") " pod="openshift-marketplace/certified-operators-dfm9n" Jan 22 07:40:26 crc kubenswrapper[4982]: I0122 07:40:26.335198 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6bcc99d-11fe-4729-a6b0-f7dfb254c73d-utilities\") pod \"certified-operators-dfm9n\" (UID: \"e6bcc99d-11fe-4729-a6b0-f7dfb254c73d\") " pod="openshift-marketplace/certified-operators-dfm9n" Jan 22 07:40:26 crc kubenswrapper[4982]: I0122 07:40:26.335365 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6bcc99d-11fe-4729-a6b0-f7dfb254c73d-catalog-content\") pod 
\"certified-operators-dfm9n\" (UID: \"e6bcc99d-11fe-4729-a6b0-f7dfb254c73d\") " pod="openshift-marketplace/certified-operators-dfm9n" Jan 22 07:40:26 crc kubenswrapper[4982]: I0122 07:40:26.335404 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxvct\" (UniqueName: \"kubernetes.io/projected/e6bcc99d-11fe-4729-a6b0-f7dfb254c73d-kube-api-access-fxvct\") pod \"certified-operators-dfm9n\" (UID: \"e6bcc99d-11fe-4729-a6b0-f7dfb254c73d\") " pod="openshift-marketplace/certified-operators-dfm9n" Jan 22 07:40:26 crc kubenswrapper[4982]: I0122 07:40:26.335897 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6bcc99d-11fe-4729-a6b0-f7dfb254c73d-utilities\") pod \"certified-operators-dfm9n\" (UID: \"e6bcc99d-11fe-4729-a6b0-f7dfb254c73d\") " pod="openshift-marketplace/certified-operators-dfm9n" Jan 22 07:40:26 crc kubenswrapper[4982]: I0122 07:40:26.335901 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6bcc99d-11fe-4729-a6b0-f7dfb254c73d-catalog-content\") pod \"certified-operators-dfm9n\" (UID: \"e6bcc99d-11fe-4729-a6b0-f7dfb254c73d\") " pod="openshift-marketplace/certified-operators-dfm9n" Jan 22 07:40:26 crc kubenswrapper[4982]: I0122 07:40:26.359689 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxvct\" (UniqueName: \"kubernetes.io/projected/e6bcc99d-11fe-4729-a6b0-f7dfb254c73d-kube-api-access-fxvct\") pod \"certified-operators-dfm9n\" (UID: \"e6bcc99d-11fe-4729-a6b0-f7dfb254c73d\") " pod="openshift-marketplace/certified-operators-dfm9n" Jan 22 07:40:26 crc kubenswrapper[4982]: I0122 07:40:26.424717 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dfm9n" Jan 22 07:40:26 crc kubenswrapper[4982]: I0122 07:40:26.949376 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dfm9n"] Jan 22 07:40:27 crc kubenswrapper[4982]: I0122 07:40:27.388431 4982 generic.go:334] "Generic (PLEG): container finished" podID="e6bcc99d-11fe-4729-a6b0-f7dfb254c73d" containerID="b521802469d98625f08c91d255498e749e9aa8d13b76c05df38b77d9cd60387b" exitCode=0 Jan 22 07:40:27 crc kubenswrapper[4982]: I0122 07:40:27.389164 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dfm9n" event={"ID":"e6bcc99d-11fe-4729-a6b0-f7dfb254c73d","Type":"ContainerDied","Data":"b521802469d98625f08c91d255498e749e9aa8d13b76c05df38b77d9cd60387b"} Jan 22 07:40:27 crc kubenswrapper[4982]: I0122 07:40:27.389208 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dfm9n" event={"ID":"e6bcc99d-11fe-4729-a6b0-f7dfb254c73d","Type":"ContainerStarted","Data":"9fac21d17432c8d7d1916c4d049382ca2ebabedc9c93d9a8a9ad59d468957173"} Jan 22 07:40:27 crc kubenswrapper[4982]: I0122 07:40:27.720594 4982 scope.go:117] "RemoveContainer" containerID="25737d7b74f4bc6c3c49867f5bcf2f1f76afa70bd21d73f1b66fd2997bf62736" Jan 22 07:40:27 crc kubenswrapper[4982]: E0122 07:40:27.721144 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:40:28 crc kubenswrapper[4982]: I0122 07:40:28.419715 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dfm9n" event={"ID":"e6bcc99d-11fe-4729-a6b0-f7dfb254c73d","Type":"ContainerStarted","Data":"bb8b59f96f810c6ae6e12632094edd694510447bd95a387cac618622d5f2f249"} Jan 22 07:40:28 crc kubenswrapper[4982]: I0122 07:40:28.474745 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-f8m7k"] Jan 22 07:40:28 crc kubenswrapper[4982]: I0122 07:40:28.477880 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f8m7k" Jan 22 07:40:28 crc kubenswrapper[4982]: I0122 07:40:28.494874 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-f8m7k"] Jan 22 07:40:28 crc kubenswrapper[4982]: I0122 07:40:28.587574 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9298c872-51cd-4c3d-b54f-a165c1fdb739-utilities\") pod \"redhat-marketplace-f8m7k\" (UID: \"9298c872-51cd-4c3d-b54f-a165c1fdb739\") " pod="openshift-marketplace/redhat-marketplace-f8m7k" Jan 22 07:40:28 crc kubenswrapper[4982]: I0122 07:40:28.587865 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wc6c\" (UniqueName: \"kubernetes.io/projected/9298c872-51cd-4c3d-b54f-a165c1fdb739-kube-api-access-8wc6c\") pod \"redhat-marketplace-f8m7k\" (UID: \"9298c872-51cd-4c3d-b54f-a165c1fdb739\") " pod="openshift-marketplace/redhat-marketplace-f8m7k" Jan 22 07:40:28 crc kubenswrapper[4982]: I0122 07:40:28.588272 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9298c872-51cd-4c3d-b54f-a165c1fdb739-catalog-content\") pod \"redhat-marketplace-f8m7k\" (UID: \"9298c872-51cd-4c3d-b54f-a165c1fdb739\") " pod="openshift-marketplace/redhat-marketplace-f8m7k" Jan 22 07:40:28 crc kubenswrapper[4982]: I0122 07:40:28.691838 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9298c872-51cd-4c3d-b54f-a165c1fdb739-catalog-content\") pod \"redhat-marketplace-f8m7k\" (UID: \"9298c872-51cd-4c3d-b54f-a165c1fdb739\") " pod="openshift-marketplace/redhat-marketplace-f8m7k" Jan 22 07:40:28 crc kubenswrapper[4982]: I0122 07:40:28.692392 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9298c872-51cd-4c3d-b54f-a165c1fdb739-utilities\") pod \"redhat-marketplace-f8m7k\" (UID: \"9298c872-51cd-4c3d-b54f-a165c1fdb739\") " pod="openshift-marketplace/redhat-marketplace-f8m7k" Jan 22 07:40:28 crc kubenswrapper[4982]: I0122 07:40:28.692418 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9298c872-51cd-4c3d-b54f-a165c1fdb739-catalog-content\") pod \"redhat-marketplace-f8m7k\" (UID: \"9298c872-51cd-4c3d-b54f-a165c1fdb739\") " pod="openshift-marketplace/redhat-marketplace-f8m7k" Jan 22 07:40:28 crc kubenswrapper[4982]: I0122 07:40:28.692462 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wc6c\" (UniqueName: \"kubernetes.io/projected/9298c872-51cd-4c3d-b54f-a165c1fdb739-kube-api-access-8wc6c\") pod \"redhat-marketplace-f8m7k\" (UID: \"9298c872-51cd-4c3d-b54f-a165c1fdb739\") " pod="openshift-marketplace/redhat-marketplace-f8m7k" Jan 22 07:40:28 crc kubenswrapper[4982]: I0122 07:40:28.692991 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9298c872-51cd-4c3d-b54f-a165c1fdb739-utilities\") pod \"redhat-marketplace-f8m7k\" (UID: \"9298c872-51cd-4c3d-b54f-a165c1fdb739\") " pod="openshift-marketplace/redhat-marketplace-f8m7k" Jan 22 07:40:28 crc kubenswrapper[4982]: I0122 07:40:28.714590 4982 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-8wc6c\" (UniqueName: \"kubernetes.io/projected/9298c872-51cd-4c3d-b54f-a165c1fdb739-kube-api-access-8wc6c\") pod \"redhat-marketplace-f8m7k\" (UID: \"9298c872-51cd-4c3d-b54f-a165c1fdb739\") " pod="openshift-marketplace/redhat-marketplace-f8m7k" Jan 22 07:40:28 crc kubenswrapper[4982]: I0122 07:40:28.805919 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f8m7k" Jan 22 07:40:29 crc kubenswrapper[4982]: I0122 07:40:29.318359 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-f8m7k"] Jan 22 07:40:29 crc kubenswrapper[4982]: W0122 07:40:29.319291 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9298c872_51cd_4c3d_b54f_a165c1fdb739.slice/crio-0e00259177bf4f98a87f40552775e7b02f6744307d353fc0f3f403f7457d973d WatchSource:0}: Error finding container 0e00259177bf4f98a87f40552775e7b02f6744307d353fc0f3f403f7457d973d: Status 404 returned error can't find the container with id 0e00259177bf4f98a87f40552775e7b02f6744307d353fc0f3f403f7457d973d Jan 22 07:40:29 crc kubenswrapper[4982]: I0122 07:40:29.461761 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f8m7k" event={"ID":"9298c872-51cd-4c3d-b54f-a165c1fdb739","Type":"ContainerStarted","Data":"0e00259177bf4f98a87f40552775e7b02f6744307d353fc0f3f403f7457d973d"} Jan 22 07:40:29 crc kubenswrapper[4982]: I0122 07:40:29.464273 4982 generic.go:334] "Generic (PLEG): container finished" podID="e6bcc99d-11fe-4729-a6b0-f7dfb254c73d" containerID="bb8b59f96f810c6ae6e12632094edd694510447bd95a387cac618622d5f2f249" exitCode=0 Jan 22 07:40:29 crc kubenswrapper[4982]: I0122 07:40:29.464315 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dfm9n" event={"ID":"e6bcc99d-11fe-4729-a6b0-f7dfb254c73d","Type":"ContainerDied","Data":"bb8b59f96f810c6ae6e12632094edd694510447bd95a387cac618622d5f2f249"} Jan 22 07:40:30 crc kubenswrapper[4982]: I0122 07:40:30.477914 4982 generic.go:334] "Generic (PLEG): container finished" podID="9298c872-51cd-4c3d-b54f-a165c1fdb739" containerID="2e30ce2b31e8ba21ac76af7f6eacd905651894b3b2b22e8f684cdc23ddfd5eac" exitCode=0 Jan 22 07:40:30 crc kubenswrapper[4982]: I0122 07:40:30.477970 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f8m7k" event={"ID":"9298c872-51cd-4c3d-b54f-a165c1fdb739","Type":"ContainerDied","Data":"2e30ce2b31e8ba21ac76af7f6eacd905651894b3b2b22e8f684cdc23ddfd5eac"} Jan 22 07:40:30 crc kubenswrapper[4982]: I0122 07:40:30.483825 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dfm9n" event={"ID":"e6bcc99d-11fe-4729-a6b0-f7dfb254c73d","Type":"ContainerStarted","Data":"3923c09be218da9eaffd2880873f9775d5c731bf01583eed867aff7be8523a0a"} Jan 22 07:40:30 crc kubenswrapper[4982]: I0122 07:40:30.542803 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-dfm9n" podStartSLOduration=2.089126157 podStartE2EDuration="4.54278316s" podCreationTimestamp="2026-01-22 07:40:26 +0000 UTC" firstStartedPulling="2026-01-22 07:40:27.392126303 +0000 UTC m=+6888.230764306" lastFinishedPulling="2026-01-22 07:40:29.845783316 +0000 UTC m=+6890.684421309" observedRunningTime="2026-01-22 07:40:30.522603465 +0000 UTC m=+6891.361241508" 
watchObservedRunningTime="2026-01-22 07:40:30.54278316 +0000 UTC m=+6891.381421183"
Jan 22 07:40:31 crc kubenswrapper[4982]: I0122 07:40:31.498585 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f8m7k" event={"ID":"9298c872-51cd-4c3d-b54f-a165c1fdb739","Type":"ContainerStarted","Data":"8e38cb041105450b54fbd14d8a5c3977d8f3f3e1d00b1ce7b03f630c193c6bbd"}
Jan 22 07:40:32 crc kubenswrapper[4982]: I0122 07:40:32.512159 4982 generic.go:334] "Generic (PLEG): container finished" podID="9298c872-51cd-4c3d-b54f-a165c1fdb739" containerID="8e38cb041105450b54fbd14d8a5c3977d8f3f3e1d00b1ce7b03f630c193c6bbd" exitCode=0
Jan 22 07:40:32 crc kubenswrapper[4982]: I0122 07:40:32.512281 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f8m7k" event={"ID":"9298c872-51cd-4c3d-b54f-a165c1fdb739","Type":"ContainerDied","Data":"8e38cb041105450b54fbd14d8a5c3977d8f3f3e1d00b1ce7b03f630c193c6bbd"}
Jan 22 07:40:33 crc kubenswrapper[4982]: I0122 07:40:33.526203 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f8m7k" event={"ID":"9298c872-51cd-4c3d-b54f-a165c1fdb739","Type":"ContainerStarted","Data":"276d5b639fea55f18480d7a0bb7cda1c5730f0b5ac568e10d87ed4a6b52ffa36"}
Jan 22 07:40:33 crc kubenswrapper[4982]: I0122 07:40:33.553098 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-f8m7k" podStartSLOduration=2.943258963 podStartE2EDuration="5.553071669s" podCreationTimestamp="2026-01-22 07:40:28 +0000 UTC" firstStartedPulling="2026-01-22 07:40:30.481293991 +0000 UTC m=+6891.319931994" lastFinishedPulling="2026-01-22 07:40:33.091106697 +0000 UTC m=+6893.929744700" observedRunningTime="2026-01-22 07:40:33.551839805 +0000 UTC m=+6894.390477818" watchObservedRunningTime="2026-01-22 07:40:33.553071669 +0000 UTC m=+6894.391709672"
Jan 22 07:40:36 crc kubenswrapper[4982]: I0122 07:40:36.425008 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-dfm9n"
Jan 22 07:40:36 crc kubenswrapper[4982]: I0122 07:40:36.426102 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-dfm9n"
Jan 22 07:40:36 crc kubenswrapper[4982]: I0122 07:40:36.513530 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-dfm9n"
Jan 22 07:40:36 crc kubenswrapper[4982]: I0122 07:40:36.625492 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-dfm9n"
Jan 22 07:40:37 crc kubenswrapper[4982]: I0122 07:40:37.274312 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dfm9n"]
Jan 22 07:40:38 crc kubenswrapper[4982]: I0122 07:40:38.600161 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-dfm9n" podUID="e6bcc99d-11fe-4729-a6b0-f7dfb254c73d" containerName="registry-server" containerID="cri-o://3923c09be218da9eaffd2880873f9775d5c731bf01583eed867aff7be8523a0a" gracePeriod=2
Jan 22 07:40:38 crc kubenswrapper[4982]: I0122 07:40:38.806325 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-f8m7k"
Jan 22 07:40:38 crc kubenswrapper[4982]: I0122 07:40:38.806401 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-f8m7k"
Jan 22 07:40:38 crc kubenswrapper[4982]: I0122 07:40:38.867124 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-f8m7k"
Jan 22 07:40:39 crc kubenswrapper[4982]: I0122 07:40:39.149658 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dfm9n"
Jan 22 07:40:39 crc kubenswrapper[4982]: I0122 07:40:39.235007 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fxvct\" (UniqueName: \"kubernetes.io/projected/e6bcc99d-11fe-4729-a6b0-f7dfb254c73d-kube-api-access-fxvct\") pod \"e6bcc99d-11fe-4729-a6b0-f7dfb254c73d\" (UID: \"e6bcc99d-11fe-4729-a6b0-f7dfb254c73d\") "
Jan 22 07:40:39 crc kubenswrapper[4982]: I0122 07:40:39.235192 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6bcc99d-11fe-4729-a6b0-f7dfb254c73d-utilities\") pod \"e6bcc99d-11fe-4729-a6b0-f7dfb254c73d\" (UID: \"e6bcc99d-11fe-4729-a6b0-f7dfb254c73d\") "
Jan 22 07:40:39 crc kubenswrapper[4982]: I0122 07:40:39.235333 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6bcc99d-11fe-4729-a6b0-f7dfb254c73d-catalog-content\") pod \"e6bcc99d-11fe-4729-a6b0-f7dfb254c73d\" (UID: \"e6bcc99d-11fe-4729-a6b0-f7dfb254c73d\") "
Jan 22 07:40:39 crc kubenswrapper[4982]: I0122 07:40:39.235959 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6bcc99d-11fe-4729-a6b0-f7dfb254c73d-utilities" (OuterVolumeSpecName: "utilities") pod "e6bcc99d-11fe-4729-a6b0-f7dfb254c73d" (UID: "e6bcc99d-11fe-4729-a6b0-f7dfb254c73d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 07:40:39 crc kubenswrapper[4982]: I0122 07:40:39.242295 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6bcc99d-11fe-4729-a6b0-f7dfb254c73d-kube-api-access-fxvct" (OuterVolumeSpecName: "kube-api-access-fxvct") pod "e6bcc99d-11fe-4729-a6b0-f7dfb254c73d" (UID: "e6bcc99d-11fe-4729-a6b0-f7dfb254c73d"). InnerVolumeSpecName "kube-api-access-fxvct". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 07:40:39 crc kubenswrapper[4982]: I0122 07:40:39.279204 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6bcc99d-11fe-4729-a6b0-f7dfb254c73d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e6bcc99d-11fe-4729-a6b0-f7dfb254c73d" (UID: "e6bcc99d-11fe-4729-a6b0-f7dfb254c73d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 07:40:39 crc kubenswrapper[4982]: I0122 07:40:39.339392 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6bcc99d-11fe-4729-a6b0-f7dfb254c73d-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 22 07:40:39 crc kubenswrapper[4982]: I0122 07:40:39.339451 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fxvct\" (UniqueName: \"kubernetes.io/projected/e6bcc99d-11fe-4729-a6b0-f7dfb254c73d-kube-api-access-fxvct\") on node \"crc\" DevicePath \"\""
Jan 22 07:40:39 crc kubenswrapper[4982]: I0122 07:40:39.339465 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6bcc99d-11fe-4729-a6b0-f7dfb254c73d-utilities\") on node \"crc\" DevicePath \"\""
Jan 22 07:40:39 crc kubenswrapper[4982]: I0122 07:40:39.613742 4982 generic.go:334] "Generic (PLEG): container finished" podID="e6bcc99d-11fe-4729-a6b0-f7dfb254c73d" containerID="3923c09be218da9eaffd2880873f9775d5c731bf01583eed867aff7be8523a0a" exitCode=0
Jan 22 07:40:39 crc kubenswrapper[4982]: I0122 07:40:39.613794 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dfm9n"
Jan 22 07:40:39 crc kubenswrapper[4982]: I0122 07:40:39.613815 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dfm9n" event={"ID":"e6bcc99d-11fe-4729-a6b0-f7dfb254c73d","Type":"ContainerDied","Data":"3923c09be218da9eaffd2880873f9775d5c731bf01583eed867aff7be8523a0a"}
Jan 22 07:40:39 crc kubenswrapper[4982]: I0122 07:40:39.614007 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dfm9n" event={"ID":"e6bcc99d-11fe-4729-a6b0-f7dfb254c73d","Type":"ContainerDied","Data":"9fac21d17432c8d7d1916c4d049382ca2ebabedc9c93d9a8a9ad59d468957173"}
Jan 22 07:40:39 crc kubenswrapper[4982]: I0122 07:40:39.614029 4982 scope.go:117] "RemoveContainer" containerID="3923c09be218da9eaffd2880873f9775d5c731bf01583eed867aff7be8523a0a"
Jan 22 07:40:39 crc kubenswrapper[4982]: I0122 07:40:39.644812 4982 scope.go:117] "RemoveContainer" containerID="bb8b59f96f810c6ae6e12632094edd694510447bd95a387cac618622d5f2f249"
Jan 22 07:40:39 crc kubenswrapper[4982]: I0122 07:40:39.658988 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dfm9n"]
Jan 22 07:40:39 crc kubenswrapper[4982]: I0122 07:40:39.668552 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-dfm9n"]
Jan 22 07:40:39 crc kubenswrapper[4982]: I0122 07:40:39.680526 4982 scope.go:117] "RemoveContainer" containerID="b521802469d98625f08c91d255498e749e9aa8d13b76c05df38b77d9cd60387b"
Jan 22 07:40:39 crc kubenswrapper[4982]: I0122 07:40:39.692407 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-f8m7k"
Jan 22 07:40:39 crc kubenswrapper[4982]: I0122 07:40:39.730275 4982 scope.go:117] "RemoveContainer" containerID="3923c09be218da9eaffd2880873f9775d5c731bf01583eed867aff7be8523a0a"
Jan 22 07:40:39 crc kubenswrapper[4982]: E0122 07:40:39.740357 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3923c09be218da9eaffd2880873f9775d5c731bf01583eed867aff7be8523a0a\": container with ID starting with 3923c09be218da9eaffd2880873f9775d5c731bf01583eed867aff7be8523a0a not found: ID does not exist" containerID="3923c09be218da9eaffd2880873f9775d5c731bf01583eed867aff7be8523a0a"
Jan 22 07:40:39 crc kubenswrapper[4982]: I0122 07:40:39.740433 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3923c09be218da9eaffd2880873f9775d5c731bf01583eed867aff7be8523a0a"} err="failed to get container status \"3923c09be218da9eaffd2880873f9775d5c731bf01583eed867aff7be8523a0a\": rpc error: code = NotFound desc = could not find container \"3923c09be218da9eaffd2880873f9775d5c731bf01583eed867aff7be8523a0a\": container with ID starting with 3923c09be218da9eaffd2880873f9775d5c731bf01583eed867aff7be8523a0a not found: ID does not exist"
Jan 22 07:40:39 crc kubenswrapper[4982]: I0122 07:40:39.740473 4982 scope.go:117] "RemoveContainer" containerID="bb8b59f96f810c6ae6e12632094edd694510447bd95a387cac618622d5f2f249"
Jan 22 07:40:39 crc kubenswrapper[4982]: E0122 07:40:39.740943 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb8b59f96f810c6ae6e12632094edd694510447bd95a387cac618622d5f2f249\": container with ID starting with bb8b59f96f810c6ae6e12632094edd694510447bd95a387cac618622d5f2f249 not found: ID does not exist" containerID="bb8b59f96f810c6ae6e12632094edd694510447bd95a387cac618622d5f2f249"
Jan 22 07:40:39 crc kubenswrapper[4982]: I0122 07:40:39.740989 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb8b59f96f810c6ae6e12632094edd694510447bd95a387cac618622d5f2f249"} err="failed to get container status \"bb8b59f96f810c6ae6e12632094edd694510447bd95a387cac618622d5f2f249\": rpc error: code = NotFound desc = could not find container \"bb8b59f96f810c6ae6e12632094edd694510447bd95a387cac618622d5f2f249\": container with ID starting with bb8b59f96f810c6ae6e12632094edd694510447bd95a387cac618622d5f2f249 not found: ID does not exist"
Jan 22 07:40:39 crc kubenswrapper[4982]: I0122 07:40:39.741020 4982 scope.go:117] "RemoveContainer" containerID="b521802469d98625f08c91d255498e749e9aa8d13b76c05df38b77d9cd60387b"
Jan 22 07:40:39 crc kubenswrapper[4982]: E0122 07:40:39.741561 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b521802469d98625f08c91d255498e749e9aa8d13b76c05df38b77d9cd60387b\": container with ID starting with b521802469d98625f08c91d255498e749e9aa8d13b76c05df38b77d9cd60387b not found: ID does not exist" containerID="b521802469d98625f08c91d255498e749e9aa8d13b76c05df38b77d9cd60387b"
Jan 22 07:40:39 crc kubenswrapper[4982]: I0122 07:40:39.741592 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b521802469d98625f08c91d255498e749e9aa8d13b76c05df38b77d9cd60387b"} err="failed to get container status \"b521802469d98625f08c91d255498e749e9aa8d13b76c05df38b77d9cd60387b\": rpc error: code = NotFound desc = could not find container \"b521802469d98625f08c91d255498e749e9aa8d13b76c05df38b77d9cd60387b\": container with ID starting with b521802469d98625f08c91d255498e749e9aa8d13b76c05df38b77d9cd60387b not found: ID does not exist"
Jan 22 07:40:39 crc kubenswrapper[4982]: I0122 07:40:39.747044 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6bcc99d-11fe-4729-a6b0-f7dfb254c73d" path="/var/lib/kubelet/pods/e6bcc99d-11fe-4729-a6b0-f7dfb254c73d/volumes"
Jan 22 07:40:40 crc kubenswrapper[4982]: I0122 07:40:40.718579 4982 scope.go:117] "RemoveContainer" containerID="25737d7b74f4bc6c3c49867f5bcf2f1f76afa70bd21d73f1b66fd2997bf62736"
Jan 22 07:40:40 crc kubenswrapper[4982]: E0122 07:40:40.718827 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 07:40:42 crc kubenswrapper[4982]: I0122 07:40:42.067174 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-f8m7k"]
Jan 22 07:40:42 crc kubenswrapper[4982]: I0122 07:40:42.067807 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-f8m7k" podUID="9298c872-51cd-4c3d-b54f-a165c1fdb739" containerName="registry-server" containerID="cri-o://276d5b639fea55f18480d7a0bb7cda1c5730f0b5ac568e10d87ed4a6b52ffa36" gracePeriod=2
Jan 22 07:40:42 crc kubenswrapper[4982]: I0122 07:40:42.583236 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f8m7k"
Jan 22 07:40:42 crc kubenswrapper[4982]: I0122 07:40:42.649675 4982 generic.go:334] "Generic (PLEG): container finished" podID="9298c872-51cd-4c3d-b54f-a165c1fdb739" containerID="276d5b639fea55f18480d7a0bb7cda1c5730f0b5ac568e10d87ed4a6b52ffa36" exitCode=0
Jan 22 07:40:42 crc kubenswrapper[4982]: I0122 07:40:42.649724 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f8m7k" event={"ID":"9298c872-51cd-4c3d-b54f-a165c1fdb739","Type":"ContainerDied","Data":"276d5b639fea55f18480d7a0bb7cda1c5730f0b5ac568e10d87ed4a6b52ffa36"}
Jan 22 07:40:42 crc kubenswrapper[4982]: I0122 07:40:42.649753 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f8m7k" event={"ID":"9298c872-51cd-4c3d-b54f-a165c1fdb739","Type":"ContainerDied","Data":"0e00259177bf4f98a87f40552775e7b02f6744307d353fc0f3f403f7457d973d"}
Jan 22 07:40:42 crc kubenswrapper[4982]: I0122 07:40:42.649772 4982 scope.go:117] "RemoveContainer" containerID="276d5b639fea55f18480d7a0bb7cda1c5730f0b5ac568e10d87ed4a6b52ffa36"
Jan 22 07:40:42 crc kubenswrapper[4982]: I0122 07:40:42.649977 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f8m7k"
Jan 22 07:40:42 crc kubenswrapper[4982]: I0122 07:40:42.672334 4982 scope.go:117] "RemoveContainer" containerID="8e38cb041105450b54fbd14d8a5c3977d8f3f3e1d00b1ce7b03f630c193c6bbd"
Jan 22 07:40:42 crc kubenswrapper[4982]: I0122 07:40:42.690734 4982 scope.go:117] "RemoveContainer" containerID="2e30ce2b31e8ba21ac76af7f6eacd905651894b3b2b22e8f684cdc23ddfd5eac"
Jan 22 07:40:42 crc kubenswrapper[4982]: I0122 07:40:42.715378 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8wc6c\" (UniqueName: \"kubernetes.io/projected/9298c872-51cd-4c3d-b54f-a165c1fdb739-kube-api-access-8wc6c\") pod \"9298c872-51cd-4c3d-b54f-a165c1fdb739\" (UID: \"9298c872-51cd-4c3d-b54f-a165c1fdb739\") "
Jan 22 07:40:42 crc kubenswrapper[4982]: I0122 07:40:42.715605 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9298c872-51cd-4c3d-b54f-a165c1fdb739-utilities\") pod \"9298c872-51cd-4c3d-b54f-a165c1fdb739\" (UID: \"9298c872-51cd-4c3d-b54f-a165c1fdb739\") "
Jan 22 07:40:42 crc kubenswrapper[4982]: I0122 07:40:42.715700 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9298c872-51cd-4c3d-b54f-a165c1fdb739-catalog-content\") pod \"9298c872-51cd-4c3d-b54f-a165c1fdb739\" (UID: \"9298c872-51cd-4c3d-b54f-a165c1fdb739\") "
Jan 22 07:40:42 crc kubenswrapper[4982]: I0122 07:40:42.716445 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9298c872-51cd-4c3d-b54f-a165c1fdb739-utilities" (OuterVolumeSpecName: "utilities") pod "9298c872-51cd-4c3d-b54f-a165c1fdb739" (UID: "9298c872-51cd-4c3d-b54f-a165c1fdb739"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 07:40:42 crc kubenswrapper[4982]: I0122 07:40:42.723494 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9298c872-51cd-4c3d-b54f-a165c1fdb739-kube-api-access-8wc6c" (OuterVolumeSpecName: "kube-api-access-8wc6c") pod "9298c872-51cd-4c3d-b54f-a165c1fdb739" (UID: "9298c872-51cd-4c3d-b54f-a165c1fdb739"). InnerVolumeSpecName "kube-api-access-8wc6c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 07:40:42 crc kubenswrapper[4982]: I0122 07:40:42.738092 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9298c872-51cd-4c3d-b54f-a165c1fdb739-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9298c872-51cd-4c3d-b54f-a165c1fdb739" (UID: "9298c872-51cd-4c3d-b54f-a165c1fdb739"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 07:40:42 crc kubenswrapper[4982]: I0122 07:40:42.783153 4982 scope.go:117] "RemoveContainer" containerID="276d5b639fea55f18480d7a0bb7cda1c5730f0b5ac568e10d87ed4a6b52ffa36"
Jan 22 07:40:42 crc kubenswrapper[4982]: E0122 07:40:42.783480 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"276d5b639fea55f18480d7a0bb7cda1c5730f0b5ac568e10d87ed4a6b52ffa36\": container with ID starting with 276d5b639fea55f18480d7a0bb7cda1c5730f0b5ac568e10d87ed4a6b52ffa36 not found: ID does not exist" containerID="276d5b639fea55f18480d7a0bb7cda1c5730f0b5ac568e10d87ed4a6b52ffa36"
Jan 22 07:40:42 crc kubenswrapper[4982]: I0122 07:40:42.783602 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"276d5b639fea55f18480d7a0bb7cda1c5730f0b5ac568e10d87ed4a6b52ffa36"} err="failed to get container status \"276d5b639fea55f18480d7a0bb7cda1c5730f0b5ac568e10d87ed4a6b52ffa36\": rpc error: code = NotFound desc = could not find container \"276d5b639fea55f18480d7a0bb7cda1c5730f0b5ac568e10d87ed4a6b52ffa36\": container with ID starting with 276d5b639fea55f18480d7a0bb7cda1c5730f0b5ac568e10d87ed4a6b52ffa36 not found: ID does not exist"
Jan 22 07:40:42 crc kubenswrapper[4982]: I0122 07:40:42.783736 4982 scope.go:117] "RemoveContainer" containerID="8e38cb041105450b54fbd14d8a5c3977d8f3f3e1d00b1ce7b03f630c193c6bbd"
Jan 22 07:40:42 crc kubenswrapper[4982]: E0122 07:40:42.784132 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e38cb041105450b54fbd14d8a5c3977d8f3f3e1d00b1ce7b03f630c193c6bbd\": container with ID starting with 8e38cb041105450b54fbd14d8a5c3977d8f3f3e1d00b1ce7b03f630c193c6bbd not found: ID does not exist" containerID="8e38cb041105450b54fbd14d8a5c3977d8f3f3e1d00b1ce7b03f630c193c6bbd"
Jan 22 07:40:42 crc kubenswrapper[4982]: I0122 07:40:42.784164 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e38cb041105450b54fbd14d8a5c3977d8f3f3e1d00b1ce7b03f630c193c6bbd"} err="failed to get container status \"8e38cb041105450b54fbd14d8a5c3977d8f3f3e1d00b1ce7b03f630c193c6bbd\": rpc error: code = NotFound desc = could not find container \"8e38cb041105450b54fbd14d8a5c3977d8f3f3e1d00b1ce7b03f630c193c6bbd\": container with ID starting with 8e38cb041105450b54fbd14d8a5c3977d8f3f3e1d00b1ce7b03f630c193c6bbd not found: ID does not exist"
Jan 22 07:40:42 crc kubenswrapper[4982]: I0122 07:40:42.784184 4982 scope.go:117] "RemoveContainer" containerID="2e30ce2b31e8ba21ac76af7f6eacd905651894b3b2b22e8f684cdc23ddfd5eac"
Jan 22 07:40:42 crc kubenswrapper[4982]: E0122 07:40:42.784420 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e30ce2b31e8ba21ac76af7f6eacd905651894b3b2b22e8f684cdc23ddfd5eac\": container with ID starting with 2e30ce2b31e8ba21ac76af7f6eacd905651894b3b2b22e8f684cdc23ddfd5eac not found: ID does not exist" containerID="2e30ce2b31e8ba21ac76af7f6eacd905651894b3b2b22e8f684cdc23ddfd5eac"
Jan 22 07:40:42 crc kubenswrapper[4982]: I0122 07:40:42.784508 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e30ce2b31e8ba21ac76af7f6eacd905651894b3b2b22e8f684cdc23ddfd5eac"} err="failed to get container status \"2e30ce2b31e8ba21ac76af7f6eacd905651894b3b2b22e8f684cdc23ddfd5eac\": rpc error: code = NotFound desc = could not find container \"2e30ce2b31e8ba21ac76af7f6eacd905651894b3b2b22e8f684cdc23ddfd5eac\": container with ID starting with 2e30ce2b31e8ba21ac76af7f6eacd905651894b3b2b22e8f684cdc23ddfd5eac not found: ID does not exist"
Jan 22 07:40:42 crc kubenswrapper[4982]: I0122 07:40:42.817910 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9298c872-51cd-4c3d-b54f-a165c1fdb739-utilities\") on node \"crc\" DevicePath \"\""
Jan 22 07:40:42 crc kubenswrapper[4982]: I0122 07:40:42.818423 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9298c872-51cd-4c3d-b54f-a165c1fdb739-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 22 07:40:42 crc kubenswrapper[4982]: I0122 07:40:42.818449 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8wc6c\" (UniqueName: \"kubernetes.io/projected/9298c872-51cd-4c3d-b54f-a165c1fdb739-kube-api-access-8wc6c\") on node \"crc\" DevicePath \"\""
Jan 22 07:40:42 crc kubenswrapper[4982]: I0122 07:40:42.980136 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-f8m7k"]
Jan 22 07:40:42 crc kubenswrapper[4982]: I0122 07:40:42.989390 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-f8m7k"]
Jan 22 07:40:43 crc kubenswrapper[4982]: I0122 07:40:43.734663 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9298c872-51cd-4c3d-b54f-a165c1fdb739" path="/var/lib/kubelet/pods/9298c872-51cd-4c3d-b54f-a165c1fdb739/volumes"
Jan 22 07:40:53 crc kubenswrapper[4982]: I0122 07:40:53.720722 4982 scope.go:117] "RemoveContainer" containerID="25737d7b74f4bc6c3c49867f5bcf2f1f76afa70bd21d73f1b66fd2997bf62736"
Jan 22 07:40:53 crc kubenswrapper[4982]: E0122 07:40:53.721870 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 07:41:07 crc kubenswrapper[4982]: I0122 07:41:07.720016 4982 scope.go:117] "RemoveContainer" containerID="25737d7b74f4bc6c3c49867f5bcf2f1f76afa70bd21d73f1b66fd2997bf62736"
Jan 22 07:41:07 crc kubenswrapper[4982]: E0122 07:41:07.720952 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 07:41:20 crc kubenswrapper[4982]: I0122 07:41:20.719713 4982 scope.go:117] "RemoveContainer" containerID="25737d7b74f4bc6c3c49867f5bcf2f1f76afa70bd21d73f1b66fd2997bf62736"
Jan 22 07:41:20 crc kubenswrapper[4982]: E0122 07:41:20.721261 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 07:41:31 crc kubenswrapper[4982]: I0122 07:41:31.041413 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-create-stvft"]
Jan 22 07:41:31 crc kubenswrapper[4982]: I0122 07:41:31.052451 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-ca6f-account-create-update-x9qzb"]
Jan 22 07:41:31 crc kubenswrapper[4982]: I0122 07:41:31.060583 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-create-stvft"]
Jan 22 07:41:31 crc kubenswrapper[4982]: I0122 07:41:31.067840 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-ca6f-account-create-update-x9qzb"]
Jan 22 07:41:31 crc kubenswrapper[4982]: I0122 07:41:31.738946 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9dede625-5da4-4c2c-b10a-d62d45d2d6cd" path="/var/lib/kubelet/pods/9dede625-5da4-4c2c-b10a-d62d45d2d6cd/volumes"
Jan 22 07:41:31 crc kubenswrapper[4982]: I0122 07:41:31.740295 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="becb33c4-a51f-43dc-a715-38cbd1bd3cbb" path="/var/lib/kubelet/pods/becb33c4-a51f-43dc-a715-38cbd1bd3cbb/volumes"
Jan 22 07:41:33 crc kubenswrapper[4982]: I0122 07:41:33.719521 4982 scope.go:117] "RemoveContainer" containerID="25737d7b74f4bc6c3c49867f5bcf2f1f76afa70bd21d73f1b66fd2997bf62736"
Jan 22 07:41:33 crc kubenswrapper[4982]: E0122 07:41:33.720225 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 07:41:43 crc kubenswrapper[4982]: I0122 07:41:43.036657 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-sync-wgf8h"]
Jan 22 07:41:43 crc kubenswrapper[4982]: I0122 07:41:43.046666 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-sync-wgf8h"]
Jan 22 07:41:43 crc kubenswrapper[4982]: I0122 07:41:43.733911 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28f46de4-fbd7-436f-ab8e-01d03d10f323" path="/var/lib/kubelet/pods/28f46de4-fbd7-436f-ab8e-01d03d10f323/volumes"
Jan 22 07:41:45 crc kubenswrapper[4982]: I0122 07:41:45.720320 4982 scope.go:117] "RemoveContainer" containerID="25737d7b74f4bc6c3c49867f5bcf2f1f76afa70bd21d73f1b66fd2997bf62736"
Jan 22 07:41:45 crc kubenswrapper[4982]: E0122 07:41:45.720671 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 07:41:59 crc kubenswrapper[4982]: I0122 07:41:59.725471 4982 scope.go:117] "RemoveContainer" containerID="25737d7b74f4bc6c3c49867f5bcf2f1f76afa70bd21d73f1b66fd2997bf62736"
Jan 22 07:41:59 crc kubenswrapper[4982]: E0122 07:41:59.726533 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 07:42:04 crc kubenswrapper[4982]: I0122 07:42:04.048124 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-create-w6vrx"]
Jan 22 07:42:04 crc kubenswrapper[4982]: I0122 07:42:04.061694 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-create-w6vrx"]
Jan 22 07:42:05 crc kubenswrapper[4982]: I0122 07:42:05.029817 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-2f89-account-create-update-ggr5k"]
Jan 22 07:42:05 crc kubenswrapper[4982]: I0122 07:42:05.039574 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-2f89-account-create-update-ggr5k"]
Jan 22 07:42:05 crc kubenswrapper[4982]: I0122 07:42:05.739225 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="242a1426-a222-444f-805e-b54b394f4ca1" path="/var/lib/kubelet/pods/242a1426-a222-444f-805e-b54b394f4ca1/volumes"
Jan 22 07:42:05 crc kubenswrapper[4982]: I0122 07:42:05.742128 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="777282d9-58f5-4a8f-bd6e-e6e7b8269196" path="/var/lib/kubelet/pods/777282d9-58f5-4a8f-bd6e-e6e7b8269196/volumes"
Jan 22 07:42:09 crc kubenswrapper[4982]: I0122 07:42:09.131711 4982 scope.go:117] "RemoveContainer" containerID="a133e98520d6d4e154b4c2c9d3224fdf982cb968cedb70eb3836cb780639226f"
Jan 22 07:42:09 crc kubenswrapper[4982]: I0122 07:42:09.173718 4982 scope.go:117] "RemoveContainer" containerID="fda6dcca3f60f5093ffdda3b98622604896c0b7e415fe1b27534b488b79d06df"
Jan 22 07:42:09 crc kubenswrapper[4982]: I0122 07:42:09.228243 4982 scope.go:117] "RemoveContainer" containerID="6d8e3cd212b7aff34a24c956c2b7778da798c9594db1d2d461c73dc52dca16d6"
Jan 22 07:42:09 crc kubenswrapper[4982]: I0122 07:42:09.290023 4982 scope.go:117] "RemoveContainer" containerID="99344cfb78435195c75851dca1c58d4b5d23795de0e231465e1a9f60015af407"
Jan 22 07:42:09 crc kubenswrapper[4982]: I0122 07:42:09.331288 4982 scope.go:117] "RemoveContainer" containerID="236981769c67c0d04368b824f79e404c515981a95ac65b4afb8fe1094aa5df10"
Jan 22 07:42:11 crc kubenswrapper[4982]: I0122 07:42:11.720224 4982 scope.go:117] "RemoveContainer" containerID="25737d7b74f4bc6c3c49867f5bcf2f1f76afa70bd21d73f1b66fd2997bf62736"
Jan 22 07:42:11 crc kubenswrapper[4982]: E0122 07:42:11.721255 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 07:42:16 crc kubenswrapper[4982]: I0122 07:42:16.055403 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-sync-vhkxb"]
Jan 22 07:42:16 crc kubenswrapper[4982]: I0122 07:42:16.066829 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-sync-vhkxb"]
Jan 22 07:42:17 crc kubenswrapper[4982]: I0122 07:42:17.733017 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf4cd78e-2b3f-4f15-aefc-850938dbadf6" path="/var/lib/kubelet/pods/cf4cd78e-2b3f-4f15-aefc-850938dbadf6/volumes"
Jan 22 07:42:23 crc kubenswrapper[4982]: I0122 07:42:23.719410 4982 scope.go:117] "RemoveContainer" containerID="25737d7b74f4bc6c3c49867f5bcf2f1f76afa70bd21d73f1b66fd2997bf62736"
Jan 22 07:42:23 crc kubenswrapper[4982]: E0122 07:42:23.720256 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 07:42:35 crc kubenswrapper[4982]: I0122 07:42:35.719081 4982 scope.go:117] "RemoveContainer" containerID="25737d7b74f4bc6c3c49867f5bcf2f1f76afa70bd21d73f1b66fd2997bf62736"
Jan 22 07:42:35 crc kubenswrapper[4982]: E0122 07:42:35.719968 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 07:42:47 crc kubenswrapper[4982]: I0122 07:42:47.719745 4982 scope.go:117] "RemoveContainer" containerID="25737d7b74f4bc6c3c49867f5bcf2f1f76afa70bd21d73f1b66fd2997bf62736"
Jan 22 07:42:47 crc kubenswrapper[4982]: E0122 07:42:47.720948 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 07:43:01 crc kubenswrapper[4982]: I0122 07:43:01.721132 4982 scope.go:117] "RemoveContainer" containerID="25737d7b74f4bc6c3c49867f5bcf2f1f76afa70bd21d73f1b66fd2997bf62736"
Jan 22 07:43:02 crc kubenswrapper[4982]: I0122 07:43:02.104152 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"8c0b5523d1ee08c03b11a453c95ea4fa585602ba317d6269f55a0e0c8df92cbb"}
Jan 22 07:43:09 crc kubenswrapper[4982]: I0122 07:43:09.479423 4982 scope.go:117] "RemoveContainer" containerID="cbf18aba83b773b2138be0e346472afcaff7b48783a019ba8c74c7ddaa5965a9"
Jan 22 07:45:00 crc kubenswrapper[4982]: I0122 07:45:00.146228 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484465-nfrl2"]
Jan 22 07:45:00 crc kubenswrapper[4982]: E0122 07:45:00.147162 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9298c872-51cd-4c3d-b54f-a165c1fdb739" containerName="extract-utilities"
Jan 22 07:45:00 crc kubenswrapper[4982]: I0122 07:45:00.147175 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="9298c872-51cd-4c3d-b54f-a165c1fdb739" containerName="extract-utilities"
Jan 22 07:45:00 crc kubenswrapper[4982]: E0122 07:45:00.147202 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6bcc99d-11fe-4729-a6b0-f7dfb254c73d" containerName="registry-server"
Jan 22 07:45:00 crc kubenswrapper[4982]: I0122 07:45:00.147208 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6bcc99d-11fe-4729-a6b0-f7dfb254c73d" containerName="registry-server"
Jan 22 07:45:00 crc kubenswrapper[4982]: E0122 07:45:00.147223 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9298c872-51cd-4c3d-b54f-a165c1fdb739" containerName="registry-server"
Jan 22 07:45:00 crc kubenswrapper[4982]: I0122 07:45:00.147230 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="9298c872-51cd-4c3d-b54f-a165c1fdb739" containerName="registry-server"
Jan 22 07:45:00 crc kubenswrapper[4982]: E0122 07:45:00.147242 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9298c872-51cd-4c3d-b54f-a165c1fdb739" containerName="extract-content"
Jan 22 07:45:00 crc kubenswrapper[4982]: I0122 07:45:00.147249 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="9298c872-51cd-4c3d-b54f-a165c1fdb739" containerName="extract-content"
Jan 22 07:45:00 crc kubenswrapper[4982]: E0122 07:45:00.147259 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6bcc99d-11fe-4729-a6b0-f7dfb254c73d" containerName="extract-utilities"
Jan 22 07:45:00 crc kubenswrapper[4982]: I0122 07:45:00.147265 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6bcc99d-11fe-4729-a6b0-f7dfb254c73d" containerName="extract-utilities"
Jan 22 07:45:00 crc kubenswrapper[4982]: E0122 07:45:00.147278 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6bcc99d-11fe-4729-a6b0-f7dfb254c73d" containerName="extract-content"
Jan 22 07:45:00 crc kubenswrapper[4982]: I0122 07:45:00.147283 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6bcc99d-11fe-4729-a6b0-f7dfb254c73d" containerName="extract-content"
Jan 22 07:45:00 crc kubenswrapper[4982]: I0122 07:45:00.147499 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="9298c872-51cd-4c3d-b54f-a165c1fdb739" containerName="registry-server"
Jan 22 07:45:00 crc kubenswrapper[4982]: I0122 07:45:00.147520 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6bcc99d-11fe-4729-a6b0-f7dfb254c73d" containerName="registry-server"
Jan 22 07:45:00 crc kubenswrapper[4982]: I0122 07:45:00.148266 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484465-nfrl2"
Jan 22 07:45:00 crc kubenswrapper[4982]: I0122 07:45:00.150496 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Jan 22 07:45:00 crc kubenswrapper[4982]: I0122 07:45:00.151643 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Jan 22 07:45:00 crc kubenswrapper[4982]: I0122 07:45:00.165224 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484465-nfrl2"]
Jan 22 07:45:00 crc kubenswrapper[4982]: I0122 07:45:00.303328 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7e4c3901-24d8-48d4-aa6b-134a6b2cab71-secret-volume\") pod \"collect-profiles-29484465-nfrl2\" (UID: \"7e4c3901-24d8-48d4-aa6b-134a6b2cab71\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484465-nfrl2"
Jan 22 07:45:00 crc kubenswrapper[4982]: I0122 07:45:00.303393 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99s2q\" (UniqueName: \"kubernetes.io/projected/7e4c3901-24d8-48d4-aa6b-134a6b2cab71-kube-api-access-99s2q\") pod \"collect-profiles-29484465-nfrl2\" (UID: \"7e4c3901-24d8-48d4-aa6b-134a6b2cab71\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484465-nfrl2"
Jan 22 07:45:00 crc kubenswrapper[4982]: I0122 07:45:00.303441 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7e4c3901-24d8-48d4-aa6b-134a6b2cab71-config-volume\") pod \"collect-profiles-29484465-nfrl2\" (UID: \"7e4c3901-24d8-48d4-aa6b-134a6b2cab71\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484465-nfrl2"
Jan 22 07:45:00 crc kubenswrapper[4982]: I0122 07:45:00.405263 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99s2q\" (UniqueName: \"kubernetes.io/projected/7e4c3901-24d8-48d4-aa6b-134a6b2cab71-kube-api-access-99s2q\") pod \"collect-profiles-29484465-nfrl2\" (UID: \"7e4c3901-24d8-48d4-aa6b-134a6b2cab71\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484465-nfrl2"
Jan 22 07:45:00 crc kubenswrapper[4982]: I0122 07:45:00.405351 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7e4c3901-24d8-48d4-aa6b-134a6b2cab71-config-volume\") pod \"collect-profiles-29484465-nfrl2\" (UID: \"7e4c3901-24d8-48d4-aa6b-134a6b2cab71\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484465-nfrl2"
Jan 22 07:45:00 crc kubenswrapper[4982]: I0122 07:45:00.405558 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7e4c3901-24d8-48d4-aa6b-134a6b2cab71-secret-volume\") pod \"collect-profiles-29484465-nfrl2\" (UID: \"7e4c3901-24d8-48d4-aa6b-134a6b2cab71\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484465-nfrl2"
Jan 22 07:45:00 crc kubenswrapper[4982]: I0122 07:45:00.406454 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7e4c3901-24d8-48d4-aa6b-134a6b2cab71-config-volume\") pod \"collect-profiles-29484465-nfrl2\" (UID: \"7e4c3901-24d8-48d4-aa6b-134a6b2cab71\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484465-nfrl2"
Jan 22 07:45:00 crc kubenswrapper[4982]: I0122 07:45:00.416658 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7e4c3901-24d8-48d4-aa6b-134a6b2cab71-secret-volume\") pod \"collect-profiles-29484465-nfrl2\" (UID: \"7e4c3901-24d8-48d4-aa6b-134a6b2cab71\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484465-nfrl2"
Jan 22 07:45:00 crc kubenswrapper[4982]: I0122 07:45:00.422890 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99s2q\" (UniqueName: \"kubernetes.io/projected/7e4c3901-24d8-48d4-aa6b-134a6b2cab71-kube-api-access-99s2q\") pod \"collect-profiles-29484465-nfrl2\" (UID: \"7e4c3901-24d8-48d4-aa6b-134a6b2cab71\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484465-nfrl2"
Jan 22 07:45:00 crc kubenswrapper[4982]: I0122 07:45:00.468547 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484465-nfrl2"
Jan 22 07:45:00 crc kubenswrapper[4982]: I0122 07:45:00.954521 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484465-nfrl2"]
Jan 22 07:45:01 crc kubenswrapper[4982]: I0122 07:45:01.582752 4982 generic.go:334] "Generic (PLEG): container finished" podID="7e4c3901-24d8-48d4-aa6b-134a6b2cab71" containerID="2f20e3b64882525c0eebf2eaf1a92887caf33d9733b9e6dcd874f6b737ff33f9" exitCode=0
Jan 22 07:45:01 crc kubenswrapper[4982]: I0122 07:45:01.583153 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484465-nfrl2" event={"ID":"7e4c3901-24d8-48d4-aa6b-134a6b2cab71","Type":"ContainerDied","Data":"2f20e3b64882525c0eebf2eaf1a92887caf33d9733b9e6dcd874f6b737ff33f9"}
Jan 22 07:45:01 crc kubenswrapper[4982]: I0122 07:45:01.583244 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484465-nfrl2" event={"ID":"7e4c3901-24d8-48d4-aa6b-134a6b2cab71","Type":"ContainerStarted","Data":"fb697482fd5c19a90535225b69d79df2d561f5988d213ccc35b96935bc164156"}
Jan 22 07:45:02 crc kubenswrapper[4982]: I0122 07:45:02.963135 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484465-nfrl2"
Jan 22 07:45:03 crc kubenswrapper[4982]: I0122 07:45:03.059089 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7e4c3901-24d8-48d4-aa6b-134a6b2cab71-config-volume\") pod \"7e4c3901-24d8-48d4-aa6b-134a6b2cab71\" (UID: \"7e4c3901-24d8-48d4-aa6b-134a6b2cab71\") "
Jan 22 07:45:03 crc kubenswrapper[4982]: I0122 07:45:03.059296 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-99s2q\" (UniqueName: \"kubernetes.io/projected/7e4c3901-24d8-48d4-aa6b-134a6b2cab71-kube-api-access-99s2q\") pod \"7e4c3901-24d8-48d4-aa6b-134a6b2cab71\" (UID: \"7e4c3901-24d8-48d4-aa6b-134a6b2cab71\") "
Jan 22 07:45:03 crc kubenswrapper[4982]: I0122 07:45:03.059542 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7e4c3901-24d8-48d4-aa6b-134a6b2cab71-secret-volume\") pod \"7e4c3901-24d8-48d4-aa6b-134a6b2cab71\" (UID: \"7e4c3901-24d8-48d4-aa6b-134a6b2cab71\") "
Jan 22 07:45:03 crc kubenswrapper[4982]: I0122 07:45:03.059822 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e4c3901-24d8-48d4-aa6b-134a6b2cab71-config-volume" (OuterVolumeSpecName: "config-volume") pod "7e4c3901-24d8-48d4-aa6b-134a6b2cab71" (UID: "7e4c3901-24d8-48d4-aa6b-134a6b2cab71"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 07:45:03 crc kubenswrapper[4982]: I0122 07:45:03.060274 4982 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7e4c3901-24d8-48d4-aa6b-134a6b2cab71-config-volume\") on node \"crc\" DevicePath \"\""
Jan 22 07:45:03 crc kubenswrapper[4982]: I0122 07:45:03.064727 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e4c3901-24d8-48d4-aa6b-134a6b2cab71-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "7e4c3901-24d8-48d4-aa6b-134a6b2cab71" (UID: "7e4c3901-24d8-48d4-aa6b-134a6b2cab71"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 07:45:03 crc kubenswrapper[4982]: I0122 07:45:03.065785 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e4c3901-24d8-48d4-aa6b-134a6b2cab71-kube-api-access-99s2q" (OuterVolumeSpecName: "kube-api-access-99s2q") pod "7e4c3901-24d8-48d4-aa6b-134a6b2cab71" (UID: "7e4c3901-24d8-48d4-aa6b-134a6b2cab71"). InnerVolumeSpecName "kube-api-access-99s2q". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 07:45:03 crc kubenswrapper[4982]: I0122 07:45:03.162087 4982 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7e4c3901-24d8-48d4-aa6b-134a6b2cab71-secret-volume\") on node \"crc\" DevicePath \"\""
Jan 22 07:45:03 crc kubenswrapper[4982]: I0122 07:45:03.162365 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-99s2q\" (UniqueName: \"kubernetes.io/projected/7e4c3901-24d8-48d4-aa6b-134a6b2cab71-kube-api-access-99s2q\") on node \"crc\" DevicePath \"\""
Jan 22 07:45:03 crc kubenswrapper[4982]: I0122 07:45:03.602118 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484465-nfrl2" event={"ID":"7e4c3901-24d8-48d4-aa6b-134a6b2cab71","Type":"ContainerDied","Data":"fb697482fd5c19a90535225b69d79df2d561f5988d213ccc35b96935bc164156"}
Jan 22 07:45:03 crc kubenswrapper[4982]: I0122 07:45:03.602180 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484465-nfrl2"
Jan 22 07:45:03 crc kubenswrapper[4982]: I0122 07:45:03.602181 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fb697482fd5c19a90535225b69d79df2d561f5988d213ccc35b96935bc164156"
Jan 22 07:45:04 crc kubenswrapper[4982]: I0122 07:45:04.048048 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484420-jt227"]
Jan 22 07:45:04 crc kubenswrapper[4982]: I0122 07:45:04.057500 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484420-jt227"]
Jan 22 07:45:05 crc kubenswrapper[4982]: I0122 07:45:05.749622 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb8519a1-7d8c-4f8d-a98d-640f74ff041b" path="/var/lib/kubelet/pods/eb8519a1-7d8c-4f8d-a98d-640f74ff041b/volumes"
Jan 22 07:45:09 crc kubenswrapper[4982]: I0122 07:45:09.587209 4982 scope.go:117] "RemoveContainer" containerID="ecd3dcc1e3671f9730c3bd03dd8310cc21f99f5e723ecea5e2d22596ed3979d2"
Jan 22 07:45:14 crc kubenswrapper[4982]: I0122 07:45:14.713041 4982 generic.go:334] "Generic (PLEG): container finished" podID="a0001918-c9c5-4f77-8d8a-c0021c280883" containerID="eb185425c3833e7a38bd9fff14a33a682ecfd9fdce511289db5bb0e68c2b160d" exitCode=0
Jan 22 07:45:14 crc kubenswrapper[4982]: I0122 07:45:14.713221 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp" event={"ID":"a0001918-c9c5-4f77-8d8a-c0021c280883","Type":"ContainerDied","Data":"eb185425c3833e7a38bd9fff14a33a682ecfd9fdce511289db5bb0e68c2b160d"}
Jan 22 07:45:16 crc kubenswrapper[4982]: I0122 07:45:16.267731 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp"
Jan 22 07:45:16 crc kubenswrapper[4982]: I0122 07:45:16.376029 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/a0001918-c9c5-4f77-8d8a-c0021c280883-ssh-key-openstack-cell1\") pod \"a0001918-c9c5-4f77-8d8a-c0021c280883\" (UID: \"a0001918-c9c5-4f77-8d8a-c0021c280883\") "
Jan 22 07:45:16 crc kubenswrapper[4982]: I0122 07:45:16.376213 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8kxx2\" (UniqueName: \"kubernetes.io/projected/a0001918-c9c5-4f77-8d8a-c0021c280883-kube-api-access-8kxx2\") pod \"a0001918-c9c5-4f77-8d8a-c0021c280883\" (UID: \"a0001918-c9c5-4f77-8d8a-c0021c280883\") "
Jan 22 07:45:16 crc kubenswrapper[4982]: I0122 07:45:16.376308 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a0001918-c9c5-4f77-8d8a-c0021c280883-ceph\") pod \"a0001918-c9c5-4f77-8d8a-c0021c280883\" (UID: \"a0001918-c9c5-4f77-8d8a-c0021c280883\") "
Jan 22 07:45:16 crc kubenswrapper[4982]: I0122 07:45:16.376407 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a0001918-c9c5-4f77-8d8a-c0021c280883-inventory\") pod \"a0001918-c9c5-4f77-8d8a-c0021c280883\" (UID: \"a0001918-c9c5-4f77-8d8a-c0021c280883\") "
Jan 22 07:45:16 crc kubenswrapper[4982]: I0122 07:45:16.376479 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0001918-c9c5-4f77-8d8a-c0021c280883-tripleo-cleanup-combined-ca-bundle\") pod \"a0001918-c9c5-4f77-8d8a-c0021c280883\" (UID: \"a0001918-c9c5-4f77-8d8a-c0021c280883\") "
Jan 22 07:45:16 crc kubenswrapper[4982]: I0122 07:45:16.383466 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0001918-c9c5-4f77-8d8a-c0021c280883-tripleo-cleanup-combined-ca-bundle" (OuterVolumeSpecName: "tripleo-cleanup-combined-ca-bundle") pod "a0001918-c9c5-4f77-8d8a-c0021c280883" (UID: "a0001918-c9c5-4f77-8d8a-c0021c280883"). InnerVolumeSpecName "tripleo-cleanup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 07:45:16 crc kubenswrapper[4982]: I0122 07:45:16.385169 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0001918-c9c5-4f77-8d8a-c0021c280883-ceph" (OuterVolumeSpecName: "ceph") pod "a0001918-c9c5-4f77-8d8a-c0021c280883" (UID: "a0001918-c9c5-4f77-8d8a-c0021c280883"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 07:45:16 crc kubenswrapper[4982]: I0122 07:45:16.385191 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0001918-c9c5-4f77-8d8a-c0021c280883-kube-api-access-8kxx2" (OuterVolumeSpecName: "kube-api-access-8kxx2") pod "a0001918-c9c5-4f77-8d8a-c0021c280883" (UID: "a0001918-c9c5-4f77-8d8a-c0021c280883"). InnerVolumeSpecName "kube-api-access-8kxx2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 07:45:16 crc kubenswrapper[4982]: I0122 07:45:16.405819 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0001918-c9c5-4f77-8d8a-c0021c280883-inventory" (OuterVolumeSpecName: "inventory") pod "a0001918-c9c5-4f77-8d8a-c0021c280883" (UID: "a0001918-c9c5-4f77-8d8a-c0021c280883"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 07:45:16 crc kubenswrapper[4982]: I0122 07:45:16.408482 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0001918-c9c5-4f77-8d8a-c0021c280883-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "a0001918-c9c5-4f77-8d8a-c0021c280883" (UID: "a0001918-c9c5-4f77-8d8a-c0021c280883"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 07:45:16 crc kubenswrapper[4982]: I0122 07:45:16.479478 4982 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a0001918-c9c5-4f77-8d8a-c0021c280883-inventory\") on node \"crc\" DevicePath \"\""
Jan 22 07:45:16 crc kubenswrapper[4982]: I0122 07:45:16.479511 4982 reconciler_common.go:293] "Volume detached for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0001918-c9c5-4f77-8d8a-c0021c280883-tripleo-cleanup-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 22 07:45:16 crc kubenswrapper[4982]: I0122 07:45:16.479523 4982 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/a0001918-c9c5-4f77-8d8a-c0021c280883-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\""
Jan 22 07:45:16 crc kubenswrapper[4982]: I0122 07:45:16.479536 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8kxx2\" (UniqueName: \"kubernetes.io/projected/a0001918-c9c5-4f77-8d8a-c0021c280883-kube-api-access-8kxx2\") on node \"crc\" DevicePath \"\""
Jan 22 07:45:16 crc kubenswrapper[4982]: I0122 07:45:16.479546 4982 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a0001918-c9c5-4f77-8d8a-c0021c280883-ceph\") on node \"crc\" DevicePath \"\""
Jan 22 07:45:16 crc kubenswrapper[4982]: I0122 07:45:16.739843 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp" event={"ID":"a0001918-c9c5-4f77-8d8a-c0021c280883","Type":"ContainerDied","Data":"796ebf9322927ae7f7a6ff2e3245327b668a04c995a560b8e1edf88f217a23bc"}
Jan 22 07:45:16 crc kubenswrapper[4982]: I0122 07:45:16.739908 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="796ebf9322927ae7f7a6ff2e3245327b668a04c995a560b8e1edf88f217a23bc"
Jan 22 07:45:16 crc kubenswrapper[4982]: I0122 07:45:16.739964 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp"
Jan 22 07:45:18 crc kubenswrapper[4982]: I0122 07:45:18.974613 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 07:45:18 crc kubenswrapper[4982]: I0122 07:45:18.975310 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 07:45:19 crc kubenswrapper[4982]: I0122 07:45:19.614426 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-98c2s"]
Jan 22 07:45:19 crc kubenswrapper[4982]: E0122 07:45:19.615045 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0001918-c9c5-4f77-8d8a-c0021c280883" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1"
Jan 22 07:45:19 crc kubenswrapper[4982]: I0122 07:45:19.615071 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0001918-c9c5-4f77-8d8a-c0021c280883" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1"
Jan 22 07:45:19 crc kubenswrapper[4982]: E0122 07:45:19.615095 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e4c3901-24d8-48d4-aa6b-134a6b2cab71" containerName="collect-profiles"
Jan 22 07:45:19 crc kubenswrapper[4982]: I0122 07:45:19.615105 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e4c3901-24d8-48d4-aa6b-134a6b2cab71" containerName="collect-profiles"
Jan 22 07:45:19 crc kubenswrapper[4982]: I0122 07:45:19.615387 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e4c3901-24d8-48d4-aa6b-134a6b2cab71" containerName="collect-profiles"
Jan 22 07:45:19 crc kubenswrapper[4982]: I0122 07:45:19.615413 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0001918-c9c5-4f77-8d8a-c0021c280883" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1"
Jan 22 07:45:19 crc kubenswrapper[4982]: I0122 07:45:19.616380 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-98c2s"
Jan 22 07:45:19 crc kubenswrapper[4982]: I0122 07:45:19.618628 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Jan 22 07:45:19 crc kubenswrapper[4982]: I0122 07:45:19.619023 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Jan 22 07:45:19 crc kubenswrapper[4982]: I0122 07:45:19.619489 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Jan 22 07:45:19 crc kubenswrapper[4982]: I0122 07:45:19.619696 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-mjzt9"
Jan 22 07:45:19 crc kubenswrapper[4982]: I0122 07:45:19.624906 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-98c2s"]
Jan 22 07:45:19 crc kubenswrapper[4982]: I0122 07:45:19.765034 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/961e9f4d-7835-4d04-bdde-dbcdb6f54c2b-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-98c2s\" (UID: \"961e9f4d-7835-4d04-bdde-dbcdb6f54c2b\") " pod="openstack/bootstrap-openstack-openstack-cell1-98c2s"
Jan 22 07:45:19 crc kubenswrapper[4982]: I0122 07:45:19.765218 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/961e9f4d-7835-4d04-bdde-dbcdb6f54c2b-inventory\") pod \"bootstrap-openstack-openstack-cell1-98c2s\" (UID: \"961e9f4d-7835-4d04-bdde-dbcdb6f54c2b\") " pod="openstack/bootstrap-openstack-openstack-cell1-98c2s"
Jan 22 07:45:19 crc kubenswrapper[4982]: I0122 07:45:19.765264 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/961e9f4d-7835-4d04-bdde-dbcdb6f54c2b-ceph\") pod \"bootstrap-openstack-openstack-cell1-98c2s\" (UID: \"961e9f4d-7835-4d04-bdde-dbcdb6f54c2b\") " pod="openstack/bootstrap-openstack-openstack-cell1-98c2s"
Jan 22 07:45:19 crc kubenswrapper[4982]: I0122 07:45:19.765294 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/961e9f4d-7835-4d04-bdde-dbcdb6f54c2b-ssh-key-openstack-cell1\") pod \"bootstrap-openstack-openstack-cell1-98c2s\" (UID: \"961e9f4d-7835-4d04-bdde-dbcdb6f54c2b\") " pod="openstack/bootstrap-openstack-openstack-cell1-98c2s"
Jan 22 07:45:19 crc kubenswrapper[4982]: I0122 07:45:19.765396 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9qzx\" (UniqueName: \"kubernetes.io/projected/961e9f4d-7835-4d04-bdde-dbcdb6f54c2b-kube-api-access-v9qzx\") pod \"bootstrap-openstack-openstack-cell1-98c2s\" (UID: \"961e9f4d-7835-4d04-bdde-dbcdb6f54c2b\") " pod="openstack/bootstrap-openstack-openstack-cell1-98c2s"
Jan 22 07:45:19 crc kubenswrapper[4982]: I0122 07:45:19.867519 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/961e9f4d-7835-4d04-bdde-dbcdb6f54c2b-inventory\") pod \"bootstrap-openstack-openstack-cell1-98c2s\" (UID: \"961e9f4d-7835-4d04-bdde-dbcdb6f54c2b\") " pod="openstack/bootstrap-openstack-openstack-cell1-98c2s"
Jan 22 07:45:19 crc kubenswrapper[4982]: I0122 07:45:19.867586 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/961e9f4d-7835-4d04-bdde-dbcdb6f54c2b-ceph\") pod \"bootstrap-openstack-openstack-cell1-98c2s\" (UID: \"961e9f4d-7835-4d04-bdde-dbcdb6f54c2b\") " pod="openstack/bootstrap-openstack-openstack-cell1-98c2s"
Jan 22 07:45:19 crc kubenswrapper[4982]: I0122 07:45:19.867612 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/961e9f4d-7835-4d04-bdde-dbcdb6f54c2b-ssh-key-openstack-cell1\") pod \"bootstrap-openstack-openstack-cell1-98c2s\" (UID: \"961e9f4d-7835-4d04-bdde-dbcdb6f54c2b\") " pod="openstack/bootstrap-openstack-openstack-cell1-98c2s"
Jan 22 07:45:19 crc kubenswrapper[4982]: I0122 07:45:19.867691 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9qzx\" (UniqueName: \"kubernetes.io/projected/961e9f4d-7835-4d04-bdde-dbcdb6f54c2b-kube-api-access-v9qzx\") pod \"bootstrap-openstack-openstack-cell1-98c2s\" (UID: \"961e9f4d-7835-4d04-bdde-dbcdb6f54c2b\") " pod="openstack/bootstrap-openstack-openstack-cell1-98c2s"
Jan 22 07:45:19 crc kubenswrapper[4982]: I0122 07:45:19.867745 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/961e9f4d-7835-4d04-bdde-dbcdb6f54c2b-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-98c2s\" (UID: \"961e9f4d-7835-4d04-bdde-dbcdb6f54c2b\") " pod="openstack/bootstrap-openstack-openstack-cell1-98c2s"
Jan 22 07:45:19 crc kubenswrapper[4982]: I0122 07:45:19.876128 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/961e9f4d-7835-4d04-bdde-dbcdb6f54c2b-ceph\") pod \"bootstrap-openstack-openstack-cell1-98c2s\" (UID: \"961e9f4d-7835-4d04-bdde-dbcdb6f54c2b\") " pod="openstack/bootstrap-openstack-openstack-cell1-98c2s"
Jan 22 07:45:19 crc kubenswrapper[4982]: I0122 07:45:19.876137 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/961e9f4d-7835-4d04-bdde-dbcdb6f54c2b-inventory\") pod \"bootstrap-openstack-openstack-cell1-98c2s\" (UID: \"961e9f4d-7835-4d04-bdde-dbcdb6f54c2b\") " pod="openstack/bootstrap-openstack-openstack-cell1-98c2s"
Jan 22 07:45:19 crc kubenswrapper[4982]: I0122 07:45:19.876883 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/961e9f4d-7835-4d04-bdde-dbcdb6f54c2b-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-98c2s\" (UID: \"961e9f4d-7835-4d04-bdde-dbcdb6f54c2b\") " pod="openstack/bootstrap-openstack-openstack-cell1-98c2s"
Jan 22 07:45:19 crc kubenswrapper[4982]: I0122 07:45:19.877375 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/961e9f4d-7835-4d04-bdde-dbcdb6f54c2b-ssh-key-openstack-cell1\") pod \"bootstrap-openstack-openstack-cell1-98c2s\" (UID: \"961e9f4d-7835-4d04-bdde-dbcdb6f54c2b\") " pod="openstack/bootstrap-openstack-openstack-cell1-98c2s"
Jan 22 07:45:19 crc kubenswrapper[4982]: I0122 07:45:19.888081 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9qzx\" (UniqueName: \"kubernetes.io/projected/961e9f4d-7835-4d04-bdde-dbcdb6f54c2b-kube-api-access-v9qzx\") pod \"bootstrap-openstack-openstack-cell1-98c2s\" (UID: \"961e9f4d-7835-4d04-bdde-dbcdb6f54c2b\") " pod="openstack/bootstrap-openstack-openstack-cell1-98c2s"
Jan 22 07:45:19 crc kubenswrapper[4982]: I0122 07:45:19.943360 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-98c2s"
Jan 22 07:45:20 crc kubenswrapper[4982]: I0122 07:45:20.509299 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-98c2s"]
Jan 22 07:45:20 crc kubenswrapper[4982]: I0122 07:45:20.509743 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 22 07:45:20 crc kubenswrapper[4982]: I0122 07:45:20.778091 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-98c2s" event={"ID":"961e9f4d-7835-4d04-bdde-dbcdb6f54c2b","Type":"ContainerStarted","Data":"57152879b0beef0935f71a4892ccba6dd1de0887c431208617e546aaf3c5e3a0"}
Jan 22 07:45:21 crc kubenswrapper[4982]: I0122 07:45:21.791972 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-98c2s" event={"ID":"961e9f4d-7835-4d04-bdde-dbcdb6f54c2b","Type":"ContainerStarted","Data":"4120edb3b475c88e72adb7b8b79b0d2ec89643e82f0fd617b78df56b6f43ad7a"}
Jan 22 07:45:21 crc kubenswrapper[4982]: I0122 07:45:21.836920 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-openstack-openstack-cell1-98c2s" podStartSLOduration=2.349781211 podStartE2EDuration="2.836885792s" podCreationTimestamp="2026-01-22 07:45:19 +0000 UTC" firstStartedPulling="2026-01-22 07:45:20.509236766 +0000 UTC m=+7181.347874799" lastFinishedPulling="2026-01-22 07:45:20.996341377 +0000 UTC m=+7181.834979380" observedRunningTime="2026-01-22 07:45:21.816228405 +0000 UTC m=+7182.654866558" watchObservedRunningTime="2026-01-22 07:45:21.836885792 +0000 UTC m=+7182.675523835"
Jan 22 07:45:48 crc kubenswrapper[4982]: I0122 07:45:48.974487 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 07:45:48 crc kubenswrapper[4982]: I0122 07:45:48.976062 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 07:46:18 crc kubenswrapper[4982]: I0122 07:46:18.974226 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 07:46:18 crc kubenswrapper[4982]: I0122 07:46:18.974764 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 07:46:18 crc kubenswrapper[4982]: I0122 07:46:18.974803 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx"
Jan 22 07:46:18 crc kubenswrapper[4982]: I0122 07:46:18.975575 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8c0b5523d1ee08c03b11a453c95ea4fa585602ba317d6269f55a0e0c8df92cbb"} pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 22 07:46:18 crc kubenswrapper[4982]: I0122 07:46:18.975622 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" containerID="cri-o://8c0b5523d1ee08c03b11a453c95ea4fa585602ba317d6269f55a0e0c8df92cbb" gracePeriod=600
Jan 22 07:46:19 crc kubenswrapper[4982]: I0122 07:46:19.417189 4982 generic.go:334] "Generic (PLEG): container finished" podID="2829369e-72ba-4637-853b-88f5cf242a0e" containerID="8c0b5523d1ee08c03b11a453c95ea4fa585602ba317d6269f55a0e0c8df92cbb" exitCode=0
Jan 22 07:46:19 crc kubenswrapper[4982]: I0122 07:46:19.417285 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerDied","Data":"8c0b5523d1ee08c03b11a453c95ea4fa585602ba317d6269f55a0e0c8df92cbb"}
Jan 22 07:46:19 crc kubenswrapper[4982]: I0122 07:46:19.417637 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"4292384f8792d1496c63f3056a29f7893d807144bd86466effcdeb4f3e397538"}
Jan 22 07:46:19 crc kubenswrapper[4982]: I0122 07:46:19.417666 4982 scope.go:117] "RemoveContainer" containerID="25737d7b74f4bc6c3c49867f5bcf2f1f76afa70bd21d73f1b66fd2997bf62736"
Jan 22 07:48:27 crc kubenswrapper[4982]: I0122 07:48:27.975898 4982 generic.go:334] "Generic (PLEG): container finished" podID="961e9f4d-7835-4d04-bdde-dbcdb6f54c2b" containerID="4120edb3b475c88e72adb7b8b79b0d2ec89643e82f0fd617b78df56b6f43ad7a" exitCode=0
Jan 22 07:48:27 crc kubenswrapper[4982]: I0122 07:48:27.975945 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-98c2s" event={"ID":"961e9f4d-7835-4d04-bdde-dbcdb6f54c2b","Type":"ContainerDied","Data":"4120edb3b475c88e72adb7b8b79b0d2ec89643e82f0fd617b78df56b6f43ad7a"}
Jan 22 07:48:29 crc kubenswrapper[4982]: I0122 07:48:29.440328 4982 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-98c2s" Jan 22 07:48:29 crc kubenswrapper[4982]: I0122 07:48:29.573755 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/961e9f4d-7835-4d04-bdde-dbcdb6f54c2b-ssh-key-openstack-cell1\") pod \"961e9f4d-7835-4d04-bdde-dbcdb6f54c2b\" (UID: \"961e9f4d-7835-4d04-bdde-dbcdb6f54c2b\") " Jan 22 07:48:29 crc kubenswrapper[4982]: I0122 07:48:29.573862 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/961e9f4d-7835-4d04-bdde-dbcdb6f54c2b-inventory\") pod \"961e9f4d-7835-4d04-bdde-dbcdb6f54c2b\" (UID: \"961e9f4d-7835-4d04-bdde-dbcdb6f54c2b\") " Jan 22 07:48:29 crc kubenswrapper[4982]: I0122 07:48:29.574026 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v9qzx\" (UniqueName: \"kubernetes.io/projected/961e9f4d-7835-4d04-bdde-dbcdb6f54c2b-kube-api-access-v9qzx\") pod \"961e9f4d-7835-4d04-bdde-dbcdb6f54c2b\" (UID: \"961e9f4d-7835-4d04-bdde-dbcdb6f54c2b\") " Jan 22 07:48:29 crc kubenswrapper[4982]: I0122 07:48:29.574089 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/961e9f4d-7835-4d04-bdde-dbcdb6f54c2b-bootstrap-combined-ca-bundle\") pod \"961e9f4d-7835-4d04-bdde-dbcdb6f54c2b\" (UID: \"961e9f4d-7835-4d04-bdde-dbcdb6f54c2b\") " Jan 22 07:48:29 crc kubenswrapper[4982]: I0122 07:48:29.574125 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/961e9f4d-7835-4d04-bdde-dbcdb6f54c2b-ceph\") pod \"961e9f4d-7835-4d04-bdde-dbcdb6f54c2b\" (UID: \"961e9f4d-7835-4d04-bdde-dbcdb6f54c2b\") " Jan 22 07:48:29 crc kubenswrapper[4982]: I0122 07:48:29.579167 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/961e9f4d-7835-4d04-bdde-dbcdb6f54c2b-ceph" (OuterVolumeSpecName: "ceph") pod "961e9f4d-7835-4d04-bdde-dbcdb6f54c2b" (UID: "961e9f4d-7835-4d04-bdde-dbcdb6f54c2b"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:48:29 crc kubenswrapper[4982]: I0122 07:48:29.584011 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/961e9f4d-7835-4d04-bdde-dbcdb6f54c2b-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "961e9f4d-7835-4d04-bdde-dbcdb6f54c2b" (UID: "961e9f4d-7835-4d04-bdde-dbcdb6f54c2b"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:48:29 crc kubenswrapper[4982]: I0122 07:48:29.585419 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/961e9f4d-7835-4d04-bdde-dbcdb6f54c2b-kube-api-access-v9qzx" (OuterVolumeSpecName: "kube-api-access-v9qzx") pod "961e9f4d-7835-4d04-bdde-dbcdb6f54c2b" (UID: "961e9f4d-7835-4d04-bdde-dbcdb6f54c2b"). InnerVolumeSpecName "kube-api-access-v9qzx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:48:29 crc kubenswrapper[4982]: I0122 07:48:29.606315 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/961e9f4d-7835-4d04-bdde-dbcdb6f54c2b-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "961e9f4d-7835-4d04-bdde-dbcdb6f54c2b" (UID: "961e9f4d-7835-4d04-bdde-dbcdb6f54c2b"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:48:29 crc kubenswrapper[4982]: I0122 07:48:29.626572 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/961e9f4d-7835-4d04-bdde-dbcdb6f54c2b-inventory" (OuterVolumeSpecName: "inventory") pod "961e9f4d-7835-4d04-bdde-dbcdb6f54c2b" (UID: "961e9f4d-7835-4d04-bdde-dbcdb6f54c2b"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:48:29 crc kubenswrapper[4982]: I0122 07:48:29.677428 4982 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/961e9f4d-7835-4d04-bdde-dbcdb6f54c2b-ceph\") on node \"crc\" DevicePath \"\"" Jan 22 07:48:29 crc kubenswrapper[4982]: I0122 07:48:29.677467 4982 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/961e9f4d-7835-4d04-bdde-dbcdb6f54c2b-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 22 07:48:29 crc kubenswrapper[4982]: I0122 07:48:29.677483 4982 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/961e9f4d-7835-4d04-bdde-dbcdb6f54c2b-inventory\") on node \"crc\" DevicePath \"\"" Jan 22 07:48:29 crc kubenswrapper[4982]: I0122 07:48:29.677498 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v9qzx\" (UniqueName: \"kubernetes.io/projected/961e9f4d-7835-4d04-bdde-dbcdb6f54c2b-kube-api-access-v9qzx\") on node \"crc\" DevicePath \"\"" Jan 22 07:48:29 crc kubenswrapper[4982]: I0122 07:48:29.677510 4982 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/961e9f4d-7835-4d04-bdde-dbcdb6f54c2b-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 07:48:29 crc kubenswrapper[4982]: I0122 07:48:29.999845 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-98c2s" event={"ID":"961e9f4d-7835-4d04-bdde-dbcdb6f54c2b","Type":"ContainerDied","Data":"57152879b0beef0935f71a4892ccba6dd1de0887c431208617e546aaf3c5e3a0"} Jan 22 07:48:29 crc kubenswrapper[4982]: I0122 07:48:29.999904 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="57152879b0beef0935f71a4892ccba6dd1de0887c431208617e546aaf3c5e3a0" Jan 22 07:48:30 crc kubenswrapper[4982]: I0122 07:48:29.999990 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-98c2s" Jan 22 07:48:30 crc kubenswrapper[4982]: I0122 07:48:30.088202 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-68mv5"] Jan 22 07:48:30 crc kubenswrapper[4982]: E0122 07:48:30.088644 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="961e9f4d-7835-4d04-bdde-dbcdb6f54c2b" containerName="bootstrap-openstack-openstack-cell1" Jan 22 07:48:30 crc kubenswrapper[4982]: I0122 07:48:30.088661 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="961e9f4d-7835-4d04-bdde-dbcdb6f54c2b" containerName="bootstrap-openstack-openstack-cell1" Jan 22 07:48:30 crc kubenswrapper[4982]: I0122 07:48:30.089015 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="961e9f4d-7835-4d04-bdde-dbcdb6f54c2b" containerName="bootstrap-openstack-openstack-cell1" Jan 22 07:48:30 crc kubenswrapper[4982]: I0122 07:48:30.089916 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-68mv5" Jan 22 07:48:30 crc kubenswrapper[4982]: I0122 07:48:30.092196 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-mjzt9" Jan 22 07:48:30 crc kubenswrapper[4982]: I0122 07:48:30.092217 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 22 07:48:30 crc kubenswrapper[4982]: I0122 07:48:30.095036 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 22 07:48:30 crc kubenswrapper[4982]: I0122 07:48:30.102064 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 22 07:48:30 crc kubenswrapper[4982]: I0122 07:48:30.105319 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-68mv5"] Jan 22 07:48:30 crc kubenswrapper[4982]: I0122 07:48:30.187918 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/aaff985b-bf0d-4d70-90d3-4cac37e68578-ceph\") pod \"download-cache-openstack-openstack-cell1-68mv5\" (UID: \"aaff985b-bf0d-4d70-90d3-4cac37e68578\") " pod="openstack/download-cache-openstack-openstack-cell1-68mv5" Jan 22 07:48:30 crc kubenswrapper[4982]: I0122 07:48:30.188032 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvvk8\" (UniqueName: \"kubernetes.io/projected/aaff985b-bf0d-4d70-90d3-4cac37e68578-kube-api-access-jvvk8\") pod \"download-cache-openstack-openstack-cell1-68mv5\" (UID: \"aaff985b-bf0d-4d70-90d3-4cac37e68578\") " pod="openstack/download-cache-openstack-openstack-cell1-68mv5" Jan 22 07:48:30 crc kubenswrapper[4982]: I0122 07:48:30.188083 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/aaff985b-bf0d-4d70-90d3-4cac37e68578-ssh-key-openstack-cell1\") pod \"download-cache-openstack-openstack-cell1-68mv5\" (UID: \"aaff985b-bf0d-4d70-90d3-4cac37e68578\") " pod="openstack/download-cache-openstack-openstack-cell1-68mv5" Jan 22 07:48:30 crc kubenswrapper[4982]: I0122 07:48:30.188186 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/aaff985b-bf0d-4d70-90d3-4cac37e68578-inventory\") pod \"download-cache-openstack-openstack-cell1-68mv5\" (UID: \"aaff985b-bf0d-4d70-90d3-4cac37e68578\") " pod="openstack/download-cache-openstack-openstack-cell1-68mv5" Jan 22 07:48:30 crc kubenswrapper[4982]: I0122 07:48:30.289785 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/aaff985b-bf0d-4d70-90d3-4cac37e68578-ceph\") pod \"download-cache-openstack-openstack-cell1-68mv5\" (UID: \"aaff985b-bf0d-4d70-90d3-4cac37e68578\") " pod="openstack/download-cache-openstack-openstack-cell1-68mv5" Jan 22 07:48:30 crc kubenswrapper[4982]: I0122 07:48:30.290127 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvvk8\" (UniqueName: \"kubernetes.io/projected/aaff985b-bf0d-4d70-90d3-4cac37e68578-kube-api-access-jvvk8\") pod \"download-cache-openstack-openstack-cell1-68mv5\" (UID: \"aaff985b-bf0d-4d70-90d3-4cac37e68578\") " pod="openstack/download-cache-openstack-openstack-cell1-68mv5" Jan 22 07:48:30 crc kubenswrapper[4982]: I0122 07:48:30.290244 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/aaff985b-bf0d-4d70-90d3-4cac37e68578-ssh-key-openstack-cell1\") pod \"download-cache-openstack-openstack-cell1-68mv5\" (UID: \"aaff985b-bf0d-4d70-90d3-4cac37e68578\") " pod="openstack/download-cache-openstack-openstack-cell1-68mv5" Jan 22 07:48:30 crc kubenswrapper[4982]: I0122 07:48:30.290406 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aaff985b-bf0d-4d70-90d3-4cac37e68578-inventory\") pod \"download-cache-openstack-openstack-cell1-68mv5\" (UID: \"aaff985b-bf0d-4d70-90d3-4cac37e68578\") " pod="openstack/download-cache-openstack-openstack-cell1-68mv5" Jan 22 07:48:30 crc kubenswrapper[4982]: I0122 07:48:30.294769 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/aaff985b-bf0d-4d70-90d3-4cac37e68578-ceph\") pod \"download-cache-openstack-openstack-cell1-68mv5\" (UID: \"aaff985b-bf0d-4d70-90d3-4cac37e68578\") " pod="openstack/download-cache-openstack-openstack-cell1-68mv5" Jan 22 07:48:30 crc kubenswrapper[4982]: I0122 07:48:30.295075 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aaff985b-bf0d-4d70-90d3-4cac37e68578-inventory\") pod \"download-cache-openstack-openstack-cell1-68mv5\" (UID: \"aaff985b-bf0d-4d70-90d3-4cac37e68578\") " pod="openstack/download-cache-openstack-openstack-cell1-68mv5" Jan 22 07:48:30 crc kubenswrapper[4982]: I0122 07:48:30.297419 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/aaff985b-bf0d-4d70-90d3-4cac37e68578-ssh-key-openstack-cell1\") pod \"download-cache-openstack-openstack-cell1-68mv5\" (UID: \"aaff985b-bf0d-4d70-90d3-4cac37e68578\") " pod="openstack/download-cache-openstack-openstack-cell1-68mv5" Jan 22 07:48:30 crc kubenswrapper[4982]: I0122 07:48:30.308755 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvvk8\" (UniqueName: \"kubernetes.io/projected/aaff985b-bf0d-4d70-90d3-4cac37e68578-kube-api-access-jvvk8\") pod \"download-cache-openstack-openstack-cell1-68mv5\" (UID: \"aaff985b-bf0d-4d70-90d3-4cac37e68578\") " 
pod="openstack/download-cache-openstack-openstack-cell1-68mv5" Jan 22 07:48:30 crc kubenswrapper[4982]: I0122 07:48:30.407990 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-68mv5" Jan 22 07:48:30 crc kubenswrapper[4982]: I0122 07:48:30.960515 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-68mv5"] Jan 22 07:48:31 crc kubenswrapper[4982]: I0122 07:48:31.010938 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-68mv5" event={"ID":"aaff985b-bf0d-4d70-90d3-4cac37e68578","Type":"ContainerStarted","Data":"863e8e522077b655b9b312ec6af61f5dd70c4bf01204dd8f351b61402244606f"} Jan 22 07:48:32 crc kubenswrapper[4982]: I0122 07:48:32.022938 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-68mv5" event={"ID":"aaff985b-bf0d-4d70-90d3-4cac37e68578","Type":"ContainerStarted","Data":"f729f53ed57ae2f1c69bb2f96c87fae54ba763f3fca733d4ff3c374a1ef6e990"} Jan 22 07:48:32 crc kubenswrapper[4982]: I0122 07:48:32.053634 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-openstack-openstack-cell1-68mv5" podStartSLOduration=1.652630429 podStartE2EDuration="2.053613196s" podCreationTimestamp="2026-01-22 07:48:30 +0000 UTC" firstStartedPulling="2026-01-22 07:48:30.969444549 +0000 UTC m=+7371.808082562" lastFinishedPulling="2026-01-22 07:48:31.370427326 +0000 UTC m=+7372.209065329" observedRunningTime="2026-01-22 07:48:32.048632672 +0000 UTC m=+7372.887270705" watchObservedRunningTime="2026-01-22 07:48:32.053613196 +0000 UTC m=+7372.892251209" Jan 22 07:48:48 crc kubenswrapper[4982]: I0122 07:48:48.974267 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 07:48:48 crc kubenswrapper[4982]: I0122 07:48:48.974844 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 07:49:18 crc kubenswrapper[4982]: I0122 07:49:18.974278 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 07:49:18 crc kubenswrapper[4982]: I0122 07:49:18.974803 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 07:49:33 crc kubenswrapper[4982]: I0122 07:49:33.973639 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2xdcx"] Jan 22 07:49:33 crc kubenswrapper[4982]: I0122 07:49:33.976764 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2xdcx" Jan 22 07:49:33 crc kubenswrapper[4982]: I0122 07:49:33.995522 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2xdcx"] Jan 22 07:49:34 crc kubenswrapper[4982]: I0122 07:49:34.032801 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea-catalog-content\") pod \"redhat-operators-2xdcx\" (UID: \"6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea\") " pod="openshift-marketplace/redhat-operators-2xdcx" Jan 22 07:49:34 crc kubenswrapper[4982]: I0122 07:49:34.032953 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g488z\" (UniqueName: \"kubernetes.io/projected/6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea-kube-api-access-g488z\") pod \"redhat-operators-2xdcx\" (UID: \"6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea\") " pod="openshift-marketplace/redhat-operators-2xdcx" Jan 22 07:49:34 crc kubenswrapper[4982]: I0122 07:49:34.033088 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea-utilities\") pod \"redhat-operators-2xdcx\" (UID: \"6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea\") " pod="openshift-marketplace/redhat-operators-2xdcx" Jan 22 07:49:34 crc kubenswrapper[4982]: I0122 07:49:34.136263 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea-catalog-content\") pod \"redhat-operators-2xdcx\" (UID: \"6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea\") " pod="openshift-marketplace/redhat-operators-2xdcx" Jan 22 07:49:34 crc kubenswrapper[4982]: I0122 07:49:34.136935 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea-catalog-content\") pod \"redhat-operators-2xdcx\" (UID: \"6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea\") " pod="openshift-marketplace/redhat-operators-2xdcx" Jan 22 07:49:34 crc kubenswrapper[4982]: I0122 07:49:34.137186 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g488z\" (UniqueName: \"kubernetes.io/projected/6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea-kube-api-access-g488z\") pod \"redhat-operators-2xdcx\" (UID: \"6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea\") " pod="openshift-marketplace/redhat-operators-2xdcx" Jan 22 07:49:34 crc kubenswrapper[4982]: I0122 07:49:34.137306 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea-utilities\") pod \"redhat-operators-2xdcx\" (UID: \"6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea\") " pod="openshift-marketplace/redhat-operators-2xdcx" Jan 22 07:49:34 crc kubenswrapper[4982]: I0122 07:49:34.138227 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea-utilities\") pod \"redhat-operators-2xdcx\" (UID: \"6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea\") " pod="openshift-marketplace/redhat-operators-2xdcx" Jan 22 07:49:34 crc kubenswrapper[4982]: I0122 07:49:34.164973 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-g488z\" (UniqueName: \"kubernetes.io/projected/6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea-kube-api-access-g488z\") pod \"redhat-operators-2xdcx\" (UID: \"6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea\") " pod="openshift-marketplace/redhat-operators-2xdcx" Jan 22 07:49:34 crc kubenswrapper[4982]: I0122 07:49:34.299931 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2xdcx" Jan 22 07:49:34 crc kubenswrapper[4982]: I0122 07:49:34.796946 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2xdcx"] Jan 22 07:49:35 crc kubenswrapper[4982]: I0122 07:49:35.709508 4982 generic.go:334] "Generic (PLEG): container finished" podID="6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea" containerID="fb48a92e06a377fb5e106fe333f2643ee38b7084c6331cb7e5fb1c8fbea947cf" exitCode=0 Jan 22 07:49:35 crc kubenswrapper[4982]: I0122 07:49:35.710744 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2xdcx" event={"ID":"6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea","Type":"ContainerDied","Data":"fb48a92e06a377fb5e106fe333f2643ee38b7084c6331cb7e5fb1c8fbea947cf"} Jan 22 07:49:35 crc kubenswrapper[4982]: I0122 07:49:35.710814 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2xdcx" event={"ID":"6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea","Type":"ContainerStarted","Data":"4eaa6edadd6d38992ce43ad70d7715b5392e54471964c82fa129aed367d21b78"} Jan 22 07:49:36 crc kubenswrapper[4982]: I0122 07:49:36.729578 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2xdcx" event={"ID":"6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea","Type":"ContainerStarted","Data":"9d9af63026e36445ecb2d57042dd64a8e00284e45a8dcfc9541ae1bb1b0a70be"} Jan 22 07:49:37 crc kubenswrapper[4982]: I0122 07:49:37.742386 4982 generic.go:334] "Generic (PLEG): container finished" podID="6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea" containerID="9d9af63026e36445ecb2d57042dd64a8e00284e45a8dcfc9541ae1bb1b0a70be" exitCode=0 Jan 22 07:49:37 crc kubenswrapper[4982]: I0122 07:49:37.742645 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2xdcx" event={"ID":"6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea","Type":"ContainerDied","Data":"9d9af63026e36445ecb2d57042dd64a8e00284e45a8dcfc9541ae1bb1b0a70be"} Jan 22 07:49:38 crc kubenswrapper[4982]: I0122 07:49:38.759085 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2xdcx" event={"ID":"6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea","Type":"ContainerStarted","Data":"ff951b31ee5f9d61246b4a75b0be52b3b905cd6eb4cbda61266dd849f1b5cd59"} Jan 22 07:49:38 crc kubenswrapper[4982]: I0122 07:49:38.794125 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2xdcx" podStartSLOduration=3.291677087 podStartE2EDuration="5.79410318s" podCreationTimestamp="2026-01-22 07:49:33 +0000 UTC" firstStartedPulling="2026-01-22 07:49:35.716553284 +0000 UTC m=+7436.555191277" lastFinishedPulling="2026-01-22 07:49:38.218979367 +0000 UTC m=+7439.057617370" observedRunningTime="2026-01-22 07:49:38.785653351 +0000 UTC m=+7439.624291364" watchObservedRunningTime="2026-01-22 07:49:38.79410318 +0000 UTC m=+7439.632741183" Jan 22 07:49:44 crc kubenswrapper[4982]: I0122 07:49:44.300991 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2xdcx" Jan 22 
07:49:44 crc kubenswrapper[4982]: I0122 07:49:44.301765 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2xdcx" Jan 22 07:49:45 crc kubenswrapper[4982]: I0122 07:49:45.358628 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-2xdcx" podUID="6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea" containerName="registry-server" probeResult="failure" output=< Jan 22 07:49:45 crc kubenswrapper[4982]: timeout: failed to connect service ":50051" within 1s Jan 22 07:49:45 crc kubenswrapper[4982]: > Jan 22 07:49:48 crc kubenswrapper[4982]: I0122 07:49:48.974246 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 07:49:48 crc kubenswrapper[4982]: I0122 07:49:48.974817 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 07:49:48 crc kubenswrapper[4982]: I0122 07:49:48.974893 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 07:49:48 crc kubenswrapper[4982]: I0122 07:49:48.975512 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4292384f8792d1496c63f3056a29f7893d807144bd86466effcdeb4f3e397538"} pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 07:49:48 crc kubenswrapper[4982]: I0122 07:49:48.975569 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" containerID="cri-o://4292384f8792d1496c63f3056a29f7893d807144bd86466effcdeb4f3e397538" gracePeriod=600 Jan 22 07:49:49 crc kubenswrapper[4982]: E0122 07:49:49.109411 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:49:49 crc kubenswrapper[4982]: I0122 07:49:49.883548 4982 generic.go:334] "Generic (PLEG): container finished" podID="2829369e-72ba-4637-853b-88f5cf242a0e" containerID="4292384f8792d1496c63f3056a29f7893d807144bd86466effcdeb4f3e397538" exitCode=0 Jan 22 07:49:49 crc kubenswrapper[4982]: I0122 07:49:49.883698 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerDied","Data":"4292384f8792d1496c63f3056a29f7893d807144bd86466effcdeb4f3e397538"} Jan 22 07:49:49 crc kubenswrapper[4982]: I0122 07:49:49.883939 4982 scope.go:117] 
"RemoveContainer" containerID="8c0b5523d1ee08c03b11a453c95ea4fa585602ba317d6269f55a0e0c8df92cbb" Jan 22 07:49:49 crc kubenswrapper[4982]: I0122 07:49:49.884689 4982 scope.go:117] "RemoveContainer" containerID="4292384f8792d1496c63f3056a29f7893d807144bd86466effcdeb4f3e397538" Jan 22 07:49:49 crc kubenswrapper[4982]: E0122 07:49:49.885046 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:49:54 crc kubenswrapper[4982]: I0122 07:49:54.377732 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2xdcx" Jan 22 07:49:54 crc kubenswrapper[4982]: I0122 07:49:54.457841 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2xdcx" Jan 22 07:49:54 crc kubenswrapper[4982]: I0122 07:49:54.621194 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2xdcx"] Jan 22 07:49:55 crc kubenswrapper[4982]: I0122 07:49:55.941680 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-2xdcx" podUID="6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea" containerName="registry-server" containerID="cri-o://ff951b31ee5f9d61246b4a75b0be52b3b905cd6eb4cbda61266dd849f1b5cd59" gracePeriod=2 Jan 22 07:49:56 crc kubenswrapper[4982]: I0122 07:49:56.475889 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2xdcx" Jan 22 07:49:56 crc kubenswrapper[4982]: I0122 07:49:56.559367 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g488z\" (UniqueName: \"kubernetes.io/projected/6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea-kube-api-access-g488z\") pod \"6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea\" (UID: \"6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea\") " Jan 22 07:49:56 crc kubenswrapper[4982]: I0122 07:49:56.559543 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea-utilities\") pod \"6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea\" (UID: \"6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea\") " Jan 22 07:49:56 crc kubenswrapper[4982]: I0122 07:49:56.559687 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea-catalog-content\") pod \"6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea\" (UID: \"6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea\") " Jan 22 07:49:56 crc kubenswrapper[4982]: I0122 07:49:56.560585 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea-utilities" (OuterVolumeSpecName: "utilities") pod "6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea" (UID: "6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:49:56 crc kubenswrapper[4982]: I0122 07:49:56.565438 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea-kube-api-access-g488z" (OuterVolumeSpecName: "kube-api-access-g488z") pod "6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea" (UID: "6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea"). InnerVolumeSpecName "kube-api-access-g488z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:49:56 crc kubenswrapper[4982]: I0122 07:49:56.662724 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g488z\" (UniqueName: \"kubernetes.io/projected/6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea-kube-api-access-g488z\") on node \"crc\" DevicePath \"\"" Jan 22 07:49:56 crc kubenswrapper[4982]: I0122 07:49:56.662760 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 07:49:56 crc kubenswrapper[4982]: I0122 07:49:56.676374 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea" (UID: "6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:49:56 crc kubenswrapper[4982]: I0122 07:49:56.764906 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 07:49:56 crc kubenswrapper[4982]: I0122 07:49:56.969807 4982 generic.go:334] "Generic (PLEG): container finished" podID="6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea" containerID="ff951b31ee5f9d61246b4a75b0be52b3b905cd6eb4cbda61266dd849f1b5cd59" exitCode=0 Jan 22 07:49:56 crc kubenswrapper[4982]: I0122 07:49:56.969907 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2xdcx" Jan 22 07:49:56 crc kubenswrapper[4982]: I0122 07:49:56.969947 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2xdcx" event={"ID":"6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea","Type":"ContainerDied","Data":"ff951b31ee5f9d61246b4a75b0be52b3b905cd6eb4cbda61266dd849f1b5cd59"} Jan 22 07:49:56 crc kubenswrapper[4982]: I0122 07:49:56.970003 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2xdcx" event={"ID":"6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea","Type":"ContainerDied","Data":"4eaa6edadd6d38992ce43ad70d7715b5392e54471964c82fa129aed367d21b78"} Jan 22 07:49:56 crc kubenswrapper[4982]: I0122 07:49:56.970026 4982 scope.go:117] "RemoveContainer" containerID="ff951b31ee5f9d61246b4a75b0be52b3b905cd6eb4cbda61266dd849f1b5cd59" Jan 22 07:49:57 crc kubenswrapper[4982]: I0122 07:49:57.010686 4982 scope.go:117] "RemoveContainer" containerID="9d9af63026e36445ecb2d57042dd64a8e00284e45a8dcfc9541ae1bb1b0a70be" Jan 22 07:49:57 crc kubenswrapper[4982]: I0122 07:49:57.014571 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2xdcx"] Jan 22 07:49:57 crc kubenswrapper[4982]: I0122 07:49:57.025270 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-2xdcx"] Jan 22 07:49:57 crc kubenswrapper[4982]: I0122 07:49:57.045221 4982 scope.go:117] "RemoveContainer" containerID="fb48a92e06a377fb5e106fe333f2643ee38b7084c6331cb7e5fb1c8fbea947cf" Jan 22 07:49:57 crc kubenswrapper[4982]: I0122 07:49:57.080025 4982 scope.go:117] "RemoveContainer" containerID="ff951b31ee5f9d61246b4a75b0be52b3b905cd6eb4cbda61266dd849f1b5cd59" Jan 22 07:49:57 crc kubenswrapper[4982]: E0122 07:49:57.080631 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff951b31ee5f9d61246b4a75b0be52b3b905cd6eb4cbda61266dd849f1b5cd59\": container with ID starting with ff951b31ee5f9d61246b4a75b0be52b3b905cd6eb4cbda61266dd849f1b5cd59 not found: ID does not exist" containerID="ff951b31ee5f9d61246b4a75b0be52b3b905cd6eb4cbda61266dd849f1b5cd59" Jan 22 07:49:57 crc kubenswrapper[4982]: I0122 07:49:57.080661 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff951b31ee5f9d61246b4a75b0be52b3b905cd6eb4cbda61266dd849f1b5cd59"} err="failed to get container status \"ff951b31ee5f9d61246b4a75b0be52b3b905cd6eb4cbda61266dd849f1b5cd59\": rpc error: code = NotFound desc = could not find container \"ff951b31ee5f9d61246b4a75b0be52b3b905cd6eb4cbda61266dd849f1b5cd59\": container with ID starting with ff951b31ee5f9d61246b4a75b0be52b3b905cd6eb4cbda61266dd849f1b5cd59 not found: ID does not exist" Jan 22 07:49:57 crc kubenswrapper[4982]: I0122 07:49:57.080681 4982 scope.go:117] "RemoveContainer" containerID="9d9af63026e36445ecb2d57042dd64a8e00284e45a8dcfc9541ae1bb1b0a70be" Jan 22 07:49:57 crc kubenswrapper[4982]: E0122 07:49:57.081108 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d9af63026e36445ecb2d57042dd64a8e00284e45a8dcfc9541ae1bb1b0a70be\": container with ID starting with 9d9af63026e36445ecb2d57042dd64a8e00284e45a8dcfc9541ae1bb1b0a70be not found: ID does not exist" containerID="9d9af63026e36445ecb2d57042dd64a8e00284e45a8dcfc9541ae1bb1b0a70be" Jan 22 07:49:57 crc kubenswrapper[4982]: I0122 07:49:57.081134 4982 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d9af63026e36445ecb2d57042dd64a8e00284e45a8dcfc9541ae1bb1b0a70be"} err="failed to get container status \"9d9af63026e36445ecb2d57042dd64a8e00284e45a8dcfc9541ae1bb1b0a70be\": rpc error: code = NotFound desc = could not find container \"9d9af63026e36445ecb2d57042dd64a8e00284e45a8dcfc9541ae1bb1b0a70be\": container with ID starting with 9d9af63026e36445ecb2d57042dd64a8e00284e45a8dcfc9541ae1bb1b0a70be not found: ID does not exist" Jan 22 07:49:57 crc kubenswrapper[4982]: I0122 07:49:57.081150 4982 scope.go:117] "RemoveContainer" containerID="fb48a92e06a377fb5e106fe333f2643ee38b7084c6331cb7e5fb1c8fbea947cf" Jan 22 07:49:57 crc kubenswrapper[4982]: E0122 07:49:57.081415 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb48a92e06a377fb5e106fe333f2643ee38b7084c6331cb7e5fb1c8fbea947cf\": container with ID starting with fb48a92e06a377fb5e106fe333f2643ee38b7084c6331cb7e5fb1c8fbea947cf not found: ID does not exist" containerID="fb48a92e06a377fb5e106fe333f2643ee38b7084c6331cb7e5fb1c8fbea947cf" Jan 22 07:49:57 crc kubenswrapper[4982]: I0122 07:49:57.081438 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb48a92e06a377fb5e106fe333f2643ee38b7084c6331cb7e5fb1c8fbea947cf"} err="failed to get container status \"fb48a92e06a377fb5e106fe333f2643ee38b7084c6331cb7e5fb1c8fbea947cf\": rpc error: code = NotFound desc = could not find container \"fb48a92e06a377fb5e106fe333f2643ee38b7084c6331cb7e5fb1c8fbea947cf\": container with ID starting with fb48a92e06a377fb5e106fe333f2643ee38b7084c6331cb7e5fb1c8fbea947cf not found: ID does not exist" Jan 22 07:49:57 crc kubenswrapper[4982]: I0122 07:49:57.736470 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea" path="/var/lib/kubelet/pods/6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea/volumes" Jan 22 07:50:04 crc kubenswrapper[4982]: I0122 07:50:04.042712 4982 generic.go:334] "Generic (PLEG): container finished" podID="aaff985b-bf0d-4d70-90d3-4cac37e68578" containerID="f729f53ed57ae2f1c69bb2f96c87fae54ba763f3fca733d4ff3c374a1ef6e990" exitCode=0 Jan 22 07:50:04 crc kubenswrapper[4982]: I0122 07:50:04.042803 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-68mv5" event={"ID":"aaff985b-bf0d-4d70-90d3-4cac37e68578","Type":"ContainerDied","Data":"f729f53ed57ae2f1c69bb2f96c87fae54ba763f3fca733d4ff3c374a1ef6e990"} Jan 22 07:50:04 crc kubenswrapper[4982]: E0122 07:50:04.236582 4982 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaaff985b_bf0d_4d70_90d3_4cac37e68578.slice/crio-conmon-f729f53ed57ae2f1c69bb2f96c87fae54ba763f3fca733d4ff3c374a1ef6e990.scope\": RecentStats: unable to find data in memory cache]" Jan 22 07:50:04 crc kubenswrapper[4982]: I0122 07:50:04.719126 4982 scope.go:117] "RemoveContainer" containerID="4292384f8792d1496c63f3056a29f7893d807144bd86466effcdeb4f3e397538" Jan 22 07:50:04 crc kubenswrapper[4982]: E0122 07:50:04.719744 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:50:05 crc kubenswrapper[4982]: I0122 07:50:05.519253 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-68mv5" Jan 22 07:50:05 crc kubenswrapper[4982]: I0122 07:50:05.570503 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aaff985b-bf0d-4d70-90d3-4cac37e68578-inventory\") pod \"aaff985b-bf0d-4d70-90d3-4cac37e68578\" (UID: \"aaff985b-bf0d-4d70-90d3-4cac37e68578\") " Jan 22 07:50:05 crc kubenswrapper[4982]: I0122 07:50:05.570556 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvvk8\" (UniqueName: \"kubernetes.io/projected/aaff985b-bf0d-4d70-90d3-4cac37e68578-kube-api-access-jvvk8\") pod \"aaff985b-bf0d-4d70-90d3-4cac37e68578\" (UID: \"aaff985b-bf0d-4d70-90d3-4cac37e68578\") " Jan 22 07:50:05 crc kubenswrapper[4982]: I0122 07:50:05.570608 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/aaff985b-bf0d-4d70-90d3-4cac37e68578-ssh-key-openstack-cell1\") pod \"aaff985b-bf0d-4d70-90d3-4cac37e68578\" (UID: \"aaff985b-bf0d-4d70-90d3-4cac37e68578\") " Jan 22 07:50:05 crc kubenswrapper[4982]: I0122 07:50:05.570706 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/aaff985b-bf0d-4d70-90d3-4cac37e68578-ceph\") pod \"aaff985b-bf0d-4d70-90d3-4cac37e68578\" (UID: \"aaff985b-bf0d-4d70-90d3-4cac37e68578\") " Jan 22 07:50:05 crc kubenswrapper[4982]: I0122 07:50:05.576425 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aaff985b-bf0d-4d70-90d3-4cac37e68578-ceph" (OuterVolumeSpecName: "ceph") pod "aaff985b-bf0d-4d70-90d3-4cac37e68578" (UID: "aaff985b-bf0d-4d70-90d3-4cac37e68578"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:50:05 crc kubenswrapper[4982]: I0122 07:50:05.594919 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aaff985b-bf0d-4d70-90d3-4cac37e68578-kube-api-access-jvvk8" (OuterVolumeSpecName: "kube-api-access-jvvk8") pod "aaff985b-bf0d-4d70-90d3-4cac37e68578" (UID: "aaff985b-bf0d-4d70-90d3-4cac37e68578"). InnerVolumeSpecName "kube-api-access-jvvk8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:50:05 crc kubenswrapper[4982]: I0122 07:50:05.598782 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aaff985b-bf0d-4d70-90d3-4cac37e68578-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "aaff985b-bf0d-4d70-90d3-4cac37e68578" (UID: "aaff985b-bf0d-4d70-90d3-4cac37e68578"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:50:05 crc kubenswrapper[4982]: I0122 07:50:05.621042 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aaff985b-bf0d-4d70-90d3-4cac37e68578-inventory" (OuterVolumeSpecName: "inventory") pod "aaff985b-bf0d-4d70-90d3-4cac37e68578" (UID: "aaff985b-bf0d-4d70-90d3-4cac37e68578"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:50:05 crc kubenswrapper[4982]: I0122 07:50:05.672930 4982 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/aaff985b-bf0d-4d70-90d3-4cac37e68578-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 22 07:50:05 crc kubenswrapper[4982]: I0122 07:50:05.672963 4982 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/aaff985b-bf0d-4d70-90d3-4cac37e68578-ceph\") on node \"crc\" DevicePath \"\"" Jan 22 07:50:05 crc kubenswrapper[4982]: I0122 07:50:05.672975 4982 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aaff985b-bf0d-4d70-90d3-4cac37e68578-inventory\") on node \"crc\" DevicePath \"\"" Jan 22 07:50:05 crc kubenswrapper[4982]: I0122 07:50:05.672984 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvvk8\" (UniqueName: \"kubernetes.io/projected/aaff985b-bf0d-4d70-90d3-4cac37e68578-kube-api-access-jvvk8\") on node \"crc\" DevicePath \"\"" Jan 22 07:50:06 crc kubenswrapper[4982]: I0122 07:50:06.063871 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-68mv5" event={"ID":"aaff985b-bf0d-4d70-90d3-4cac37e68578","Type":"ContainerDied","Data":"863e8e522077b655b9b312ec6af61f5dd70c4bf01204dd8f351b61402244606f"} Jan 22 07:50:06 crc kubenswrapper[4982]: I0122 07:50:06.063920 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="863e8e522077b655b9b312ec6af61f5dd70c4bf01204dd8f351b61402244606f" Jan 22 07:50:06 crc kubenswrapper[4982]: I0122 07:50:06.063997 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-68mv5" Jan 22 07:50:06 crc kubenswrapper[4982]: I0122 07:50:06.147275 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-zbmlr"] Jan 22 07:50:06 crc kubenswrapper[4982]: E0122 07:50:06.147715 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea" containerName="extract-content" Jan 22 07:50:06 crc kubenswrapper[4982]: I0122 07:50:06.147731 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea" containerName="extract-content" Jan 22 07:50:06 crc kubenswrapper[4982]: E0122 07:50:06.147741 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea" containerName="extract-utilities" Jan 22 07:50:06 crc kubenswrapper[4982]: I0122 07:50:06.147747 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea" containerName="extract-utilities" Jan 22 07:50:06 crc kubenswrapper[4982]: E0122 07:50:06.147769 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aaff985b-bf0d-4d70-90d3-4cac37e68578" containerName="download-cache-openstack-openstack-cell1" Jan 22 07:50:06 crc kubenswrapper[4982]: I0122 07:50:06.147774 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="aaff985b-bf0d-4d70-90d3-4cac37e68578" containerName="download-cache-openstack-openstack-cell1" Jan 22 07:50:06 crc kubenswrapper[4982]: E0122 07:50:06.147795 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea" containerName="registry-server" Jan 22 07:50:06 crc kubenswrapper[4982]: I0122 07:50:06.147801 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea" containerName="registry-server" Jan 22 07:50:06 crc kubenswrapper[4982]: I0122 07:50:06.148034 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c893ca3-7ff2-4b91-9b0f-ca536ed6a7ea" containerName="registry-server" Jan 22 07:50:06 crc kubenswrapper[4982]: I0122 07:50:06.148067 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="aaff985b-bf0d-4d70-90d3-4cac37e68578" containerName="download-cache-openstack-openstack-cell1" Jan 22 07:50:06 crc kubenswrapper[4982]: I0122 07:50:06.148824 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-zbmlr" Jan 22 07:50:06 crc kubenswrapper[4982]: I0122 07:50:06.150898 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 22 07:50:06 crc kubenswrapper[4982]: I0122 07:50:06.151075 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 22 07:50:06 crc kubenswrapper[4982]: I0122 07:50:06.151184 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 22 07:50:06 crc kubenswrapper[4982]: I0122 07:50:06.151297 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-mjzt9" Jan 22 07:50:06 crc kubenswrapper[4982]: I0122 07:50:06.160286 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-zbmlr"] Jan 22 07:50:06 crc kubenswrapper[4982]: I0122 07:50:06.185550 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fh2rc\" (UniqueName: \"kubernetes.io/projected/1f1ea1bc-d8d8-4cfb-93bf-86884d573976-kube-api-access-fh2rc\") pod \"configure-network-openstack-openstack-cell1-zbmlr\" (UID: \"1f1ea1bc-d8d8-4cfb-93bf-86884d573976\") " pod="openstack/configure-network-openstack-openstack-cell1-zbmlr" Jan 22 07:50:06 crc kubenswrapper[4982]: I0122 07:50:06.185638 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1f1ea1bc-d8d8-4cfb-93bf-86884d573976-ceph\") pod \"configure-network-openstack-openstack-cell1-zbmlr\" (UID: \"1f1ea1bc-d8d8-4cfb-93bf-86884d573976\") " pod="openstack/configure-network-openstack-openstack-cell1-zbmlr" Jan 22 07:50:06 crc kubenswrapper[4982]: I0122 07:50:06.185672 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1f1ea1bc-d8d8-4cfb-93bf-86884d573976-inventory\") pod \"configure-network-openstack-openstack-cell1-zbmlr\" (UID: \"1f1ea1bc-d8d8-4cfb-93bf-86884d573976\") " pod="openstack/configure-network-openstack-openstack-cell1-zbmlr" Jan 22 07:50:06 crc kubenswrapper[4982]: I0122 07:50:06.186109 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/1f1ea1bc-d8d8-4cfb-93bf-86884d573976-ssh-key-openstack-cell1\") pod \"configure-network-openstack-openstack-cell1-zbmlr\" (UID: \"1f1ea1bc-d8d8-4cfb-93bf-86884d573976\") " pod="openstack/configure-network-openstack-openstack-cell1-zbmlr" Jan 22 07:50:06 crc kubenswrapper[4982]: I0122 07:50:06.288356 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fh2rc\" (UniqueName: \"kubernetes.io/projected/1f1ea1bc-d8d8-4cfb-93bf-86884d573976-kube-api-access-fh2rc\") pod \"configure-network-openstack-openstack-cell1-zbmlr\" (UID: \"1f1ea1bc-d8d8-4cfb-93bf-86884d573976\") " pod="openstack/configure-network-openstack-openstack-cell1-zbmlr" Jan 22 07:50:06 crc kubenswrapper[4982]: I0122 07:50:06.288570 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1f1ea1bc-d8d8-4cfb-93bf-86884d573976-ceph\") pod \"configure-network-openstack-openstack-cell1-zbmlr\" (UID: \"1f1ea1bc-d8d8-4cfb-93bf-86884d573976\") " 
pod="openstack/configure-network-openstack-openstack-cell1-zbmlr" Jan 22 07:50:06 crc kubenswrapper[4982]: I0122 07:50:06.288705 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1f1ea1bc-d8d8-4cfb-93bf-86884d573976-inventory\") pod \"configure-network-openstack-openstack-cell1-zbmlr\" (UID: \"1f1ea1bc-d8d8-4cfb-93bf-86884d573976\") " pod="openstack/configure-network-openstack-openstack-cell1-zbmlr" Jan 22 07:50:06 crc kubenswrapper[4982]: I0122 07:50:06.289255 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/1f1ea1bc-d8d8-4cfb-93bf-86884d573976-ssh-key-openstack-cell1\") pod \"configure-network-openstack-openstack-cell1-zbmlr\" (UID: \"1f1ea1bc-d8d8-4cfb-93bf-86884d573976\") " pod="openstack/configure-network-openstack-openstack-cell1-zbmlr" Jan 22 07:50:06 crc kubenswrapper[4982]: I0122 07:50:06.293101 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1f1ea1bc-d8d8-4cfb-93bf-86884d573976-inventory\") pod \"configure-network-openstack-openstack-cell1-zbmlr\" (UID: \"1f1ea1bc-d8d8-4cfb-93bf-86884d573976\") " pod="openstack/configure-network-openstack-openstack-cell1-zbmlr" Jan 22 07:50:06 crc kubenswrapper[4982]: I0122 07:50:06.293345 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1f1ea1bc-d8d8-4cfb-93bf-86884d573976-ceph\") pod \"configure-network-openstack-openstack-cell1-zbmlr\" (UID: \"1f1ea1bc-d8d8-4cfb-93bf-86884d573976\") " pod="openstack/configure-network-openstack-openstack-cell1-zbmlr" Jan 22 07:50:06 crc kubenswrapper[4982]: I0122 07:50:06.296394 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/1f1ea1bc-d8d8-4cfb-93bf-86884d573976-ssh-key-openstack-cell1\") pod \"configure-network-openstack-openstack-cell1-zbmlr\" (UID: \"1f1ea1bc-d8d8-4cfb-93bf-86884d573976\") " pod="openstack/configure-network-openstack-openstack-cell1-zbmlr" Jan 22 07:50:06 crc kubenswrapper[4982]: I0122 07:50:06.306130 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fh2rc\" (UniqueName: \"kubernetes.io/projected/1f1ea1bc-d8d8-4cfb-93bf-86884d573976-kube-api-access-fh2rc\") pod \"configure-network-openstack-openstack-cell1-zbmlr\" (UID: \"1f1ea1bc-d8d8-4cfb-93bf-86884d573976\") " pod="openstack/configure-network-openstack-openstack-cell1-zbmlr" Jan 22 07:50:06 crc kubenswrapper[4982]: I0122 07:50:06.468215 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-zbmlr" Jan 22 07:50:07 crc kubenswrapper[4982]: I0122 07:50:07.073200 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-zbmlr"] Jan 22 07:50:08 crc kubenswrapper[4982]: I0122 07:50:08.083976 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-zbmlr" event={"ID":"1f1ea1bc-d8d8-4cfb-93bf-86884d573976","Type":"ContainerStarted","Data":"accfb6d2a5693ac4f8a27825736f1fa2ce8021fbe77573b3b034827f703b6b66"} Jan 22 07:50:08 crc kubenswrapper[4982]: I0122 07:50:08.084716 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-zbmlr" event={"ID":"1f1ea1bc-d8d8-4cfb-93bf-86884d573976","Type":"ContainerStarted","Data":"84b172b90028e6b32f6f486fdf2ddaec28a2ea9ad94e11f10df876e13bd89705"} Jan 22 07:50:08 crc kubenswrapper[4982]: I0122 07:50:08.109258 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-openstack-openstack-cell1-zbmlr" podStartSLOduration=1.690777372 podStartE2EDuration="2.10923401s" podCreationTimestamp="2026-01-22 07:50:06 +0000 UTC" firstStartedPulling="2026-01-22 07:50:07.069068927 +0000 UTC m=+7467.907706950" lastFinishedPulling="2026-01-22 07:50:07.487525585 +0000 UTC m=+7468.326163588" observedRunningTime="2026-01-22 07:50:08.103435513 +0000 UTC m=+7468.942073536" watchObservedRunningTime="2026-01-22 07:50:08.10923401 +0000 UTC m=+7468.947872013" Jan 22 07:50:17 crc kubenswrapper[4982]: I0122 07:50:17.720349 4982 scope.go:117] "RemoveContainer" containerID="4292384f8792d1496c63f3056a29f7893d807144bd86466effcdeb4f3e397538" Jan 22 07:50:17 crc kubenswrapper[4982]: E0122 07:50:17.721231 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:50:29 crc kubenswrapper[4982]: I0122 07:50:29.730635 4982 scope.go:117] "RemoveContainer" containerID="4292384f8792d1496c63f3056a29f7893d807144bd86466effcdeb4f3e397538" Jan 22 07:50:29 crc kubenswrapper[4982]: E0122 07:50:29.732302 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:50:40 crc kubenswrapper[4982]: I0122 07:50:40.720441 4982 scope.go:117] "RemoveContainer" containerID="4292384f8792d1496c63f3056a29f7893d807144bd86466effcdeb4f3e397538" Jan 22 07:50:40 crc kubenswrapper[4982]: E0122 07:50:40.721961 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:50:55 crc kubenswrapper[4982]: I0122 07:50:55.719329 4982 scope.go:117] "RemoveContainer" containerID="4292384f8792d1496c63f3056a29f7893d807144bd86466effcdeb4f3e397538" Jan 22 07:50:55 crc kubenswrapper[4982]: E0122 07:50:55.719987 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:51:07 crc kubenswrapper[4982]: I0122 07:51:07.719742 4982 scope.go:117] "RemoveContainer" containerID="4292384f8792d1496c63f3056a29f7893d807144bd86466effcdeb4f3e397538" Jan 22 07:51:07 crc kubenswrapper[4982]: E0122 07:51:07.720969 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:51:22 crc kubenswrapper[4982]: I0122 07:51:22.719725 4982 scope.go:117] "RemoveContainer" containerID="4292384f8792d1496c63f3056a29f7893d807144bd86466effcdeb4f3e397538" Jan 22 07:51:22 crc kubenswrapper[4982]: E0122 07:51:22.720772 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:51:31 crc kubenswrapper[4982]: I0122 07:51:31.953669 4982 generic.go:334] "Generic (PLEG): container finished" podID="1f1ea1bc-d8d8-4cfb-93bf-86884d573976" containerID="accfb6d2a5693ac4f8a27825736f1fa2ce8021fbe77573b3b034827f703b6b66" exitCode=0 Jan 22 07:51:31 crc kubenswrapper[4982]: I0122 07:51:31.953755 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-zbmlr" event={"ID":"1f1ea1bc-d8d8-4cfb-93bf-86884d573976","Type":"ContainerDied","Data":"accfb6d2a5693ac4f8a27825736f1fa2ce8021fbe77573b3b034827f703b6b66"} Jan 22 07:51:33 crc kubenswrapper[4982]: I0122 07:51:33.418412 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-zbmlr" Jan 22 07:51:33 crc kubenswrapper[4982]: I0122 07:51:33.520381 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1f1ea1bc-d8d8-4cfb-93bf-86884d573976-inventory\") pod \"1f1ea1bc-d8d8-4cfb-93bf-86884d573976\" (UID: \"1f1ea1bc-d8d8-4cfb-93bf-86884d573976\") " Jan 22 07:51:33 crc kubenswrapper[4982]: I0122 07:51:33.520520 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/1f1ea1bc-d8d8-4cfb-93bf-86884d573976-ssh-key-openstack-cell1\") pod \"1f1ea1bc-d8d8-4cfb-93bf-86884d573976\" (UID: \"1f1ea1bc-d8d8-4cfb-93bf-86884d573976\") " Jan 22 07:51:33 crc kubenswrapper[4982]: I0122 07:51:33.520554 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1f1ea1bc-d8d8-4cfb-93bf-86884d573976-ceph\") pod \"1f1ea1bc-d8d8-4cfb-93bf-86884d573976\" (UID: \"1f1ea1bc-d8d8-4cfb-93bf-86884d573976\") " Jan 22 07:51:33 crc kubenswrapper[4982]: I0122 07:51:33.520701 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fh2rc\" (UniqueName: \"kubernetes.io/projected/1f1ea1bc-d8d8-4cfb-93bf-86884d573976-kube-api-access-fh2rc\") pod \"1f1ea1bc-d8d8-4cfb-93bf-86884d573976\" (UID: \"1f1ea1bc-d8d8-4cfb-93bf-86884d573976\") " Jan 22 07:51:33 crc kubenswrapper[4982]: I0122 07:51:33.527347 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f1ea1bc-d8d8-4cfb-93bf-86884d573976-ceph" (OuterVolumeSpecName: "ceph") pod "1f1ea1bc-d8d8-4cfb-93bf-86884d573976" (UID: "1f1ea1bc-d8d8-4cfb-93bf-86884d573976"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:51:33 crc kubenswrapper[4982]: I0122 07:51:33.531709 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f1ea1bc-d8d8-4cfb-93bf-86884d573976-kube-api-access-fh2rc" (OuterVolumeSpecName: "kube-api-access-fh2rc") pod "1f1ea1bc-d8d8-4cfb-93bf-86884d573976" (UID: "1f1ea1bc-d8d8-4cfb-93bf-86884d573976"). InnerVolumeSpecName "kube-api-access-fh2rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:51:33 crc kubenswrapper[4982]: I0122 07:51:33.554053 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f1ea1bc-d8d8-4cfb-93bf-86884d573976-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "1f1ea1bc-d8d8-4cfb-93bf-86884d573976" (UID: "1f1ea1bc-d8d8-4cfb-93bf-86884d573976"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:51:33 crc kubenswrapper[4982]: I0122 07:51:33.554840 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f1ea1bc-d8d8-4cfb-93bf-86884d573976-inventory" (OuterVolumeSpecName: "inventory") pod "1f1ea1bc-d8d8-4cfb-93bf-86884d573976" (UID: "1f1ea1bc-d8d8-4cfb-93bf-86884d573976"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:51:33 crc kubenswrapper[4982]: I0122 07:51:33.623832 4982 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1f1ea1bc-d8d8-4cfb-93bf-86884d573976-inventory\") on node \"crc\" DevicePath \"\"" Jan 22 07:51:33 crc kubenswrapper[4982]: I0122 07:51:33.624256 4982 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/1f1ea1bc-d8d8-4cfb-93bf-86884d573976-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 22 07:51:33 crc kubenswrapper[4982]: I0122 07:51:33.624273 4982 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1f1ea1bc-d8d8-4cfb-93bf-86884d573976-ceph\") on node \"crc\" DevicePath \"\"" Jan 22 07:51:33 crc kubenswrapper[4982]: I0122 07:51:33.624285 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fh2rc\" (UniqueName: \"kubernetes.io/projected/1f1ea1bc-d8d8-4cfb-93bf-86884d573976-kube-api-access-fh2rc\") on node \"crc\" DevicePath \"\"" Jan 22 07:51:33 crc kubenswrapper[4982]: I0122 07:51:33.976385 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-zbmlr" event={"ID":"1f1ea1bc-d8d8-4cfb-93bf-86884d573976","Type":"ContainerDied","Data":"84b172b90028e6b32f6f486fdf2ddaec28a2ea9ad94e11f10df876e13bd89705"} Jan 22 07:51:33 crc kubenswrapper[4982]: I0122 07:51:33.976419 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-zbmlr" Jan 22 07:51:33 crc kubenswrapper[4982]: I0122 07:51:33.976436 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="84b172b90028e6b32f6f486fdf2ddaec28a2ea9ad94e11f10df876e13bd89705" Jan 22 07:51:34 crc kubenswrapper[4982]: I0122 07:51:34.075296 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-zcf8c"] Jan 22 07:51:34 crc kubenswrapper[4982]: E0122 07:51:34.075898 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f1ea1bc-d8d8-4cfb-93bf-86884d573976" containerName="configure-network-openstack-openstack-cell1" Jan 22 07:51:34 crc kubenswrapper[4982]: I0122 07:51:34.075933 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f1ea1bc-d8d8-4cfb-93bf-86884d573976" containerName="configure-network-openstack-openstack-cell1" Jan 22 07:51:34 crc kubenswrapper[4982]: I0122 07:51:34.076195 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f1ea1bc-d8d8-4cfb-93bf-86884d573976" containerName="configure-network-openstack-openstack-cell1" Jan 22 07:51:34 crc kubenswrapper[4982]: I0122 07:51:34.076947 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-zcf8c" Jan 22 07:51:34 crc kubenswrapper[4982]: I0122 07:51:34.079616 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-mjzt9" Jan 22 07:51:34 crc kubenswrapper[4982]: I0122 07:51:34.079733 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 22 07:51:34 crc kubenswrapper[4982]: I0122 07:51:34.079883 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 22 07:51:34 crc kubenswrapper[4982]: I0122 07:51:34.080402 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 22 07:51:34 crc kubenswrapper[4982]: I0122 07:51:34.087899 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-zcf8c"] Jan 22 07:51:34 crc kubenswrapper[4982]: I0122 07:51:34.237599 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89xx2\" (UniqueName: \"kubernetes.io/projected/8be015a3-c616-41d3-af20-ffcfb571653a-kube-api-access-89xx2\") pod \"validate-network-openstack-openstack-cell1-zcf8c\" (UID: \"8be015a3-c616-41d3-af20-ffcfb571653a\") " pod="openstack/validate-network-openstack-openstack-cell1-zcf8c" Jan 22 07:51:34 crc kubenswrapper[4982]: I0122 07:51:34.237691 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8be015a3-c616-41d3-af20-ffcfb571653a-inventory\") pod \"validate-network-openstack-openstack-cell1-zcf8c\" (UID: \"8be015a3-c616-41d3-af20-ffcfb571653a\") " pod="openstack/validate-network-openstack-openstack-cell1-zcf8c" Jan 22 07:51:34 crc kubenswrapper[4982]: I0122 07:51:34.237787 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8be015a3-c616-41d3-af20-ffcfb571653a-ceph\") pod \"validate-network-openstack-openstack-cell1-zcf8c\" (UID: \"8be015a3-c616-41d3-af20-ffcfb571653a\") " pod="openstack/validate-network-openstack-openstack-cell1-zcf8c" Jan 22 07:51:34 crc kubenswrapper[4982]: I0122 07:51:34.237893 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/8be015a3-c616-41d3-af20-ffcfb571653a-ssh-key-openstack-cell1\") pod \"validate-network-openstack-openstack-cell1-zcf8c\" (UID: \"8be015a3-c616-41d3-af20-ffcfb571653a\") " pod="openstack/validate-network-openstack-openstack-cell1-zcf8c" Jan 22 07:51:34 crc kubenswrapper[4982]: I0122 07:51:34.339798 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89xx2\" (UniqueName: \"kubernetes.io/projected/8be015a3-c616-41d3-af20-ffcfb571653a-kube-api-access-89xx2\") pod \"validate-network-openstack-openstack-cell1-zcf8c\" (UID: \"8be015a3-c616-41d3-af20-ffcfb571653a\") " pod="openstack/validate-network-openstack-openstack-cell1-zcf8c" Jan 22 07:51:34 crc kubenswrapper[4982]: I0122 07:51:34.339932 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8be015a3-c616-41d3-af20-ffcfb571653a-inventory\") pod \"validate-network-openstack-openstack-cell1-zcf8c\" (UID: \"8be015a3-c616-41d3-af20-ffcfb571653a\") " 
pod="openstack/validate-network-openstack-openstack-cell1-zcf8c" Jan 22 07:51:34 crc kubenswrapper[4982]: I0122 07:51:34.340187 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8be015a3-c616-41d3-af20-ffcfb571653a-ceph\") pod \"validate-network-openstack-openstack-cell1-zcf8c\" (UID: \"8be015a3-c616-41d3-af20-ffcfb571653a\") " pod="openstack/validate-network-openstack-openstack-cell1-zcf8c" Jan 22 07:51:34 crc kubenswrapper[4982]: I0122 07:51:34.341566 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/8be015a3-c616-41d3-af20-ffcfb571653a-ssh-key-openstack-cell1\") pod \"validate-network-openstack-openstack-cell1-zcf8c\" (UID: \"8be015a3-c616-41d3-af20-ffcfb571653a\") " pod="openstack/validate-network-openstack-openstack-cell1-zcf8c" Jan 22 07:51:34 crc kubenswrapper[4982]: I0122 07:51:34.344773 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8be015a3-c616-41d3-af20-ffcfb571653a-inventory\") pod \"validate-network-openstack-openstack-cell1-zcf8c\" (UID: \"8be015a3-c616-41d3-af20-ffcfb571653a\") " pod="openstack/validate-network-openstack-openstack-cell1-zcf8c" Jan 22 07:51:34 crc kubenswrapper[4982]: I0122 07:51:34.350808 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8be015a3-c616-41d3-af20-ffcfb571653a-ceph\") pod \"validate-network-openstack-openstack-cell1-zcf8c\" (UID: \"8be015a3-c616-41d3-af20-ffcfb571653a\") " pod="openstack/validate-network-openstack-openstack-cell1-zcf8c" Jan 22 07:51:34 crc kubenswrapper[4982]: I0122 07:51:34.350907 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/8be015a3-c616-41d3-af20-ffcfb571653a-ssh-key-openstack-cell1\") pod \"validate-network-openstack-openstack-cell1-zcf8c\" (UID: \"8be015a3-c616-41d3-af20-ffcfb571653a\") " pod="openstack/validate-network-openstack-openstack-cell1-zcf8c" Jan 22 07:51:34 crc kubenswrapper[4982]: I0122 07:51:34.357615 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89xx2\" (UniqueName: \"kubernetes.io/projected/8be015a3-c616-41d3-af20-ffcfb571653a-kube-api-access-89xx2\") pod \"validate-network-openstack-openstack-cell1-zcf8c\" (UID: \"8be015a3-c616-41d3-af20-ffcfb571653a\") " pod="openstack/validate-network-openstack-openstack-cell1-zcf8c" Jan 22 07:51:34 crc kubenswrapper[4982]: I0122 07:51:34.396066 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-zcf8c" Jan 22 07:51:34 crc kubenswrapper[4982]: I0122 07:51:34.944823 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-zcf8c"] Jan 22 07:51:34 crc kubenswrapper[4982]: I0122 07:51:34.951791 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 07:51:34 crc kubenswrapper[4982]: I0122 07:51:34.988384 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-zcf8c" event={"ID":"8be015a3-c616-41d3-af20-ffcfb571653a","Type":"ContainerStarted","Data":"5c31e6387627ab3360e716f31f551e78d67a1889be1780f914cfb4ace7d04eee"} Jan 22 07:51:35 crc kubenswrapper[4982]: I0122 07:51:35.720249 4982 scope.go:117] "RemoveContainer" containerID="4292384f8792d1496c63f3056a29f7893d807144bd86466effcdeb4f3e397538" Jan 22 07:51:35 crc kubenswrapper[4982]: E0122 07:51:35.721194 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:51:35 crc kubenswrapper[4982]: I0122 07:51:35.999728 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-zcf8c" event={"ID":"8be015a3-c616-41d3-af20-ffcfb571653a","Type":"ContainerStarted","Data":"053e4be2dd21024a2128713e3e548c6c1b36c15ab385f370d1c67f9696d10b16"} Jan 22 07:51:36 crc kubenswrapper[4982]: I0122 07:51:36.021467 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-openstack-openstack-cell1-zcf8c" podStartSLOduration=1.600007604 podStartE2EDuration="2.021450643s" podCreationTimestamp="2026-01-22 07:51:34 +0000 UTC" firstStartedPulling="2026-01-22 07:51:34.951605085 +0000 UTC m=+7555.790243088" lastFinishedPulling="2026-01-22 07:51:35.373048124 +0000 UTC m=+7556.211686127" observedRunningTime="2026-01-22 07:51:36.017752792 +0000 UTC m=+7556.856390825" watchObservedRunningTime="2026-01-22 07:51:36.021450643 +0000 UTC m=+7556.860088646" Jan 22 07:51:41 crc kubenswrapper[4982]: I0122 07:51:41.043643 4982 generic.go:334] "Generic (PLEG): container finished" podID="8be015a3-c616-41d3-af20-ffcfb571653a" containerID="053e4be2dd21024a2128713e3e548c6c1b36c15ab385f370d1c67f9696d10b16" exitCode=0 Jan 22 07:51:41 crc kubenswrapper[4982]: I0122 07:51:41.045604 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-zcf8c" event={"ID":"8be015a3-c616-41d3-af20-ffcfb571653a","Type":"ContainerDied","Data":"053e4be2dd21024a2128713e3e548c6c1b36c15ab385f370d1c67f9696d10b16"} Jan 22 07:51:41 crc kubenswrapper[4982]: I0122 07:51:41.410352 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4g85m"] Jan 22 07:51:41 crc kubenswrapper[4982]: I0122 07:51:41.413363 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4g85m" Jan 22 07:51:41 crc kubenswrapper[4982]: I0122 07:51:41.421435 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4g85m"] Jan 22 07:51:41 crc kubenswrapper[4982]: I0122 07:51:41.497893 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t2kj4\" (UniqueName: \"kubernetes.io/projected/df77b6b6-b7de-4af1-9316-d509beb0de90-kube-api-access-t2kj4\") pod \"redhat-marketplace-4g85m\" (UID: \"df77b6b6-b7de-4af1-9316-d509beb0de90\") " pod="openshift-marketplace/redhat-marketplace-4g85m" Jan 22 07:51:41 crc kubenswrapper[4982]: I0122 07:51:41.498043 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df77b6b6-b7de-4af1-9316-d509beb0de90-utilities\") pod \"redhat-marketplace-4g85m\" (UID: \"df77b6b6-b7de-4af1-9316-d509beb0de90\") " pod="openshift-marketplace/redhat-marketplace-4g85m" Jan 22 07:51:41 crc kubenswrapper[4982]: I0122 07:51:41.498499 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df77b6b6-b7de-4af1-9316-d509beb0de90-catalog-content\") pod \"redhat-marketplace-4g85m\" (UID: \"df77b6b6-b7de-4af1-9316-d509beb0de90\") " pod="openshift-marketplace/redhat-marketplace-4g85m" Jan 22 07:51:41 crc kubenswrapper[4982]: I0122 07:51:41.600910 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df77b6b6-b7de-4af1-9316-d509beb0de90-utilities\") pod \"redhat-marketplace-4g85m\" (UID: \"df77b6b6-b7de-4af1-9316-d509beb0de90\") " pod="openshift-marketplace/redhat-marketplace-4g85m" Jan 22 07:51:41 crc kubenswrapper[4982]: I0122 07:51:41.600992 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df77b6b6-b7de-4af1-9316-d509beb0de90-catalog-content\") pod \"redhat-marketplace-4g85m\" (UID: \"df77b6b6-b7de-4af1-9316-d509beb0de90\") " pod="openshift-marketplace/redhat-marketplace-4g85m" Jan 22 07:51:41 crc kubenswrapper[4982]: I0122 07:51:41.601082 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t2kj4\" (UniqueName: \"kubernetes.io/projected/df77b6b6-b7de-4af1-9316-d509beb0de90-kube-api-access-t2kj4\") pod \"redhat-marketplace-4g85m\" (UID: \"df77b6b6-b7de-4af1-9316-d509beb0de90\") " pod="openshift-marketplace/redhat-marketplace-4g85m" Jan 22 07:51:41 crc kubenswrapper[4982]: I0122 07:51:41.601681 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df77b6b6-b7de-4af1-9316-d509beb0de90-catalog-content\") pod \"redhat-marketplace-4g85m\" (UID: \"df77b6b6-b7de-4af1-9316-d509beb0de90\") " pod="openshift-marketplace/redhat-marketplace-4g85m" Jan 22 07:51:41 crc kubenswrapper[4982]: I0122 07:51:41.601693 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df77b6b6-b7de-4af1-9316-d509beb0de90-utilities\") pod \"redhat-marketplace-4g85m\" (UID: \"df77b6b6-b7de-4af1-9316-d509beb0de90\") " pod="openshift-marketplace/redhat-marketplace-4g85m" Jan 22 07:51:41 crc kubenswrapper[4982]: I0122 07:51:41.620008 4982 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-t2kj4\" (UniqueName: \"kubernetes.io/projected/df77b6b6-b7de-4af1-9316-d509beb0de90-kube-api-access-t2kj4\") pod \"redhat-marketplace-4g85m\" (UID: \"df77b6b6-b7de-4af1-9316-d509beb0de90\") " pod="openshift-marketplace/redhat-marketplace-4g85m" Jan 22 07:51:41 crc kubenswrapper[4982]: I0122 07:51:41.736607 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4g85m" Jan 22 07:51:42 crc kubenswrapper[4982]: I0122 07:51:42.246998 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4g85m"] Jan 22 07:51:42 crc kubenswrapper[4982]: I0122 07:51:42.539202 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-zcf8c" Jan 22 07:51:42 crc kubenswrapper[4982]: I0122 07:51:42.624606 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8be015a3-c616-41d3-af20-ffcfb571653a-inventory\") pod \"8be015a3-c616-41d3-af20-ffcfb571653a\" (UID: \"8be015a3-c616-41d3-af20-ffcfb571653a\") " Jan 22 07:51:42 crc kubenswrapper[4982]: I0122 07:51:42.624841 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/8be015a3-c616-41d3-af20-ffcfb571653a-ssh-key-openstack-cell1\") pod \"8be015a3-c616-41d3-af20-ffcfb571653a\" (UID: \"8be015a3-c616-41d3-af20-ffcfb571653a\") " Jan 22 07:51:42 crc kubenswrapper[4982]: I0122 07:51:42.626094 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-89xx2\" (UniqueName: \"kubernetes.io/projected/8be015a3-c616-41d3-af20-ffcfb571653a-kube-api-access-89xx2\") pod \"8be015a3-c616-41d3-af20-ffcfb571653a\" (UID: \"8be015a3-c616-41d3-af20-ffcfb571653a\") " Jan 22 07:51:42 crc kubenswrapper[4982]: I0122 07:51:42.626322 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8be015a3-c616-41d3-af20-ffcfb571653a-ceph\") pod \"8be015a3-c616-41d3-af20-ffcfb571653a\" (UID: \"8be015a3-c616-41d3-af20-ffcfb571653a\") " Jan 22 07:51:42 crc kubenswrapper[4982]: I0122 07:51:42.633126 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8be015a3-c616-41d3-af20-ffcfb571653a-ceph" (OuterVolumeSpecName: "ceph") pod "8be015a3-c616-41d3-af20-ffcfb571653a" (UID: "8be015a3-c616-41d3-af20-ffcfb571653a"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:51:42 crc kubenswrapper[4982]: I0122 07:51:42.634148 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8be015a3-c616-41d3-af20-ffcfb571653a-kube-api-access-89xx2" (OuterVolumeSpecName: "kube-api-access-89xx2") pod "8be015a3-c616-41d3-af20-ffcfb571653a" (UID: "8be015a3-c616-41d3-af20-ffcfb571653a"). InnerVolumeSpecName "kube-api-access-89xx2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:51:42 crc kubenswrapper[4982]: I0122 07:51:42.663466 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8be015a3-c616-41d3-af20-ffcfb571653a-inventory" (OuterVolumeSpecName: "inventory") pod "8be015a3-c616-41d3-af20-ffcfb571653a" (UID: "8be015a3-c616-41d3-af20-ffcfb571653a"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:51:42 crc kubenswrapper[4982]: I0122 07:51:42.663912 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8be015a3-c616-41d3-af20-ffcfb571653a-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "8be015a3-c616-41d3-af20-ffcfb571653a" (UID: "8be015a3-c616-41d3-af20-ffcfb571653a"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:51:42 crc kubenswrapper[4982]: I0122 07:51:42.731595 4982 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/8be015a3-c616-41d3-af20-ffcfb571653a-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 22 07:51:42 crc kubenswrapper[4982]: I0122 07:51:42.731628 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-89xx2\" (UniqueName: \"kubernetes.io/projected/8be015a3-c616-41d3-af20-ffcfb571653a-kube-api-access-89xx2\") on node \"crc\" DevicePath \"\"" Jan 22 07:51:42 crc kubenswrapper[4982]: I0122 07:51:42.731637 4982 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8be015a3-c616-41d3-af20-ffcfb571653a-ceph\") on node \"crc\" DevicePath \"\"" Jan 22 07:51:42 crc kubenswrapper[4982]: I0122 07:51:42.731649 4982 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8be015a3-c616-41d3-af20-ffcfb571653a-inventory\") on node \"crc\" DevicePath \"\"" Jan 22 07:51:43 crc kubenswrapper[4982]: I0122 07:51:43.065102 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-zcf8c" event={"ID":"8be015a3-c616-41d3-af20-ffcfb571653a","Type":"ContainerDied","Data":"5c31e6387627ab3360e716f31f551e78d67a1889be1780f914cfb4ace7d04eee"} Jan 22 07:51:43 crc kubenswrapper[4982]: I0122 07:51:43.065544 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5c31e6387627ab3360e716f31f551e78d67a1889be1780f914cfb4ace7d04eee" Jan 22 07:51:43 crc kubenswrapper[4982]: I0122 07:51:43.065147 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-zcf8c" Jan 22 07:51:43 crc kubenswrapper[4982]: I0122 07:51:43.067974 4982 generic.go:334] "Generic (PLEG): container finished" podID="df77b6b6-b7de-4af1-9316-d509beb0de90" containerID="1e6a2805e8b037b410a32b94477e47928466fdf458637ef8f15ebcbd404e42a9" exitCode=0 Jan 22 07:51:43 crc kubenswrapper[4982]: I0122 07:51:43.068316 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4g85m" event={"ID":"df77b6b6-b7de-4af1-9316-d509beb0de90","Type":"ContainerDied","Data":"1e6a2805e8b037b410a32b94477e47928466fdf458637ef8f15ebcbd404e42a9"} Jan 22 07:51:43 crc kubenswrapper[4982]: I0122 07:51:43.068905 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4g85m" event={"ID":"df77b6b6-b7de-4af1-9316-d509beb0de90","Type":"ContainerStarted","Data":"f8e7c1766ea332152f0e259aeef71300f53a275b6cd5f9a717ad1416b58baf2e"} Jan 22 07:51:43 crc kubenswrapper[4982]: I0122 07:51:43.165226 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-openstack-openstack-cell1-gpklq"] Jan 22 07:51:43 crc kubenswrapper[4982]: E0122 07:51:43.165676 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8be015a3-c616-41d3-af20-ffcfb571653a" containerName="validate-network-openstack-openstack-cell1" Jan 22 07:51:43 crc kubenswrapper[4982]: I0122 07:51:43.165693 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="8be015a3-c616-41d3-af20-ffcfb571653a" containerName="validate-network-openstack-openstack-cell1" Jan 22 07:51:43 crc kubenswrapper[4982]: I0122 07:51:43.165922 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="8be015a3-c616-41d3-af20-ffcfb571653a" containerName="validate-network-openstack-openstack-cell1" Jan 22 07:51:43 crc kubenswrapper[4982]: I0122 07:51:43.166777 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-gpklq" Jan 22 07:51:43 crc kubenswrapper[4982]: I0122 07:51:43.168602 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 22 07:51:43 crc kubenswrapper[4982]: I0122 07:51:43.168956 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 22 07:51:43 crc kubenswrapper[4982]: I0122 07:51:43.177673 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-openstack-openstack-cell1-gpklq"] Jan 22 07:51:43 crc kubenswrapper[4982]: I0122 07:51:43.178991 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 22 07:51:43 crc kubenswrapper[4982]: I0122 07:51:43.180846 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-mjzt9" Jan 22 07:51:43 crc kubenswrapper[4982]: I0122 07:51:43.243897 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/6fc5f931-2ac0-4cf7-9938-532c8e526580-ssh-key-openstack-cell1\") pod \"install-os-openstack-openstack-cell1-gpklq\" (UID: \"6fc5f931-2ac0-4cf7-9938-532c8e526580\") " pod="openstack/install-os-openstack-openstack-cell1-gpklq" Jan 22 07:51:43 crc kubenswrapper[4982]: I0122 07:51:43.244010 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6fc5f931-2ac0-4cf7-9938-532c8e526580-ceph\") pod \"install-os-openstack-openstack-cell1-gpklq\" (UID: \"6fc5f931-2ac0-4cf7-9938-532c8e526580\") " pod="openstack/install-os-openstack-openstack-cell1-gpklq" Jan 22 07:51:43 crc kubenswrapper[4982]: I0122 07:51:43.244178 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6fc5f931-2ac0-4cf7-9938-532c8e526580-inventory\") pod \"install-os-openstack-openstack-cell1-gpklq\" (UID: \"6fc5f931-2ac0-4cf7-9938-532c8e526580\") " pod="openstack/install-os-openstack-openstack-cell1-gpklq" Jan 22 07:51:43 crc kubenswrapper[4982]: I0122 07:51:43.244370 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r74pb\" (UniqueName: \"kubernetes.io/projected/6fc5f931-2ac0-4cf7-9938-532c8e526580-kube-api-access-r74pb\") pod \"install-os-openstack-openstack-cell1-gpklq\" (UID: \"6fc5f931-2ac0-4cf7-9938-532c8e526580\") " pod="openstack/install-os-openstack-openstack-cell1-gpklq" Jan 22 07:51:43 crc kubenswrapper[4982]: I0122 07:51:43.347138 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/6fc5f931-2ac0-4cf7-9938-532c8e526580-ssh-key-openstack-cell1\") pod \"install-os-openstack-openstack-cell1-gpklq\" (UID: \"6fc5f931-2ac0-4cf7-9938-532c8e526580\") " pod="openstack/install-os-openstack-openstack-cell1-gpklq" Jan 22 07:51:43 crc kubenswrapper[4982]: I0122 07:51:43.347213 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6fc5f931-2ac0-4cf7-9938-532c8e526580-ceph\") pod \"install-os-openstack-openstack-cell1-gpklq\" (UID: \"6fc5f931-2ac0-4cf7-9938-532c8e526580\") " pod="openstack/install-os-openstack-openstack-cell1-gpklq" Jan 22 07:51:43 crc 
kubenswrapper[4982]: I0122 07:51:43.347293 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6fc5f931-2ac0-4cf7-9938-532c8e526580-inventory\") pod \"install-os-openstack-openstack-cell1-gpklq\" (UID: \"6fc5f931-2ac0-4cf7-9938-532c8e526580\") " pod="openstack/install-os-openstack-openstack-cell1-gpklq" Jan 22 07:51:43 crc kubenswrapper[4982]: I0122 07:51:43.347323 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r74pb\" (UniqueName: \"kubernetes.io/projected/6fc5f931-2ac0-4cf7-9938-532c8e526580-kube-api-access-r74pb\") pod \"install-os-openstack-openstack-cell1-gpklq\" (UID: \"6fc5f931-2ac0-4cf7-9938-532c8e526580\") " pod="openstack/install-os-openstack-openstack-cell1-gpklq" Jan 22 07:51:43 crc kubenswrapper[4982]: I0122 07:51:43.351901 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6fc5f931-2ac0-4cf7-9938-532c8e526580-inventory\") pod \"install-os-openstack-openstack-cell1-gpklq\" (UID: \"6fc5f931-2ac0-4cf7-9938-532c8e526580\") " pod="openstack/install-os-openstack-openstack-cell1-gpklq" Jan 22 07:51:43 crc kubenswrapper[4982]: I0122 07:51:43.352402 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6fc5f931-2ac0-4cf7-9938-532c8e526580-ceph\") pod \"install-os-openstack-openstack-cell1-gpklq\" (UID: \"6fc5f931-2ac0-4cf7-9938-532c8e526580\") " pod="openstack/install-os-openstack-openstack-cell1-gpklq" Jan 22 07:51:43 crc kubenswrapper[4982]: I0122 07:51:43.352662 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/6fc5f931-2ac0-4cf7-9938-532c8e526580-ssh-key-openstack-cell1\") pod \"install-os-openstack-openstack-cell1-gpklq\" (UID: \"6fc5f931-2ac0-4cf7-9938-532c8e526580\") " pod="openstack/install-os-openstack-openstack-cell1-gpklq" Jan 22 07:51:43 crc kubenswrapper[4982]: I0122 07:51:43.368973 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r74pb\" (UniqueName: \"kubernetes.io/projected/6fc5f931-2ac0-4cf7-9938-532c8e526580-kube-api-access-r74pb\") pod \"install-os-openstack-openstack-cell1-gpklq\" (UID: \"6fc5f931-2ac0-4cf7-9938-532c8e526580\") " pod="openstack/install-os-openstack-openstack-cell1-gpklq" Jan 22 07:51:43 crc kubenswrapper[4982]: I0122 07:51:43.489955 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-gpklq" Jan 22 07:51:44 crc kubenswrapper[4982]: I0122 07:51:44.095137 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-openstack-openstack-cell1-gpklq"] Jan 22 07:51:44 crc kubenswrapper[4982]: W0122 07:51:44.110405 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6fc5f931_2ac0_4cf7_9938_532c8e526580.slice/crio-708c64f2a32c398a48a0f03cab5ea83fae4248e228835cf491cd7f72ffb02adb WatchSource:0}: Error finding container 708c64f2a32c398a48a0f03cab5ea83fae4248e228835cf491cd7f72ffb02adb: Status 404 returned error can't find the container with id 708c64f2a32c398a48a0f03cab5ea83fae4248e228835cf491cd7f72ffb02adb Jan 22 07:51:45 crc kubenswrapper[4982]: I0122 07:51:45.086977 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-gpklq" event={"ID":"6fc5f931-2ac0-4cf7-9938-532c8e526580","Type":"ContainerStarted","Data":"e2d0250fc7b2480c6daa5a9879a1ba9cd38091ed62527651d7167c7a035e64ce"} Jan 22 07:51:45 crc kubenswrapper[4982]: I0122 07:51:45.087325 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-gpklq" event={"ID":"6fc5f931-2ac0-4cf7-9938-532c8e526580","Type":"ContainerStarted","Data":"708c64f2a32c398a48a0f03cab5ea83fae4248e228835cf491cd7f72ffb02adb"} Jan 22 07:51:45 crc kubenswrapper[4982]: I0122 07:51:45.091746 4982 generic.go:334] "Generic (PLEG): container finished" podID="df77b6b6-b7de-4af1-9316-d509beb0de90" containerID="b4fdf26b73dc8372b9b3da06c00ff9ad39768667b5c43da33b552d3cc6628c87" exitCode=0 Jan 22 07:51:45 crc kubenswrapper[4982]: I0122 07:51:45.091793 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4g85m" event={"ID":"df77b6b6-b7de-4af1-9316-d509beb0de90","Type":"ContainerDied","Data":"b4fdf26b73dc8372b9b3da06c00ff9ad39768667b5c43da33b552d3cc6628c87"} Jan 22 07:51:45 crc kubenswrapper[4982]: I0122 07:51:45.111144 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-openstack-openstack-cell1-gpklq" podStartSLOduration=1.543693074 podStartE2EDuration="2.111128229s" podCreationTimestamp="2026-01-22 07:51:43 +0000 UTC" firstStartedPulling="2026-01-22 07:51:44.113881778 +0000 UTC m=+7564.952519771" lastFinishedPulling="2026-01-22 07:51:44.681316923 +0000 UTC m=+7565.519954926" observedRunningTime="2026-01-22 07:51:45.10820767 +0000 UTC m=+7565.946845673" watchObservedRunningTime="2026-01-22 07:51:45.111128229 +0000 UTC m=+7565.949766232" Jan 22 07:51:46 crc kubenswrapper[4982]: I0122 07:51:46.104449 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4g85m" event={"ID":"df77b6b6-b7de-4af1-9316-d509beb0de90","Type":"ContainerStarted","Data":"56f274fd88cb639761014b7168fbcad40ffaf89cdce9a38f22af53d93f2bacdb"} Jan 22 07:51:46 crc kubenswrapper[4982]: I0122 07:51:46.130213 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4g85m" podStartSLOduration=2.730545802 podStartE2EDuration="5.13018947s" podCreationTimestamp="2026-01-22 07:51:41 +0000 UTC" firstStartedPulling="2026-01-22 07:51:43.070081237 +0000 UTC m=+7563.908719240" lastFinishedPulling="2026-01-22 07:51:45.469724905 +0000 UTC m=+7566.308362908" observedRunningTime="2026-01-22 07:51:46.122971004 +0000 UTC m=+7566.961609017" 
watchObservedRunningTime="2026-01-22 07:51:46.13018947 +0000 UTC m=+7566.968827483" Jan 22 07:51:47 crc kubenswrapper[4982]: I0122 07:51:47.719610 4982 scope.go:117] "RemoveContainer" containerID="4292384f8792d1496c63f3056a29f7893d807144bd86466effcdeb4f3e397538" Jan 22 07:51:47 crc kubenswrapper[4982]: E0122 07:51:47.720288 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:51:51 crc kubenswrapper[4982]: I0122 07:51:51.739244 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4g85m" Jan 22 07:51:51 crc kubenswrapper[4982]: I0122 07:51:51.739917 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4g85m" Jan 22 07:51:51 crc kubenswrapper[4982]: I0122 07:51:51.812652 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4g85m" Jan 22 07:51:52 crc kubenswrapper[4982]: I0122 07:51:52.248539 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4g85m" Jan 22 07:51:52 crc kubenswrapper[4982]: I0122 07:51:52.328909 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4g85m"] Jan 22 07:51:54 crc kubenswrapper[4982]: I0122 07:51:54.178609 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4g85m" podUID="df77b6b6-b7de-4af1-9316-d509beb0de90" containerName="registry-server" containerID="cri-o://56f274fd88cb639761014b7168fbcad40ffaf89cdce9a38f22af53d93f2bacdb" gracePeriod=2 Jan 22 07:51:54 crc kubenswrapper[4982]: I0122 07:51:54.760139 4982 util.go:48] "No ready sandbox for pod can be found. 
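The SyncLoop DELETE for redhat-marketplace-4g85m is followed by "Killing container with a grace period ... gracePeriod=2": the runtime signals the container to stop (SIGTERM by default) and escalates to SIGKILL if it is still running when the grace period lapses. A sketch of that escalation against an ordinary local process, as a stand-in for the CRI stop call rather than kubelet's actual code (Unix-only):

```go
package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

// stopWithGracePeriod stops a process the way a runtime honors a grace
// period: SIGTERM first, SIGKILL if it has not exited by the deadline.
func stopWithGracePeriod(cmd *exec.Cmd, grace time.Duration) error {
	_ = cmd.Process.Signal(syscall.SIGTERM)
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()
	select {
	case err := <-done:
		return err // exited within the grace period
	case <-time.After(grace):
		_ = cmd.Process.Kill() // escalate, like gracePeriod=2 expiring
		return <-done
	}
}

func main() {
	cmd := exec.Command("sleep", "60")
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	fmt.Println("process stopped:", stopWithGracePeriod(cmd, 2*time.Second))
}
```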
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4g85m" Jan 22 07:51:54 crc kubenswrapper[4982]: I0122 07:51:54.819884 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-96z2r"] Jan 22 07:51:54 crc kubenswrapper[4982]: E0122 07:51:54.820275 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df77b6b6-b7de-4af1-9316-d509beb0de90" containerName="registry-server" Jan 22 07:51:54 crc kubenswrapper[4982]: I0122 07:51:54.820293 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="df77b6b6-b7de-4af1-9316-d509beb0de90" containerName="registry-server" Jan 22 07:51:54 crc kubenswrapper[4982]: E0122 07:51:54.820318 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df77b6b6-b7de-4af1-9316-d509beb0de90" containerName="extract-content" Jan 22 07:51:54 crc kubenswrapper[4982]: I0122 07:51:54.820326 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="df77b6b6-b7de-4af1-9316-d509beb0de90" containerName="extract-content" Jan 22 07:51:54 crc kubenswrapper[4982]: E0122 07:51:54.820343 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df77b6b6-b7de-4af1-9316-d509beb0de90" containerName="extract-utilities" Jan 22 07:51:54 crc kubenswrapper[4982]: I0122 07:51:54.820350 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="df77b6b6-b7de-4af1-9316-d509beb0de90" containerName="extract-utilities" Jan 22 07:51:54 crc kubenswrapper[4982]: I0122 07:51:54.820553 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="df77b6b6-b7de-4af1-9316-d509beb0de90" containerName="registry-server" Jan 22 07:51:54 crc kubenswrapper[4982]: I0122 07:51:54.822206 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-96z2r" Jan 22 07:51:54 crc kubenswrapper[4982]: I0122 07:51:54.828939 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-96z2r"] Jan 22 07:51:54 crc kubenswrapper[4982]: I0122 07:51:54.920928 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df77b6b6-b7de-4af1-9316-d509beb0de90-catalog-content\") pod \"df77b6b6-b7de-4af1-9316-d509beb0de90\" (UID: \"df77b6b6-b7de-4af1-9316-d509beb0de90\") " Jan 22 07:51:54 crc kubenswrapper[4982]: I0122 07:51:54.921048 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t2kj4\" (UniqueName: \"kubernetes.io/projected/df77b6b6-b7de-4af1-9316-d509beb0de90-kube-api-access-t2kj4\") pod \"df77b6b6-b7de-4af1-9316-d509beb0de90\" (UID: \"df77b6b6-b7de-4af1-9316-d509beb0de90\") " Jan 22 07:51:54 crc kubenswrapper[4982]: I0122 07:51:54.921098 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df77b6b6-b7de-4af1-9316-d509beb0de90-utilities\") pod \"df77b6b6-b7de-4af1-9316-d509beb0de90\" (UID: \"df77b6b6-b7de-4af1-9316-d509beb0de90\") " Jan 22 07:51:54 crc kubenswrapper[4982]: I0122 07:51:54.921406 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1714ab2d-3d59-4d0c-a3bd-b63314d446b1-utilities\") pod \"certified-operators-96z2r\" (UID: \"1714ab2d-3d59-4d0c-a3bd-b63314d446b1\") " pod="openshift-marketplace/certified-operators-96z2r" Jan 22 07:51:54 crc kubenswrapper[4982]: I0122 07:51:54.921580 4982 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1714ab2d-3d59-4d0c-a3bd-b63314d446b1-catalog-content\") pod \"certified-operators-96z2r\" (UID: \"1714ab2d-3d59-4d0c-a3bd-b63314d446b1\") " pod="openshift-marketplace/certified-operators-96z2r" Jan 22 07:51:54 crc kubenswrapper[4982]: I0122 07:51:54.921657 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44rrj\" (UniqueName: \"kubernetes.io/projected/1714ab2d-3d59-4d0c-a3bd-b63314d446b1-kube-api-access-44rrj\") pod \"certified-operators-96z2r\" (UID: \"1714ab2d-3d59-4d0c-a3bd-b63314d446b1\") " pod="openshift-marketplace/certified-operators-96z2r" Jan 22 07:51:54 crc kubenswrapper[4982]: I0122 07:51:54.921930 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df77b6b6-b7de-4af1-9316-d509beb0de90-utilities" (OuterVolumeSpecName: "utilities") pod "df77b6b6-b7de-4af1-9316-d509beb0de90" (UID: "df77b6b6-b7de-4af1-9316-d509beb0de90"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:51:54 crc kubenswrapper[4982]: I0122 07:51:54.927176 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df77b6b6-b7de-4af1-9316-d509beb0de90-kube-api-access-t2kj4" (OuterVolumeSpecName: "kube-api-access-t2kj4") pod "df77b6b6-b7de-4af1-9316-d509beb0de90" (UID: "df77b6b6-b7de-4af1-9316-d509beb0de90"). InnerVolumeSpecName "kube-api-access-t2kj4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:51:54 crc kubenswrapper[4982]: I0122 07:51:54.948158 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df77b6b6-b7de-4af1-9316-d509beb0de90-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "df77b6b6-b7de-4af1-9316-d509beb0de90" (UID: "df77b6b6-b7de-4af1-9316-d509beb0de90"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:51:55 crc kubenswrapper[4982]: I0122 07:51:55.023883 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44rrj\" (UniqueName: \"kubernetes.io/projected/1714ab2d-3d59-4d0c-a3bd-b63314d446b1-kube-api-access-44rrj\") pod \"certified-operators-96z2r\" (UID: \"1714ab2d-3d59-4d0c-a3bd-b63314d446b1\") " pod="openshift-marketplace/certified-operators-96z2r" Jan 22 07:51:55 crc kubenswrapper[4982]: I0122 07:51:55.024040 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1714ab2d-3d59-4d0c-a3bd-b63314d446b1-utilities\") pod \"certified-operators-96z2r\" (UID: \"1714ab2d-3d59-4d0c-a3bd-b63314d446b1\") " pod="openshift-marketplace/certified-operators-96z2r" Jan 22 07:51:55 crc kubenswrapper[4982]: I0122 07:51:55.024183 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1714ab2d-3d59-4d0c-a3bd-b63314d446b1-catalog-content\") pod \"certified-operators-96z2r\" (UID: \"1714ab2d-3d59-4d0c-a3bd-b63314d446b1\") " pod="openshift-marketplace/certified-operators-96z2r" Jan 22 07:51:55 crc kubenswrapper[4982]: I0122 07:51:55.024263 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df77b6b6-b7de-4af1-9316-d509beb0de90-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 07:51:55 crc kubenswrapper[4982]: I0122 07:51:55.024285 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t2kj4\" (UniqueName: \"kubernetes.io/projected/df77b6b6-b7de-4af1-9316-d509beb0de90-kube-api-access-t2kj4\") on node \"crc\" DevicePath \"\"" Jan 22 07:51:55 crc kubenswrapper[4982]: I0122 07:51:55.024299 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df77b6b6-b7de-4af1-9316-d509beb0de90-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 07:51:55 crc kubenswrapper[4982]: I0122 07:51:55.024866 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1714ab2d-3d59-4d0c-a3bd-b63314d446b1-catalog-content\") pod \"certified-operators-96z2r\" (UID: \"1714ab2d-3d59-4d0c-a3bd-b63314d446b1\") " pod="openshift-marketplace/certified-operators-96z2r" Jan 22 07:51:55 crc kubenswrapper[4982]: I0122 07:51:55.025467 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1714ab2d-3d59-4d0c-a3bd-b63314d446b1-utilities\") pod \"certified-operators-96z2r\" (UID: \"1714ab2d-3d59-4d0c-a3bd-b63314d446b1\") " pod="openshift-marketplace/certified-operators-96z2r" Jan 22 07:51:55 crc kubenswrapper[4982]: I0122 07:51:55.050955 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44rrj\" (UniqueName: \"kubernetes.io/projected/1714ab2d-3d59-4d0c-a3bd-b63314d446b1-kube-api-access-44rrj\") pod \"certified-operators-96z2r\" (UID: \"1714ab2d-3d59-4d0c-a3bd-b63314d446b1\") " pod="openshift-marketplace/certified-operators-96z2r" Jan 22 07:51:55 crc kubenswrapper[4982]: I0122 07:51:55.143045 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-96z2r" Jan 22 07:51:55 crc kubenswrapper[4982]: I0122 07:51:55.201705 4982 generic.go:334] "Generic (PLEG): container finished" podID="df77b6b6-b7de-4af1-9316-d509beb0de90" containerID="56f274fd88cb639761014b7168fbcad40ffaf89cdce9a38f22af53d93f2bacdb" exitCode=0 Jan 22 07:51:55 crc kubenswrapper[4982]: I0122 07:51:55.202223 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4g85m" event={"ID":"df77b6b6-b7de-4af1-9316-d509beb0de90","Type":"ContainerDied","Data":"56f274fd88cb639761014b7168fbcad40ffaf89cdce9a38f22af53d93f2bacdb"} Jan 22 07:51:55 crc kubenswrapper[4982]: I0122 07:51:55.202268 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4g85m" event={"ID":"df77b6b6-b7de-4af1-9316-d509beb0de90","Type":"ContainerDied","Data":"f8e7c1766ea332152f0e259aeef71300f53a275b6cd5f9a717ad1416b58baf2e"} Jan 22 07:51:55 crc kubenswrapper[4982]: I0122 07:51:55.202305 4982 scope.go:117] "RemoveContainer" containerID="56f274fd88cb639761014b7168fbcad40ffaf89cdce9a38f22af53d93f2bacdb" Jan 22 07:51:55 crc kubenswrapper[4982]: I0122 07:51:55.202526 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4g85m" Jan 22 07:51:55 crc kubenswrapper[4982]: I0122 07:51:55.254601 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4g85m"] Jan 22 07:51:55 crc kubenswrapper[4982]: I0122 07:51:55.272130 4982 scope.go:117] "RemoveContainer" containerID="b4fdf26b73dc8372b9b3da06c00ff9ad39768667b5c43da33b552d3cc6628c87" Jan 22 07:51:55 crc kubenswrapper[4982]: I0122 07:51:55.282821 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4g85m"] Jan 22 07:51:55 crc kubenswrapper[4982]: I0122 07:51:55.365232 4982 scope.go:117] "RemoveContainer" containerID="1e6a2805e8b037b410a32b94477e47928466fdf458637ef8f15ebcbd404e42a9" Jan 22 07:51:55 crc kubenswrapper[4982]: I0122 07:51:55.416031 4982 scope.go:117] "RemoveContainer" containerID="56f274fd88cb639761014b7168fbcad40ffaf89cdce9a38f22af53d93f2bacdb" Jan 22 07:51:55 crc kubenswrapper[4982]: E0122 07:51:55.416934 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56f274fd88cb639761014b7168fbcad40ffaf89cdce9a38f22af53d93f2bacdb\": container with ID starting with 56f274fd88cb639761014b7168fbcad40ffaf89cdce9a38f22af53d93f2bacdb not found: ID does not exist" containerID="56f274fd88cb639761014b7168fbcad40ffaf89cdce9a38f22af53d93f2bacdb" Jan 22 07:51:55 crc kubenswrapper[4982]: I0122 07:51:55.416968 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56f274fd88cb639761014b7168fbcad40ffaf89cdce9a38f22af53d93f2bacdb"} err="failed to get container status \"56f274fd88cb639761014b7168fbcad40ffaf89cdce9a38f22af53d93f2bacdb\": rpc error: code = NotFound desc = could not find container \"56f274fd88cb639761014b7168fbcad40ffaf89cdce9a38f22af53d93f2bacdb\": container with ID starting with 56f274fd88cb639761014b7168fbcad40ffaf89cdce9a38f22af53d93f2bacdb not found: ID does not exist" Jan 22 07:51:55 crc kubenswrapper[4982]: I0122 07:51:55.416993 4982 scope.go:117] "RemoveContainer" containerID="b4fdf26b73dc8372b9b3da06c00ff9ad39768667b5c43da33b552d3cc6628c87" Jan 22 07:51:55 crc kubenswrapper[4982]: E0122 07:51:55.419359 4982 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4fdf26b73dc8372b9b3da06c00ff9ad39768667b5c43da33b552d3cc6628c87\": container with ID starting with b4fdf26b73dc8372b9b3da06c00ff9ad39768667b5c43da33b552d3cc6628c87 not found: ID does not exist" containerID="b4fdf26b73dc8372b9b3da06c00ff9ad39768667b5c43da33b552d3cc6628c87" Jan 22 07:51:55 crc kubenswrapper[4982]: I0122 07:51:55.419392 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4fdf26b73dc8372b9b3da06c00ff9ad39768667b5c43da33b552d3cc6628c87"} err="failed to get container status \"b4fdf26b73dc8372b9b3da06c00ff9ad39768667b5c43da33b552d3cc6628c87\": rpc error: code = NotFound desc = could not find container \"b4fdf26b73dc8372b9b3da06c00ff9ad39768667b5c43da33b552d3cc6628c87\": container with ID starting with b4fdf26b73dc8372b9b3da06c00ff9ad39768667b5c43da33b552d3cc6628c87 not found: ID does not exist" Jan 22 07:51:55 crc kubenswrapper[4982]: I0122 07:51:55.419421 4982 scope.go:117] "RemoveContainer" containerID="1e6a2805e8b037b410a32b94477e47928466fdf458637ef8f15ebcbd404e42a9" Jan 22 07:51:55 crc kubenswrapper[4982]: E0122 07:51:55.428422 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e6a2805e8b037b410a32b94477e47928466fdf458637ef8f15ebcbd404e42a9\": container with ID starting with 1e6a2805e8b037b410a32b94477e47928466fdf458637ef8f15ebcbd404e42a9 not found: ID does not exist" containerID="1e6a2805e8b037b410a32b94477e47928466fdf458637ef8f15ebcbd404e42a9" Jan 22 07:51:55 crc kubenswrapper[4982]: I0122 07:51:55.428511 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e6a2805e8b037b410a32b94477e47928466fdf458637ef8f15ebcbd404e42a9"} err="failed to get container status \"1e6a2805e8b037b410a32b94477e47928466fdf458637ef8f15ebcbd404e42a9\": rpc error: code = NotFound desc = could not find container \"1e6a2805e8b037b410a32b94477e47928466fdf458637ef8f15ebcbd404e42a9\": container with ID starting with 1e6a2805e8b037b410a32b94477e47928466fdf458637ef8f15ebcbd404e42a9 not found: ID does not exist" Jan 22 07:51:55 crc kubenswrapper[4982]: I0122 07:51:55.732402 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df77b6b6-b7de-4af1-9316-d509beb0de90" path="/var/lib/kubelet/pods/df77b6b6-b7de-4af1-9316-d509beb0de90/volumes" Jan 22 07:51:55 crc kubenswrapper[4982]: I0122 07:51:55.733598 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-96z2r"] Jan 22 07:51:56 crc kubenswrapper[4982]: I0122 07:51:56.214786 4982 generic.go:334] "Generic (PLEG): container finished" podID="1714ab2d-3d59-4d0c-a3bd-b63314d446b1" containerID="fe8366492a10afd0551608b3275d5a7c1451307c0a2c1d4875bc370b5931245a" exitCode=0 Jan 22 07:51:56 crc kubenswrapper[4982]: I0122 07:51:56.214844 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-96z2r" event={"ID":"1714ab2d-3d59-4d0c-a3bd-b63314d446b1","Type":"ContainerDied","Data":"fe8366492a10afd0551608b3275d5a7c1451307c0a2c1d4875bc370b5931245a"} Jan 22 07:51:56 crc kubenswrapper[4982]: I0122 07:51:56.215115 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-96z2r" event={"ID":"1714ab2d-3d59-4d0c-a3bd-b63314d446b1","Type":"ContainerStarted","Data":"0239de485db1ecda357e8fc0c2c91824d1e6d15400fef7856c4e91a86f59b43d"} Jan 22 
07:51:57 crc kubenswrapper[4982]: I0122 07:51:57.226354 4982 generic.go:334] "Generic (PLEG): container finished" podID="1714ab2d-3d59-4d0c-a3bd-b63314d446b1" containerID="07369042acdc034dfd4089ef3073eae9f975c8ef1a6389e4ac06c3a09cbb9969" exitCode=0 Jan 22 07:51:57 crc kubenswrapper[4982]: I0122 07:51:57.226403 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-96z2r" event={"ID":"1714ab2d-3d59-4d0c-a3bd-b63314d446b1","Type":"ContainerDied","Data":"07369042acdc034dfd4089ef3073eae9f975c8ef1a6389e4ac06c3a09cbb9969"} Jan 22 07:51:57 crc kubenswrapper[4982]: E0122 07:51:57.389404 4982 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1714ab2d_3d59_4d0c_a3bd_b63314d446b1.slice/crio-conmon-07369042acdc034dfd4089ef3073eae9f975c8ef1a6389e4ac06c3a09cbb9969.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1714ab2d_3d59_4d0c_a3bd_b63314d446b1.slice/crio-07369042acdc034dfd4089ef3073eae9f975c8ef1a6389e4ac06c3a09cbb9969.scope\": RecentStats: unable to find data in memory cache]" Jan 22 07:51:58 crc kubenswrapper[4982]: I0122 07:51:58.719506 4982 scope.go:117] "RemoveContainer" containerID="4292384f8792d1496c63f3056a29f7893d807144bd86466effcdeb4f3e397538" Jan 22 07:51:58 crc kubenswrapper[4982]: E0122 07:51:58.720101 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:51:59 crc kubenswrapper[4982]: I0122 07:51:59.245678 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-96z2r" event={"ID":"1714ab2d-3d59-4d0c-a3bd-b63314d446b1","Type":"ContainerStarted","Data":"b23232a2754db33416b8cfefbf31b9fd1c3d0c82563c02fb213c7fa503f57a7e"} Jan 22 07:52:05 crc kubenswrapper[4982]: I0122 07:52:05.143343 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-96z2r" Jan 22 07:52:05 crc kubenswrapper[4982]: I0122 07:52:05.144945 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-96z2r" Jan 22 07:52:05 crc kubenswrapper[4982]: I0122 07:52:05.191756 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-96z2r" Jan 22 07:52:05 crc kubenswrapper[4982]: I0122 07:52:05.211037 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-96z2r" podStartSLOduration=8.814614218 podStartE2EDuration="11.211019639s" podCreationTimestamp="2026-01-22 07:51:54 +0000 UTC" firstStartedPulling="2026-01-22 07:51:56.217346694 +0000 UTC m=+7577.055984697" lastFinishedPulling="2026-01-22 07:51:58.613752104 +0000 UTC m=+7579.452390118" observedRunningTime="2026-01-22 07:51:59.265238487 +0000 UTC m=+7580.103876510" watchObservedRunningTime="2026-01-22 07:52:05.211019639 +0000 UTC m=+7586.049657652" Jan 22 07:52:05 crc kubenswrapper[4982]: I0122 07:52:05.368370 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-marketplace/certified-operators-96z2r" Jan 22 07:52:05 crc kubenswrapper[4982]: I0122 07:52:05.429196 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-96z2r"] Jan 22 07:52:07 crc kubenswrapper[4982]: I0122 07:52:07.338661 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-96z2r" podUID="1714ab2d-3d59-4d0c-a3bd-b63314d446b1" containerName="registry-server" containerID="cri-o://b23232a2754db33416b8cfefbf31b9fd1c3d0c82563c02fb213c7fa503f57a7e" gracePeriod=2 Jan 22 07:52:08 crc kubenswrapper[4982]: I0122 07:52:08.022545 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-96z2r" Jan 22 07:52:08 crc kubenswrapper[4982]: I0122 07:52:08.123311 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-44rrj\" (UniqueName: \"kubernetes.io/projected/1714ab2d-3d59-4d0c-a3bd-b63314d446b1-kube-api-access-44rrj\") pod \"1714ab2d-3d59-4d0c-a3bd-b63314d446b1\" (UID: \"1714ab2d-3d59-4d0c-a3bd-b63314d446b1\") " Jan 22 07:52:08 crc kubenswrapper[4982]: I0122 07:52:08.123501 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1714ab2d-3d59-4d0c-a3bd-b63314d446b1-catalog-content\") pod \"1714ab2d-3d59-4d0c-a3bd-b63314d446b1\" (UID: \"1714ab2d-3d59-4d0c-a3bd-b63314d446b1\") " Jan 22 07:52:08 crc kubenswrapper[4982]: I0122 07:52:08.123747 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1714ab2d-3d59-4d0c-a3bd-b63314d446b1-utilities\") pod \"1714ab2d-3d59-4d0c-a3bd-b63314d446b1\" (UID: \"1714ab2d-3d59-4d0c-a3bd-b63314d446b1\") " Jan 22 07:52:08 crc kubenswrapper[4982]: I0122 07:52:08.124548 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1714ab2d-3d59-4d0c-a3bd-b63314d446b1-utilities" (OuterVolumeSpecName: "utilities") pod "1714ab2d-3d59-4d0c-a3bd-b63314d446b1" (UID: "1714ab2d-3d59-4d0c-a3bd-b63314d446b1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:52:08 crc kubenswrapper[4982]: I0122 07:52:08.129453 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1714ab2d-3d59-4d0c-a3bd-b63314d446b1-kube-api-access-44rrj" (OuterVolumeSpecName: "kube-api-access-44rrj") pod "1714ab2d-3d59-4d0c-a3bd-b63314d446b1" (UID: "1714ab2d-3d59-4d0c-a3bd-b63314d446b1"). InnerVolumeSpecName "kube-api-access-44rrj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:52:08 crc kubenswrapper[4982]: I0122 07:52:08.169765 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1714ab2d-3d59-4d0c-a3bd-b63314d446b1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1714ab2d-3d59-4d0c-a3bd-b63314d446b1" (UID: "1714ab2d-3d59-4d0c-a3bd-b63314d446b1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:52:08 crc kubenswrapper[4982]: I0122 07:52:08.226352 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1714ab2d-3d59-4d0c-a3bd-b63314d446b1-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 07:52:08 crc kubenswrapper[4982]: I0122 07:52:08.226725 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-44rrj\" (UniqueName: \"kubernetes.io/projected/1714ab2d-3d59-4d0c-a3bd-b63314d446b1-kube-api-access-44rrj\") on node \"crc\" DevicePath \"\"" Jan 22 07:52:08 crc kubenswrapper[4982]: I0122 07:52:08.226743 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1714ab2d-3d59-4d0c-a3bd-b63314d446b1-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 07:52:08 crc kubenswrapper[4982]: I0122 07:52:08.349600 4982 generic.go:334] "Generic (PLEG): container finished" podID="1714ab2d-3d59-4d0c-a3bd-b63314d446b1" containerID="b23232a2754db33416b8cfefbf31b9fd1c3d0c82563c02fb213c7fa503f57a7e" exitCode=0 Jan 22 07:52:08 crc kubenswrapper[4982]: I0122 07:52:08.349671 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-96z2r" Jan 22 07:52:08 crc kubenswrapper[4982]: I0122 07:52:08.349670 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-96z2r" event={"ID":"1714ab2d-3d59-4d0c-a3bd-b63314d446b1","Type":"ContainerDied","Data":"b23232a2754db33416b8cfefbf31b9fd1c3d0c82563c02fb213c7fa503f57a7e"} Jan 22 07:52:08 crc kubenswrapper[4982]: I0122 07:52:08.349751 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-96z2r" event={"ID":"1714ab2d-3d59-4d0c-a3bd-b63314d446b1","Type":"ContainerDied","Data":"0239de485db1ecda357e8fc0c2c91824d1e6d15400fef7856c4e91a86f59b43d"} Jan 22 07:52:08 crc kubenswrapper[4982]: I0122 07:52:08.349779 4982 scope.go:117] "RemoveContainer" containerID="b23232a2754db33416b8cfefbf31b9fd1c3d0c82563c02fb213c7fa503f57a7e" Jan 22 07:52:08 crc kubenswrapper[4982]: I0122 07:52:08.372526 4982 scope.go:117] "RemoveContainer" containerID="07369042acdc034dfd4089ef3073eae9f975c8ef1a6389e4ac06c3a09cbb9969" Jan 22 07:52:08 crc kubenswrapper[4982]: I0122 07:52:08.386258 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-96z2r"] Jan 22 07:52:08 crc kubenswrapper[4982]: I0122 07:52:08.394350 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-96z2r"] Jan 22 07:52:08 crc kubenswrapper[4982]: I0122 07:52:08.414961 4982 scope.go:117] "RemoveContainer" containerID="fe8366492a10afd0551608b3275d5a7c1451307c0a2c1d4875bc370b5931245a" Jan 22 07:52:08 crc kubenswrapper[4982]: I0122 07:52:08.440827 4982 scope.go:117] "RemoveContainer" containerID="b23232a2754db33416b8cfefbf31b9fd1c3d0c82563c02fb213c7fa503f57a7e" Jan 22 07:52:08 crc kubenswrapper[4982]: E0122 07:52:08.441255 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b23232a2754db33416b8cfefbf31b9fd1c3d0c82563c02fb213c7fa503f57a7e\": container with ID starting with b23232a2754db33416b8cfefbf31b9fd1c3d0c82563c02fb213c7fa503f57a7e not found: ID does not exist" containerID="b23232a2754db33416b8cfefbf31b9fd1c3d0c82563c02fb213c7fa503f57a7e" Jan 22 07:52:08 crc kubenswrapper[4982]: I0122 07:52:08.441290 
4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b23232a2754db33416b8cfefbf31b9fd1c3d0c82563c02fb213c7fa503f57a7e"} err="failed to get container status \"b23232a2754db33416b8cfefbf31b9fd1c3d0c82563c02fb213c7fa503f57a7e\": rpc error: code = NotFound desc = could not find container \"b23232a2754db33416b8cfefbf31b9fd1c3d0c82563c02fb213c7fa503f57a7e\": container with ID starting with b23232a2754db33416b8cfefbf31b9fd1c3d0c82563c02fb213c7fa503f57a7e not found: ID does not exist" Jan 22 07:52:08 crc kubenswrapper[4982]: I0122 07:52:08.441326 4982 scope.go:117] "RemoveContainer" containerID="07369042acdc034dfd4089ef3073eae9f975c8ef1a6389e4ac06c3a09cbb9969" Jan 22 07:52:08 crc kubenswrapper[4982]: E0122 07:52:08.441582 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07369042acdc034dfd4089ef3073eae9f975c8ef1a6389e4ac06c3a09cbb9969\": container with ID starting with 07369042acdc034dfd4089ef3073eae9f975c8ef1a6389e4ac06c3a09cbb9969 not found: ID does not exist" containerID="07369042acdc034dfd4089ef3073eae9f975c8ef1a6389e4ac06c3a09cbb9969" Jan 22 07:52:08 crc kubenswrapper[4982]: I0122 07:52:08.441609 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07369042acdc034dfd4089ef3073eae9f975c8ef1a6389e4ac06c3a09cbb9969"} err="failed to get container status \"07369042acdc034dfd4089ef3073eae9f975c8ef1a6389e4ac06c3a09cbb9969\": rpc error: code = NotFound desc = could not find container \"07369042acdc034dfd4089ef3073eae9f975c8ef1a6389e4ac06c3a09cbb9969\": container with ID starting with 07369042acdc034dfd4089ef3073eae9f975c8ef1a6389e4ac06c3a09cbb9969 not found: ID does not exist" Jan 22 07:52:08 crc kubenswrapper[4982]: I0122 07:52:08.441624 4982 scope.go:117] "RemoveContainer" containerID="fe8366492a10afd0551608b3275d5a7c1451307c0a2c1d4875bc370b5931245a" Jan 22 07:52:08 crc kubenswrapper[4982]: E0122 07:52:08.442086 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe8366492a10afd0551608b3275d5a7c1451307c0a2c1d4875bc370b5931245a\": container with ID starting with fe8366492a10afd0551608b3275d5a7c1451307c0a2c1d4875bc370b5931245a not found: ID does not exist" containerID="fe8366492a10afd0551608b3275d5a7c1451307c0a2c1d4875bc370b5931245a" Jan 22 07:52:08 crc kubenswrapper[4982]: I0122 07:52:08.442113 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe8366492a10afd0551608b3275d5a7c1451307c0a2c1d4875bc370b5931245a"} err="failed to get container status \"fe8366492a10afd0551608b3275d5a7c1451307c0a2c1d4875bc370b5931245a\": rpc error: code = NotFound desc = could not find container \"fe8366492a10afd0551608b3275d5a7c1451307c0a2c1d4875bc370b5931245a\": container with ID starting with fe8366492a10afd0551608b3275d5a7c1451307c0a2c1d4875bc370b5931245a not found: ID does not exist" Jan 22 07:52:09 crc kubenswrapper[4982]: I0122 07:52:09.728610 4982 scope.go:117] "RemoveContainer" containerID="4292384f8792d1496c63f3056a29f7893d807144bd86466effcdeb4f3e397538" Jan 22 07:52:09 crc kubenswrapper[4982]: E0122 07:52:09.729198 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:52:09 crc kubenswrapper[4982]: I0122 07:52:09.733579 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1714ab2d-3d59-4d0c-a3bd-b63314d446b1" path="/var/lib/kubelet/pods/1714ab2d-3d59-4d0c-a3bd-b63314d446b1/volumes" Jan 22 07:52:22 crc kubenswrapper[4982]: I0122 07:52:22.720182 4982 scope.go:117] "RemoveContainer" containerID="4292384f8792d1496c63f3056a29f7893d807144bd86466effcdeb4f3e397538" Jan 22 07:52:22 crc kubenswrapper[4982]: E0122 07:52:22.720972 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:52:29 crc kubenswrapper[4982]: I0122 07:52:29.567094 4982 generic.go:334] "Generic (PLEG): container finished" podID="6fc5f931-2ac0-4cf7-9938-532c8e526580" containerID="e2d0250fc7b2480c6daa5a9879a1ba9cd38091ed62527651d7167c7a035e64ce" exitCode=0 Jan 22 07:52:29 crc kubenswrapper[4982]: I0122 07:52:29.567165 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-gpklq" event={"ID":"6fc5f931-2ac0-4cf7-9938-532c8e526580","Type":"ContainerDied","Data":"e2d0250fc7b2480c6daa5a9879a1ba9cd38091ed62527651d7167c7a035e64ce"} Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.094662 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-gpklq" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.241316 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6fc5f931-2ac0-4cf7-9938-532c8e526580-inventory\") pod \"6fc5f931-2ac0-4cf7-9938-532c8e526580\" (UID: \"6fc5f931-2ac0-4cf7-9938-532c8e526580\") " Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.241393 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r74pb\" (UniqueName: \"kubernetes.io/projected/6fc5f931-2ac0-4cf7-9938-532c8e526580-kube-api-access-r74pb\") pod \"6fc5f931-2ac0-4cf7-9938-532c8e526580\" (UID: \"6fc5f931-2ac0-4cf7-9938-532c8e526580\") " Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.241511 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6fc5f931-2ac0-4cf7-9938-532c8e526580-ceph\") pod \"6fc5f931-2ac0-4cf7-9938-532c8e526580\" (UID: \"6fc5f931-2ac0-4cf7-9938-532c8e526580\") " Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.241567 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/6fc5f931-2ac0-4cf7-9938-532c8e526580-ssh-key-openstack-cell1\") pod \"6fc5f931-2ac0-4cf7-9938-532c8e526580\" (UID: \"6fc5f931-2ac0-4cf7-9938-532c8e526580\") " Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.247280 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fc5f931-2ac0-4cf7-9938-532c8e526580-ceph" (OuterVolumeSpecName: "ceph") pod "6fc5f931-2ac0-4cf7-9938-532c8e526580" (UID: "6fc5f931-2ac0-4cf7-9938-532c8e526580"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.247294 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6fc5f931-2ac0-4cf7-9938-532c8e526580-kube-api-access-r74pb" (OuterVolumeSpecName: "kube-api-access-r74pb") pod "6fc5f931-2ac0-4cf7-9938-532c8e526580" (UID: "6fc5f931-2ac0-4cf7-9938-532c8e526580"). InnerVolumeSpecName "kube-api-access-r74pb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.270540 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fc5f931-2ac0-4cf7-9938-532c8e526580-inventory" (OuterVolumeSpecName: "inventory") pod "6fc5f931-2ac0-4cf7-9938-532c8e526580" (UID: "6fc5f931-2ac0-4cf7-9938-532c8e526580"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.271024 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fc5f931-2ac0-4cf7-9938-532c8e526580-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "6fc5f931-2ac0-4cf7-9938-532c8e526580" (UID: "6fc5f931-2ac0-4cf7-9938-532c8e526580"). InnerVolumeSpecName "ssh-key-openstack-cell1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.344426 4982 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6fc5f931-2ac0-4cf7-9938-532c8e526580-ceph\") on node \"crc\" DevicePath \"\"" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.344463 4982 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/6fc5f931-2ac0-4cf7-9938-532c8e526580-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.344473 4982 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6fc5f931-2ac0-4cf7-9938-532c8e526580-inventory\") on node \"crc\" DevicePath \"\"" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.344483 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r74pb\" (UniqueName: \"kubernetes.io/projected/6fc5f931-2ac0-4cf7-9938-532c8e526580-kube-api-access-r74pb\") on node \"crc\" DevicePath \"\"" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.595824 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-gpklq" event={"ID":"6fc5f931-2ac0-4cf7-9938-532c8e526580","Type":"ContainerDied","Data":"708c64f2a32c398a48a0f03cab5ea83fae4248e228835cf491cd7f72ffb02adb"} Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.595879 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="708c64f2a32c398a48a0f03cab5ea83fae4248e228835cf491cd7f72ffb02adb" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.595909 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-gpklq" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.694549 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-ntxdv"] Jan 22 07:52:31 crc kubenswrapper[4982]: E0122 07:52:31.695096 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fc5f931-2ac0-4cf7-9938-532c8e526580" containerName="install-os-openstack-openstack-cell1" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.695121 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fc5f931-2ac0-4cf7-9938-532c8e526580" containerName="install-os-openstack-openstack-cell1" Jan 22 07:52:31 crc kubenswrapper[4982]: E0122 07:52:31.695147 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1714ab2d-3d59-4d0c-a3bd-b63314d446b1" containerName="registry-server" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.695156 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="1714ab2d-3d59-4d0c-a3bd-b63314d446b1" containerName="registry-server" Jan 22 07:52:31 crc kubenswrapper[4982]: E0122 07:52:31.695179 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1714ab2d-3d59-4d0c-a3bd-b63314d446b1" containerName="extract-content" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.695189 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="1714ab2d-3d59-4d0c-a3bd-b63314d446b1" containerName="extract-content" Jan 22 07:52:31 crc kubenswrapper[4982]: E0122 07:52:31.695234 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1714ab2d-3d59-4d0c-a3bd-b63314d446b1" containerName="extract-utilities" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.695242 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="1714ab2d-3d59-4d0c-a3bd-b63314d446b1" containerName="extract-utilities" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.695573 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fc5f931-2ac0-4cf7-9938-532c8e526580" containerName="install-os-openstack-openstack-cell1" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.695613 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="1714ab2d-3d59-4d0c-a3bd-b63314d446b1" containerName="registry-server" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.696533 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-ntxdv" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.698803 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-mjzt9" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.699486 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.699545 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.699627 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.704449 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-ntxdv"] Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.755101 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4349a4d4-0264-4986-b1ba-02ee440c913b-inventory\") pod \"configure-os-openstack-openstack-cell1-ntxdv\" (UID: \"4349a4d4-0264-4986-b1ba-02ee440c913b\") " pod="openstack/configure-os-openstack-openstack-cell1-ntxdv" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.755185 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/4349a4d4-0264-4986-b1ba-02ee440c913b-ssh-key-openstack-cell1\") pod \"configure-os-openstack-openstack-cell1-ntxdv\" (UID: \"4349a4d4-0264-4986-b1ba-02ee440c913b\") " pod="openstack/configure-os-openstack-openstack-cell1-ntxdv" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.755217 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4349a4d4-0264-4986-b1ba-02ee440c913b-ceph\") pod \"configure-os-openstack-openstack-cell1-ntxdv\" (UID: \"4349a4d4-0264-4986-b1ba-02ee440c913b\") " pod="openstack/configure-os-openstack-openstack-cell1-ntxdv" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.755311 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bcnbf\" (UniqueName: \"kubernetes.io/projected/4349a4d4-0264-4986-b1ba-02ee440c913b-kube-api-access-bcnbf\") pod \"configure-os-openstack-openstack-cell1-ntxdv\" (UID: \"4349a4d4-0264-4986-b1ba-02ee440c913b\") " pod="openstack/configure-os-openstack-openstack-cell1-ntxdv" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.857686 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4349a4d4-0264-4986-b1ba-02ee440c913b-inventory\") pod \"configure-os-openstack-openstack-cell1-ntxdv\" (UID: \"4349a4d4-0264-4986-b1ba-02ee440c913b\") " pod="openstack/configure-os-openstack-openstack-cell1-ntxdv" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.857822 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/4349a4d4-0264-4986-b1ba-02ee440c913b-ssh-key-openstack-cell1\") pod \"configure-os-openstack-openstack-cell1-ntxdv\" (UID: \"4349a4d4-0264-4986-b1ba-02ee440c913b\") " 
pod="openstack/configure-os-openstack-openstack-cell1-ntxdv" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.857877 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4349a4d4-0264-4986-b1ba-02ee440c913b-ceph\") pod \"configure-os-openstack-openstack-cell1-ntxdv\" (UID: \"4349a4d4-0264-4986-b1ba-02ee440c913b\") " pod="openstack/configure-os-openstack-openstack-cell1-ntxdv" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.858012 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bcnbf\" (UniqueName: \"kubernetes.io/projected/4349a4d4-0264-4986-b1ba-02ee440c913b-kube-api-access-bcnbf\") pod \"configure-os-openstack-openstack-cell1-ntxdv\" (UID: \"4349a4d4-0264-4986-b1ba-02ee440c913b\") " pod="openstack/configure-os-openstack-openstack-cell1-ntxdv" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.862217 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4349a4d4-0264-4986-b1ba-02ee440c913b-inventory\") pod \"configure-os-openstack-openstack-cell1-ntxdv\" (UID: \"4349a4d4-0264-4986-b1ba-02ee440c913b\") " pod="openstack/configure-os-openstack-openstack-cell1-ntxdv" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.863320 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/4349a4d4-0264-4986-b1ba-02ee440c913b-ssh-key-openstack-cell1\") pod \"configure-os-openstack-openstack-cell1-ntxdv\" (UID: \"4349a4d4-0264-4986-b1ba-02ee440c913b\") " pod="openstack/configure-os-openstack-openstack-cell1-ntxdv" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.863495 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4349a4d4-0264-4986-b1ba-02ee440c913b-ceph\") pod \"configure-os-openstack-openstack-cell1-ntxdv\" (UID: \"4349a4d4-0264-4986-b1ba-02ee440c913b\") " pod="openstack/configure-os-openstack-openstack-cell1-ntxdv" Jan 22 07:52:31 crc kubenswrapper[4982]: I0122 07:52:31.876298 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bcnbf\" (UniqueName: \"kubernetes.io/projected/4349a4d4-0264-4986-b1ba-02ee440c913b-kube-api-access-bcnbf\") pod \"configure-os-openstack-openstack-cell1-ntxdv\" (UID: \"4349a4d4-0264-4986-b1ba-02ee440c913b\") " pod="openstack/configure-os-openstack-openstack-cell1-ntxdv" Jan 22 07:52:32 crc kubenswrapper[4982]: I0122 07:52:32.013138 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-ntxdv" Jan 22 07:52:32 crc kubenswrapper[4982]: I0122 07:52:32.575938 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-ntxdv"] Jan 22 07:52:32 crc kubenswrapper[4982]: I0122 07:52:32.607765 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-ntxdv" event={"ID":"4349a4d4-0264-4986-b1ba-02ee440c913b","Type":"ContainerStarted","Data":"560e7bd1e8ed6ec886d51194345e272e642e9f90ec30a342cdf76e82accf1310"} Jan 22 07:52:33 crc kubenswrapper[4982]: I0122 07:52:33.617623 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-ntxdv" event={"ID":"4349a4d4-0264-4986-b1ba-02ee440c913b","Type":"ContainerStarted","Data":"4f1c91ac3023062dee3685ceb14fe094e5d1205c44e9bf4da19b0181533c53de"} Jan 22 07:52:34 crc kubenswrapper[4982]: I0122 07:52:34.719255 4982 scope.go:117] "RemoveContainer" containerID="4292384f8792d1496c63f3056a29f7893d807144bd86466effcdeb4f3e397538" Jan 22 07:52:34 crc kubenswrapper[4982]: E0122 07:52:34.720349 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:52:48 crc kubenswrapper[4982]: I0122 07:52:48.719544 4982 scope.go:117] "RemoveContainer" containerID="4292384f8792d1496c63f3056a29f7893d807144bd86466effcdeb4f3e397538" Jan 22 07:52:48 crc kubenswrapper[4982]: E0122 07:52:48.720452 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:52:59 crc kubenswrapper[4982]: I0122 07:52:59.725710 4982 scope.go:117] "RemoveContainer" containerID="4292384f8792d1496c63f3056a29f7893d807144bd86466effcdeb4f3e397538" Jan 22 07:52:59 crc kubenswrapper[4982]: E0122 07:52:59.726553 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:53:08 crc kubenswrapper[4982]: I0122 07:53:08.217772 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-openstack-openstack-cell1-ntxdv" podStartSLOduration=36.794470332 podStartE2EDuration="37.217754201s" podCreationTimestamp="2026-01-22 07:52:31 +0000 UTC" firstStartedPulling="2026-01-22 07:52:32.57968504 +0000 UTC m=+7613.418323043" lastFinishedPulling="2026-01-22 07:52:33.002968869 +0000 UTC m=+7613.841606912" observedRunningTime="2026-01-22 07:52:33.641082249 +0000 UTC m=+7614.479720252" watchObservedRunningTime="2026-01-22 
07:53:08.217754201 +0000 UTC m=+7649.056392194" Jan 22 07:53:08 crc kubenswrapper[4982]: I0122 07:53:08.230708 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dmtbp"] Jan 22 07:53:08 crc kubenswrapper[4982]: I0122 07:53:08.233106 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dmtbp" Jan 22 07:53:08 crc kubenswrapper[4982]: I0122 07:53:08.240861 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dmtbp"] Jan 22 07:53:08 crc kubenswrapper[4982]: I0122 07:53:08.323826 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e5bfcba-8e81-47f0-8fe9-7607e9cbc9e2-catalog-content\") pod \"community-operators-dmtbp\" (UID: \"0e5bfcba-8e81-47f0-8fe9-7607e9cbc9e2\") " pod="openshift-marketplace/community-operators-dmtbp" Jan 22 07:53:08 crc kubenswrapper[4982]: I0122 07:53:08.324238 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e5bfcba-8e81-47f0-8fe9-7607e9cbc9e2-utilities\") pod \"community-operators-dmtbp\" (UID: \"0e5bfcba-8e81-47f0-8fe9-7607e9cbc9e2\") " pod="openshift-marketplace/community-operators-dmtbp" Jan 22 07:53:08 crc kubenswrapper[4982]: I0122 07:53:08.324374 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmg5j\" (UniqueName: \"kubernetes.io/projected/0e5bfcba-8e81-47f0-8fe9-7607e9cbc9e2-kube-api-access-bmg5j\") pod \"community-operators-dmtbp\" (UID: \"0e5bfcba-8e81-47f0-8fe9-7607e9cbc9e2\") " pod="openshift-marketplace/community-operators-dmtbp" Jan 22 07:53:08 crc kubenswrapper[4982]: I0122 07:53:08.426289 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e5bfcba-8e81-47f0-8fe9-7607e9cbc9e2-utilities\") pod \"community-operators-dmtbp\" (UID: \"0e5bfcba-8e81-47f0-8fe9-7607e9cbc9e2\") " pod="openshift-marketplace/community-operators-dmtbp" Jan 22 07:53:08 crc kubenswrapper[4982]: I0122 07:53:08.426385 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmg5j\" (UniqueName: \"kubernetes.io/projected/0e5bfcba-8e81-47f0-8fe9-7607e9cbc9e2-kube-api-access-bmg5j\") pod \"community-operators-dmtbp\" (UID: \"0e5bfcba-8e81-47f0-8fe9-7607e9cbc9e2\") " pod="openshift-marketplace/community-operators-dmtbp" Jan 22 07:53:08 crc kubenswrapper[4982]: I0122 07:53:08.426542 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e5bfcba-8e81-47f0-8fe9-7607e9cbc9e2-catalog-content\") pod \"community-operators-dmtbp\" (UID: \"0e5bfcba-8e81-47f0-8fe9-7607e9cbc9e2\") " pod="openshift-marketplace/community-operators-dmtbp" Jan 22 07:53:08 crc kubenswrapper[4982]: I0122 07:53:08.427065 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e5bfcba-8e81-47f0-8fe9-7607e9cbc9e2-utilities\") pod \"community-operators-dmtbp\" (UID: \"0e5bfcba-8e81-47f0-8fe9-7607e9cbc9e2\") " pod="openshift-marketplace/community-operators-dmtbp" Jan 22 07:53:08 crc kubenswrapper[4982]: I0122 07:53:08.427144 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e5bfcba-8e81-47f0-8fe9-7607e9cbc9e2-catalog-content\") pod \"community-operators-dmtbp\" (UID: \"0e5bfcba-8e81-47f0-8fe9-7607e9cbc9e2\") " pod="openshift-marketplace/community-operators-dmtbp" Jan 22 07:53:08 crc kubenswrapper[4982]: I0122 07:53:08.450463 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmg5j\" (UniqueName: \"kubernetes.io/projected/0e5bfcba-8e81-47f0-8fe9-7607e9cbc9e2-kube-api-access-bmg5j\") pod \"community-operators-dmtbp\" (UID: \"0e5bfcba-8e81-47f0-8fe9-7607e9cbc9e2\") " pod="openshift-marketplace/community-operators-dmtbp" Jan 22 07:53:08 crc kubenswrapper[4982]: I0122 07:53:08.564773 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dmtbp" Jan 22 07:53:09 crc kubenswrapper[4982]: I0122 07:53:09.054976 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dmtbp"] Jan 22 07:53:09 crc kubenswrapper[4982]: W0122 07:53:09.055468 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0e5bfcba_8e81_47f0_8fe9_7607e9cbc9e2.slice/crio-4a9a92ed9f1c5f96cc79464a3dff3fdabae12d248a4b28a21aa7515728efe146 WatchSource:0}: Error finding container 4a9a92ed9f1c5f96cc79464a3dff3fdabae12d248a4b28a21aa7515728efe146: Status 404 returned error can't find the container with id 4a9a92ed9f1c5f96cc79464a3dff3fdabae12d248a4b28a21aa7515728efe146 Jan 22 07:53:09 crc kubenswrapper[4982]: I0122 07:53:09.216758 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dmtbp" event={"ID":"0e5bfcba-8e81-47f0-8fe9-7607e9cbc9e2","Type":"ContainerStarted","Data":"4a9a92ed9f1c5f96cc79464a3dff3fdabae12d248a4b28a21aa7515728efe146"} Jan 22 07:53:09 crc kubenswrapper[4982]: E0122 07:53:09.432559 4982 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0e5bfcba_8e81_47f0_8fe9_7607e9cbc9e2.slice/crio-conmon-96ebd4c4540644219f8e7f5048d60a74481b27ab1bdb183392f61c7d6f9d891b.scope\": RecentStats: unable to find data in memory cache]" Jan 22 07:53:10 crc kubenswrapper[4982]: I0122 07:53:10.228642 4982 generic.go:334] "Generic (PLEG): container finished" podID="0e5bfcba-8e81-47f0-8fe9-7607e9cbc9e2" containerID="96ebd4c4540644219f8e7f5048d60a74481b27ab1bdb183392f61c7d6f9d891b" exitCode=0 Jan 22 07:53:10 crc kubenswrapper[4982]: I0122 07:53:10.228705 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dmtbp" event={"ID":"0e5bfcba-8e81-47f0-8fe9-7607e9cbc9e2","Type":"ContainerDied","Data":"96ebd4c4540644219f8e7f5048d60a74481b27ab1bdb183392f61c7d6f9d891b"} Jan 22 07:53:10 crc kubenswrapper[4982]: I0122 07:53:10.719649 4982 scope.go:117] "RemoveContainer" containerID="4292384f8792d1496c63f3056a29f7893d807144bd86466effcdeb4f3e397538" Jan 22 07:53:10 crc kubenswrapper[4982]: E0122 07:53:10.720168 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" 
Jan 22 07:53:14 crc kubenswrapper[4982]: I0122 07:53:14.293312 4982 generic.go:334] "Generic (PLEG): container finished" podID="4349a4d4-0264-4986-b1ba-02ee440c913b" containerID="4f1c91ac3023062dee3685ceb14fe094e5d1205c44e9bf4da19b0181533c53de" exitCode=2
Jan 22 07:53:14 crc kubenswrapper[4982]: I0122 07:53:14.293425 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-ntxdv" event={"ID":"4349a4d4-0264-4986-b1ba-02ee440c913b","Type":"ContainerDied","Data":"4f1c91ac3023062dee3685ceb14fe094e5d1205c44e9bf4da19b0181533c53de"}
Jan 22 07:53:15 crc kubenswrapper[4982]: I0122 07:53:15.768545 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-ntxdv"
Jan 22 07:53:15 crc kubenswrapper[4982]: I0122 07:53:15.902951 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bcnbf\" (UniqueName: \"kubernetes.io/projected/4349a4d4-0264-4986-b1ba-02ee440c913b-kube-api-access-bcnbf\") pod \"4349a4d4-0264-4986-b1ba-02ee440c913b\" (UID: \"4349a4d4-0264-4986-b1ba-02ee440c913b\") "
Jan 22 07:53:15 crc kubenswrapper[4982]: I0122 07:53:15.903024 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4349a4d4-0264-4986-b1ba-02ee440c913b-inventory\") pod \"4349a4d4-0264-4986-b1ba-02ee440c913b\" (UID: \"4349a4d4-0264-4986-b1ba-02ee440c913b\") "
Jan 22 07:53:15 crc kubenswrapper[4982]: I0122 07:53:15.903140 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4349a4d4-0264-4986-b1ba-02ee440c913b-ceph\") pod \"4349a4d4-0264-4986-b1ba-02ee440c913b\" (UID: \"4349a4d4-0264-4986-b1ba-02ee440c913b\") "
Jan 22 07:53:15 crc kubenswrapper[4982]: I0122 07:53:15.903217 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/4349a4d4-0264-4986-b1ba-02ee440c913b-ssh-key-openstack-cell1\") pod \"4349a4d4-0264-4986-b1ba-02ee440c913b\" (UID: \"4349a4d4-0264-4986-b1ba-02ee440c913b\") "
Jan 22 07:53:15 crc kubenswrapper[4982]: I0122 07:53:15.913202 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4349a4d4-0264-4986-b1ba-02ee440c913b-ceph" (OuterVolumeSpecName: "ceph") pod "4349a4d4-0264-4986-b1ba-02ee440c913b" (UID: "4349a4d4-0264-4986-b1ba-02ee440c913b"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 07:53:15 crc kubenswrapper[4982]: I0122 07:53:15.913407 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4349a4d4-0264-4986-b1ba-02ee440c913b-kube-api-access-bcnbf" (OuterVolumeSpecName: "kube-api-access-bcnbf") pod "4349a4d4-0264-4986-b1ba-02ee440c913b" (UID: "4349a4d4-0264-4986-b1ba-02ee440c913b"). InnerVolumeSpecName "kube-api-access-bcnbf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 07:53:15 crc kubenswrapper[4982]: I0122 07:53:15.934161 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4349a4d4-0264-4986-b1ba-02ee440c913b-inventory" (OuterVolumeSpecName: "inventory") pod "4349a4d4-0264-4986-b1ba-02ee440c913b" (UID: "4349a4d4-0264-4986-b1ba-02ee440c913b"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 07:53:15 crc kubenswrapper[4982]: I0122 07:53:15.946054 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4349a4d4-0264-4986-b1ba-02ee440c913b-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "4349a4d4-0264-4986-b1ba-02ee440c913b" (UID: "4349a4d4-0264-4986-b1ba-02ee440c913b"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 07:53:16 crc kubenswrapper[4982]: I0122 07:53:16.005761 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bcnbf\" (UniqueName: \"kubernetes.io/projected/4349a4d4-0264-4986-b1ba-02ee440c913b-kube-api-access-bcnbf\") on node \"crc\" DevicePath \"\""
Jan 22 07:53:16 crc kubenswrapper[4982]: I0122 07:53:16.005810 4982 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4349a4d4-0264-4986-b1ba-02ee440c913b-inventory\") on node \"crc\" DevicePath \"\""
Jan 22 07:53:16 crc kubenswrapper[4982]: I0122 07:53:16.005823 4982 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4349a4d4-0264-4986-b1ba-02ee440c913b-ceph\") on node \"crc\" DevicePath \"\""
Jan 22 07:53:16 crc kubenswrapper[4982]: I0122 07:53:16.005834 4982 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/4349a4d4-0264-4986-b1ba-02ee440c913b-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\""
Jan 22 07:53:16 crc kubenswrapper[4982]: I0122 07:53:16.313657 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-ntxdv" event={"ID":"4349a4d4-0264-4986-b1ba-02ee440c913b","Type":"ContainerDied","Data":"560e7bd1e8ed6ec886d51194345e272e642e9f90ec30a342cdf76e82accf1310"}
Jan 22 07:53:16 crc kubenswrapper[4982]: I0122 07:53:16.313704 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="560e7bd1e8ed6ec886d51194345e272e642e9f90ec30a342cdf76e82accf1310"
Jan 22 07:53:16 crc kubenswrapper[4982]: I0122 07:53:16.313731 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-ntxdv"
Jan 22 07:53:16 crc kubenswrapper[4982]: I0122 07:53:16.315767 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dmtbp" event={"ID":"0e5bfcba-8e81-47f0-8fe9-7607e9cbc9e2","Type":"ContainerStarted","Data":"68e71d590ef91ae31435ac37f411ed473ee7bc0faaea8c6caf2ae1cd0ba2a1fc"}
Jan 22 07:53:17 crc kubenswrapper[4982]: I0122 07:53:17.326940 4982 generic.go:334] "Generic (PLEG): container finished" podID="0e5bfcba-8e81-47f0-8fe9-7607e9cbc9e2" containerID="68e71d590ef91ae31435ac37f411ed473ee7bc0faaea8c6caf2ae1cd0ba2a1fc" exitCode=0
Jan 22 07:53:17 crc kubenswrapper[4982]: I0122 07:53:17.327000 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dmtbp" event={"ID":"0e5bfcba-8e81-47f0-8fe9-7607e9cbc9e2","Type":"ContainerDied","Data":"68e71d590ef91ae31435ac37f411ed473ee7bc0faaea8c6caf2ae1cd0ba2a1fc"}
Jan 22 07:53:19 crc kubenswrapper[4982]: I0122 07:53:19.347654 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dmtbp" event={"ID":"0e5bfcba-8e81-47f0-8fe9-7607e9cbc9e2","Type":"ContainerStarted","Data":"5872d1563b7887502ce899634bb06574aa0a405de827b905a7128604a2eda795"}
Jan 22 07:53:19 crc kubenswrapper[4982]: I0122 07:53:19.376767 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dmtbp" podStartSLOduration=3.035078956 podStartE2EDuration="11.376700654s" podCreationTimestamp="2026-01-22 07:53:08 +0000 UTC" firstStartedPulling="2026-01-22 07:53:10.231635307 +0000 UTC m=+7651.070273310" lastFinishedPulling="2026-01-22 07:53:18.573257005 +0000 UTC m=+7659.411895008" observedRunningTime="2026-01-22 07:53:19.367185486 +0000 UTC m=+7660.205823579" watchObservedRunningTime="2026-01-22 07:53:19.376700654 +0000 UTC m=+7660.215338687"
Jan 22 07:53:23 crc kubenswrapper[4982]: I0122 07:53:23.041562 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-c8gjr"]
Jan 22 07:53:23 crc kubenswrapper[4982]: E0122 07:53:23.042428 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4349a4d4-0264-4986-b1ba-02ee440c913b" containerName="configure-os-openstack-openstack-cell1"
Jan 22 07:53:23 crc kubenswrapper[4982]: I0122 07:53:23.042558 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4349a4d4-0264-4986-b1ba-02ee440c913b" containerName="configure-os-openstack-openstack-cell1"
Jan 22 07:53:23 crc kubenswrapper[4982]: I0122 07:53:23.042838 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="4349a4d4-0264-4986-b1ba-02ee440c913b" containerName="configure-os-openstack-openstack-cell1"
Jan 22 07:53:23 crc kubenswrapper[4982]: I0122 07:53:23.043907 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-c8gjr"
Jan 22 07:53:23 crc kubenswrapper[4982]: I0122 07:53:23.048167 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-mjzt9"
Jan 22 07:53:23 crc kubenswrapper[4982]: I0122 07:53:23.048474 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Jan 22 07:53:23 crc kubenswrapper[4982]: I0122 07:53:23.048782 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Jan 22 07:53:23 crc kubenswrapper[4982]: I0122 07:53:23.049010 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Jan 22 07:53:23 crc kubenswrapper[4982]: I0122 07:53:23.075024 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-c8gjr"]
Jan 22 07:53:23 crc kubenswrapper[4982]: I0122 07:53:23.171967 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b3e9d65-16d2-49f1-8953-84799f33a04a-inventory\") pod \"configure-os-openstack-openstack-cell1-c8gjr\" (UID: \"2b3e9d65-16d2-49f1-8953-84799f33a04a\") " pod="openstack/configure-os-openstack-openstack-cell1-c8gjr"
Jan 22 07:53:23 crc kubenswrapper[4982]: I0122 07:53:23.172038 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6nhhz\" (UniqueName: \"kubernetes.io/projected/2b3e9d65-16d2-49f1-8953-84799f33a04a-kube-api-access-6nhhz\") pod \"configure-os-openstack-openstack-cell1-c8gjr\" (UID: \"2b3e9d65-16d2-49f1-8953-84799f33a04a\") " pod="openstack/configure-os-openstack-openstack-cell1-c8gjr"
Jan 22 07:53:23 crc kubenswrapper[4982]: I0122 07:53:23.172113 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/2b3e9d65-16d2-49f1-8953-84799f33a04a-ssh-key-openstack-cell1\") pod \"configure-os-openstack-openstack-cell1-c8gjr\" (UID: \"2b3e9d65-16d2-49f1-8953-84799f33a04a\") " pod="openstack/configure-os-openstack-openstack-cell1-c8gjr"
Jan 22 07:53:23 crc kubenswrapper[4982]: I0122 07:53:23.172143 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2b3e9d65-16d2-49f1-8953-84799f33a04a-ceph\") pod \"configure-os-openstack-openstack-cell1-c8gjr\" (UID: \"2b3e9d65-16d2-49f1-8953-84799f33a04a\") " pod="openstack/configure-os-openstack-openstack-cell1-c8gjr"
Jan 22 07:53:23 crc kubenswrapper[4982]: I0122 07:53:23.274784 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b3e9d65-16d2-49f1-8953-84799f33a04a-inventory\") pod \"configure-os-openstack-openstack-cell1-c8gjr\" (UID: \"2b3e9d65-16d2-49f1-8953-84799f33a04a\") " pod="openstack/configure-os-openstack-openstack-cell1-c8gjr"
Jan 22 07:53:23 crc kubenswrapper[4982]: I0122 07:53:23.274918 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6nhhz\" (UniqueName: \"kubernetes.io/projected/2b3e9d65-16d2-49f1-8953-84799f33a04a-kube-api-access-6nhhz\") pod \"configure-os-openstack-openstack-cell1-c8gjr\" (UID: \"2b3e9d65-16d2-49f1-8953-84799f33a04a\") " pod="openstack/configure-os-openstack-openstack-cell1-c8gjr"
Jan 22 07:53:23 crc kubenswrapper[4982]: I0122 07:53:23.274972 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/2b3e9d65-16d2-49f1-8953-84799f33a04a-ssh-key-openstack-cell1\") pod \"configure-os-openstack-openstack-cell1-c8gjr\" (UID: \"2b3e9d65-16d2-49f1-8953-84799f33a04a\") " pod="openstack/configure-os-openstack-openstack-cell1-c8gjr"
Jan 22 07:53:23 crc kubenswrapper[4982]: I0122 07:53:23.275004 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2b3e9d65-16d2-49f1-8953-84799f33a04a-ceph\") pod \"configure-os-openstack-openstack-cell1-c8gjr\" (UID: \"2b3e9d65-16d2-49f1-8953-84799f33a04a\") " pod="openstack/configure-os-openstack-openstack-cell1-c8gjr"
Jan 22 07:53:23 crc kubenswrapper[4982]: I0122 07:53:23.285937 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b3e9d65-16d2-49f1-8953-84799f33a04a-inventory\") pod \"configure-os-openstack-openstack-cell1-c8gjr\" (UID: \"2b3e9d65-16d2-49f1-8953-84799f33a04a\") " pod="openstack/configure-os-openstack-openstack-cell1-c8gjr"
Jan 22 07:53:23 crc kubenswrapper[4982]: I0122 07:53:23.286001 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/2b3e9d65-16d2-49f1-8953-84799f33a04a-ssh-key-openstack-cell1\") pod \"configure-os-openstack-openstack-cell1-c8gjr\" (UID: \"2b3e9d65-16d2-49f1-8953-84799f33a04a\") " pod="openstack/configure-os-openstack-openstack-cell1-c8gjr"
Jan 22 07:53:23 crc kubenswrapper[4982]: I0122 07:53:23.285954 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2b3e9d65-16d2-49f1-8953-84799f33a04a-ceph\") pod \"configure-os-openstack-openstack-cell1-c8gjr\" (UID: \"2b3e9d65-16d2-49f1-8953-84799f33a04a\") " pod="openstack/configure-os-openstack-openstack-cell1-c8gjr"
Jan 22 07:53:23 crc kubenswrapper[4982]: I0122 07:53:23.310498 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6nhhz\" (UniqueName: \"kubernetes.io/projected/2b3e9d65-16d2-49f1-8953-84799f33a04a-kube-api-access-6nhhz\") pod \"configure-os-openstack-openstack-cell1-c8gjr\" (UID: \"2b3e9d65-16d2-49f1-8953-84799f33a04a\") " pod="openstack/configure-os-openstack-openstack-cell1-c8gjr"
Jan 22 07:53:23 crc kubenswrapper[4982]: I0122 07:53:23.379729 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-c8gjr"
Jan 22 07:53:23 crc kubenswrapper[4982]: I0122 07:53:23.720147 4982 scope.go:117] "RemoveContainer" containerID="4292384f8792d1496c63f3056a29f7893d807144bd86466effcdeb4f3e397538"
Jan 22 07:53:23 crc kubenswrapper[4982]: E0122 07:53:23.720753 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 07:53:23 crc kubenswrapper[4982]: I0122 07:53:23.946045 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-c8gjr"]
Jan 22 07:53:23 crc kubenswrapper[4982]: W0122 07:53:23.953307 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2b3e9d65_16d2_49f1_8953_84799f33a04a.slice/crio-70fb22312cb92f5bd20a4c721ffdd7e05442e79a1b1af338fc5c183b1f111af1 WatchSource:0}: Error finding container 70fb22312cb92f5bd20a4c721ffdd7e05442e79a1b1af338fc5c183b1f111af1: Status 404 returned error can't find the container with id 70fb22312cb92f5bd20a4c721ffdd7e05442e79a1b1af338fc5c183b1f111af1
Jan 22 07:53:24 crc kubenswrapper[4982]: I0122 07:53:24.395968 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-c8gjr" event={"ID":"2b3e9d65-16d2-49f1-8953-84799f33a04a","Type":"ContainerStarted","Data":"70fb22312cb92f5bd20a4c721ffdd7e05442e79a1b1af338fc5c183b1f111af1"}
Jan 22 07:53:25 crc kubenswrapper[4982]: I0122 07:53:25.416937 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-c8gjr" event={"ID":"2b3e9d65-16d2-49f1-8953-84799f33a04a","Type":"ContainerStarted","Data":"200367505b6b15cb44bc62d4da783056b3dbb194cf924358425b76896414f1f4"}
Jan 22 07:53:25 crc kubenswrapper[4982]: I0122 07:53:25.440693 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-openstack-openstack-cell1-c8gjr" podStartSLOduration=2.032666664 podStartE2EDuration="2.440669799s" podCreationTimestamp="2026-01-22 07:53:23 +0000 UTC" firstStartedPulling="2026-01-22 07:53:23.955647761 +0000 UTC m=+7664.794285764" lastFinishedPulling="2026-01-22 07:53:24.363650886 +0000 UTC m=+7665.202288899" observedRunningTime="2026-01-22 07:53:25.434919562 +0000 UTC m=+7666.273557575" watchObservedRunningTime="2026-01-22 07:53:25.440669799 +0000 UTC m=+7666.279307802"
Jan 22 07:53:28 crc kubenswrapper[4982]: I0122 07:53:28.565352 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dmtbp"
Jan 22 07:53:28 crc kubenswrapper[4982]: I0122 07:53:28.566645 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dmtbp"
Jan 22 07:53:28 crc kubenswrapper[4982]: I0122 07:53:28.627916 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dmtbp"
Jan 22 07:53:29 crc kubenswrapper[4982]: I0122 07:53:29.553469 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dmtbp"
Jan 22 07:53:29 crc kubenswrapper[4982]: I0122 07:53:29.647235 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dmtbp"]
Jan 22 07:53:29 crc kubenswrapper[4982]: I0122 07:53:29.691152 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mxbrn"]
Jan 22 07:53:29 crc kubenswrapper[4982]: I0122 07:53:29.691657 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-mxbrn" podUID="58f845d6-63f8-420f-ab6d-deb415896e42" containerName="registry-server" containerID="cri-o://7947d4aa9ac02b0a9b792814542a94809b056e9ba61d94a0b863c6c9b6c5fb56" gracePeriod=2
Jan 22 07:53:30 crc kubenswrapper[4982]: I0122 07:53:30.384601 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mxbrn"
Jan 22 07:53:30 crc kubenswrapper[4982]: I0122 07:53:30.481658 4982 generic.go:334] "Generic (PLEG): container finished" podID="58f845d6-63f8-420f-ab6d-deb415896e42" containerID="7947d4aa9ac02b0a9b792814542a94809b056e9ba61d94a0b863c6c9b6c5fb56" exitCode=0
Jan 22 07:53:30 crc kubenswrapper[4982]: I0122 07:53:30.481728 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mxbrn"
Jan 22 07:53:30 crc kubenswrapper[4982]: I0122 07:53:30.481730 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mxbrn" event={"ID":"58f845d6-63f8-420f-ab6d-deb415896e42","Type":"ContainerDied","Data":"7947d4aa9ac02b0a9b792814542a94809b056e9ba61d94a0b863c6c9b6c5fb56"}
Jan 22 07:53:30 crc kubenswrapper[4982]: I0122 07:53:30.481779 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mxbrn" event={"ID":"58f845d6-63f8-420f-ab6d-deb415896e42","Type":"ContainerDied","Data":"6d5c9c555f43995da052f97e9e5e9d59dccfdb2176b7197c80a110970a68ec92"}
Jan 22 07:53:30 crc kubenswrapper[4982]: I0122 07:53:30.481797 4982 scope.go:117] "RemoveContainer" containerID="7947d4aa9ac02b0a9b792814542a94809b056e9ba61d94a0b863c6c9b6c5fb56"
Jan 22 07:53:30 crc kubenswrapper[4982]: I0122 07:53:30.535561 4982 scope.go:117] "RemoveContainer" containerID="74e68eca27633fc259f1c8d139a19185209984f2308567e0d2b9d90d4d24c672"
Jan 22 07:53:30 crc kubenswrapper[4982]: I0122 07:53:30.558063 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jmgjj\" (UniqueName: \"kubernetes.io/projected/58f845d6-63f8-420f-ab6d-deb415896e42-kube-api-access-jmgjj\") pod \"58f845d6-63f8-420f-ab6d-deb415896e42\" (UID: \"58f845d6-63f8-420f-ab6d-deb415896e42\") "
Jan 22 07:53:30 crc kubenswrapper[4982]: I0122 07:53:30.558294 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58f845d6-63f8-420f-ab6d-deb415896e42-catalog-content\") pod \"58f845d6-63f8-420f-ab6d-deb415896e42\" (UID: \"58f845d6-63f8-420f-ab6d-deb415896e42\") "
Jan 22 07:53:30 crc kubenswrapper[4982]: I0122 07:53:30.558385 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58f845d6-63f8-420f-ab6d-deb415896e42-utilities\") pod \"58f845d6-63f8-420f-ab6d-deb415896e42\" (UID: \"58f845d6-63f8-420f-ab6d-deb415896e42\") "
Jan 22 07:53:30 crc kubenswrapper[4982]: I0122 07:53:30.564311 4982 operation_generator.go:803] UnmountVolume.TearDown
succeeded for volume "kubernetes.io/empty-dir/58f845d6-63f8-420f-ab6d-deb415896e42-utilities" (OuterVolumeSpecName: "utilities") pod "58f845d6-63f8-420f-ab6d-deb415896e42" (UID: "58f845d6-63f8-420f-ab6d-deb415896e42"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:53:30 crc kubenswrapper[4982]: I0122 07:53:30.570565 4982 scope.go:117] "RemoveContainer" containerID="77a99a9ad58c99c370ec33aacade3e5f034cd353940e886d10aa3ecb19503f5c" Jan 22 07:53:30 crc kubenswrapper[4982]: I0122 07:53:30.576188 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58f845d6-63f8-420f-ab6d-deb415896e42-kube-api-access-jmgjj" (OuterVolumeSpecName: "kube-api-access-jmgjj") pod "58f845d6-63f8-420f-ab6d-deb415896e42" (UID: "58f845d6-63f8-420f-ab6d-deb415896e42"). InnerVolumeSpecName "kube-api-access-jmgjj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:53:30 crc kubenswrapper[4982]: I0122 07:53:30.636474 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58f845d6-63f8-420f-ab6d-deb415896e42-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "58f845d6-63f8-420f-ab6d-deb415896e42" (UID: "58f845d6-63f8-420f-ab6d-deb415896e42"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 07:53:30 crc kubenswrapper[4982]: I0122 07:53:30.660943 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jmgjj\" (UniqueName: \"kubernetes.io/projected/58f845d6-63f8-420f-ab6d-deb415896e42-kube-api-access-jmgjj\") on node \"crc\" DevicePath \"\"" Jan 22 07:53:30 crc kubenswrapper[4982]: I0122 07:53:30.660976 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58f845d6-63f8-420f-ab6d-deb415896e42-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 07:53:30 crc kubenswrapper[4982]: I0122 07:53:30.660985 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58f845d6-63f8-420f-ab6d-deb415896e42-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 07:53:30 crc kubenswrapper[4982]: I0122 07:53:30.670529 4982 scope.go:117] "RemoveContainer" containerID="7947d4aa9ac02b0a9b792814542a94809b056e9ba61d94a0b863c6c9b6c5fb56" Jan 22 07:53:30 crc kubenswrapper[4982]: E0122 07:53:30.671826 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7947d4aa9ac02b0a9b792814542a94809b056e9ba61d94a0b863c6c9b6c5fb56\": container with ID starting with 7947d4aa9ac02b0a9b792814542a94809b056e9ba61d94a0b863c6c9b6c5fb56 not found: ID does not exist" containerID="7947d4aa9ac02b0a9b792814542a94809b056e9ba61d94a0b863c6c9b6c5fb56" Jan 22 07:53:30 crc kubenswrapper[4982]: I0122 07:53:30.671885 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7947d4aa9ac02b0a9b792814542a94809b056e9ba61d94a0b863c6c9b6c5fb56"} err="failed to get container status \"7947d4aa9ac02b0a9b792814542a94809b056e9ba61d94a0b863c6c9b6c5fb56\": rpc error: code = NotFound desc = could not find container \"7947d4aa9ac02b0a9b792814542a94809b056e9ba61d94a0b863c6c9b6c5fb56\": container with ID starting with 7947d4aa9ac02b0a9b792814542a94809b056e9ba61d94a0b863c6c9b6c5fb56 not found: ID does not exist" Jan 22 07:53:30 crc kubenswrapper[4982]: I0122 07:53:30.671911 4982 scope.go:117] "RemoveContainer" 
containerID="74e68eca27633fc259f1c8d139a19185209984f2308567e0d2b9d90d4d24c672" Jan 22 07:53:30 crc kubenswrapper[4982]: E0122 07:53:30.672272 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74e68eca27633fc259f1c8d139a19185209984f2308567e0d2b9d90d4d24c672\": container with ID starting with 74e68eca27633fc259f1c8d139a19185209984f2308567e0d2b9d90d4d24c672 not found: ID does not exist" containerID="74e68eca27633fc259f1c8d139a19185209984f2308567e0d2b9d90d4d24c672" Jan 22 07:53:30 crc kubenswrapper[4982]: I0122 07:53:30.672342 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74e68eca27633fc259f1c8d139a19185209984f2308567e0d2b9d90d4d24c672"} err="failed to get container status \"74e68eca27633fc259f1c8d139a19185209984f2308567e0d2b9d90d4d24c672\": rpc error: code = NotFound desc = could not find container \"74e68eca27633fc259f1c8d139a19185209984f2308567e0d2b9d90d4d24c672\": container with ID starting with 74e68eca27633fc259f1c8d139a19185209984f2308567e0d2b9d90d4d24c672 not found: ID does not exist" Jan 22 07:53:30 crc kubenswrapper[4982]: I0122 07:53:30.672389 4982 scope.go:117] "RemoveContainer" containerID="77a99a9ad58c99c370ec33aacade3e5f034cd353940e886d10aa3ecb19503f5c" Jan 22 07:53:30 crc kubenswrapper[4982]: E0122 07:53:30.672722 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"77a99a9ad58c99c370ec33aacade3e5f034cd353940e886d10aa3ecb19503f5c\": container with ID starting with 77a99a9ad58c99c370ec33aacade3e5f034cd353940e886d10aa3ecb19503f5c not found: ID does not exist" containerID="77a99a9ad58c99c370ec33aacade3e5f034cd353940e886d10aa3ecb19503f5c" Jan 22 07:53:30 crc kubenswrapper[4982]: I0122 07:53:30.672750 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77a99a9ad58c99c370ec33aacade3e5f034cd353940e886d10aa3ecb19503f5c"} err="failed to get container status \"77a99a9ad58c99c370ec33aacade3e5f034cd353940e886d10aa3ecb19503f5c\": rpc error: code = NotFound desc = could not find container \"77a99a9ad58c99c370ec33aacade3e5f034cd353940e886d10aa3ecb19503f5c\": container with ID starting with 77a99a9ad58c99c370ec33aacade3e5f034cd353940e886d10aa3ecb19503f5c not found: ID does not exist" Jan 22 07:53:30 crc kubenswrapper[4982]: I0122 07:53:30.868120 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mxbrn"] Jan 22 07:53:30 crc kubenswrapper[4982]: I0122 07:53:30.873729 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-mxbrn"] Jan 22 07:53:31 crc kubenswrapper[4982]: I0122 07:53:31.729443 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58f845d6-63f8-420f-ab6d-deb415896e42" path="/var/lib/kubelet/pods/58f845d6-63f8-420f-ab6d-deb415896e42/volumes" Jan 22 07:53:38 crc kubenswrapper[4982]: I0122 07:53:38.719389 4982 scope.go:117] "RemoveContainer" containerID="4292384f8792d1496c63f3056a29f7893d807144bd86466effcdeb4f3e397538" Jan 22 07:53:38 crc kubenswrapper[4982]: E0122 07:53:38.720186 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:53:51 crc kubenswrapper[4982]: I0122 07:53:51.719251 4982 scope.go:117] "RemoveContainer" containerID="4292384f8792d1496c63f3056a29f7893d807144bd86466effcdeb4f3e397538" Jan 22 07:53:51 crc kubenswrapper[4982]: E0122 07:53:51.720064 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:54:02 crc kubenswrapper[4982]: I0122 07:54:02.838800 4982 generic.go:334] "Generic (PLEG): container finished" podID="2b3e9d65-16d2-49f1-8953-84799f33a04a" containerID="200367505b6b15cb44bc62d4da783056b3dbb194cf924358425b76896414f1f4" exitCode=2 Jan 22 07:54:02 crc kubenswrapper[4982]: I0122 07:54:02.838883 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-c8gjr" event={"ID":"2b3e9d65-16d2-49f1-8953-84799f33a04a","Type":"ContainerDied","Data":"200367505b6b15cb44bc62d4da783056b3dbb194cf924358425b76896414f1f4"} Jan 22 07:54:04 crc kubenswrapper[4982]: I0122 07:54:04.264169 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-c8gjr" Jan 22 07:54:04 crc kubenswrapper[4982]: I0122 07:54:04.368906 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/2b3e9d65-16d2-49f1-8953-84799f33a04a-ssh-key-openstack-cell1\") pod \"2b3e9d65-16d2-49f1-8953-84799f33a04a\" (UID: \"2b3e9d65-16d2-49f1-8953-84799f33a04a\") " Jan 22 07:54:04 crc kubenswrapper[4982]: I0122 07:54:04.368968 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b3e9d65-16d2-49f1-8953-84799f33a04a-inventory\") pod \"2b3e9d65-16d2-49f1-8953-84799f33a04a\" (UID: \"2b3e9d65-16d2-49f1-8953-84799f33a04a\") " Jan 22 07:54:04 crc kubenswrapper[4982]: I0122 07:54:04.369173 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6nhhz\" (UniqueName: \"kubernetes.io/projected/2b3e9d65-16d2-49f1-8953-84799f33a04a-kube-api-access-6nhhz\") pod \"2b3e9d65-16d2-49f1-8953-84799f33a04a\" (UID: \"2b3e9d65-16d2-49f1-8953-84799f33a04a\") " Jan 22 07:54:04 crc kubenswrapper[4982]: I0122 07:54:04.369212 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2b3e9d65-16d2-49f1-8953-84799f33a04a-ceph\") pod \"2b3e9d65-16d2-49f1-8953-84799f33a04a\" (UID: \"2b3e9d65-16d2-49f1-8953-84799f33a04a\") " Jan 22 07:54:04 crc kubenswrapper[4982]: I0122 07:54:04.374304 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b3e9d65-16d2-49f1-8953-84799f33a04a-kube-api-access-6nhhz" (OuterVolumeSpecName: "kube-api-access-6nhhz") pod "2b3e9d65-16d2-49f1-8953-84799f33a04a" (UID: "2b3e9d65-16d2-49f1-8953-84799f33a04a"). InnerVolumeSpecName "kube-api-access-6nhhz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:54:04 crc kubenswrapper[4982]: I0122 07:54:04.375874 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b3e9d65-16d2-49f1-8953-84799f33a04a-ceph" (OuterVolumeSpecName: "ceph") pod "2b3e9d65-16d2-49f1-8953-84799f33a04a" (UID: "2b3e9d65-16d2-49f1-8953-84799f33a04a"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:54:04 crc kubenswrapper[4982]: I0122 07:54:04.401394 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b3e9d65-16d2-49f1-8953-84799f33a04a-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "2b3e9d65-16d2-49f1-8953-84799f33a04a" (UID: "2b3e9d65-16d2-49f1-8953-84799f33a04a"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:54:04 crc kubenswrapper[4982]: I0122 07:54:04.403459 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b3e9d65-16d2-49f1-8953-84799f33a04a-inventory" (OuterVolumeSpecName: "inventory") pod "2b3e9d65-16d2-49f1-8953-84799f33a04a" (UID: "2b3e9d65-16d2-49f1-8953-84799f33a04a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:54:04 crc kubenswrapper[4982]: I0122 07:54:04.472284 4982 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2b3e9d65-16d2-49f1-8953-84799f33a04a-ceph\") on node \"crc\" DevicePath \"\"" Jan 22 07:54:04 crc kubenswrapper[4982]: I0122 07:54:04.472341 4982 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/2b3e9d65-16d2-49f1-8953-84799f33a04a-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 22 07:54:04 crc kubenswrapper[4982]: I0122 07:54:04.472361 4982 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b3e9d65-16d2-49f1-8953-84799f33a04a-inventory\") on node \"crc\" DevicePath \"\"" Jan 22 07:54:04 crc kubenswrapper[4982]: I0122 07:54:04.472382 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6nhhz\" (UniqueName: \"kubernetes.io/projected/2b3e9d65-16d2-49f1-8953-84799f33a04a-kube-api-access-6nhhz\") on node \"crc\" DevicePath \"\"" Jan 22 07:54:04 crc kubenswrapper[4982]: I0122 07:54:04.719970 4982 scope.go:117] "RemoveContainer" containerID="4292384f8792d1496c63f3056a29f7893d807144bd86466effcdeb4f3e397538" Jan 22 07:54:04 crc kubenswrapper[4982]: E0122 07:54:04.720386 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:54:04 crc kubenswrapper[4982]: I0122 07:54:04.859482 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-c8gjr" event={"ID":"2b3e9d65-16d2-49f1-8953-84799f33a04a","Type":"ContainerDied","Data":"70fb22312cb92f5bd20a4c721ffdd7e05442e79a1b1af338fc5c183b1f111af1"} Jan 22 07:54:04 crc kubenswrapper[4982]: I0122 07:54:04.859583 4982 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="70fb22312cb92f5bd20a4c721ffdd7e05442e79a1b1af338fc5c183b1f111af1" Jan 22 07:54:04 crc kubenswrapper[4982]: I0122 07:54:04.859503 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-c8gjr" Jan 22 07:54:17 crc kubenswrapper[4982]: I0122 07:54:17.719527 4982 scope.go:117] "RemoveContainer" containerID="4292384f8792d1496c63f3056a29f7893d807144bd86466effcdeb4f3e397538" Jan 22 07:54:17 crc kubenswrapper[4982]: E0122 07:54:17.720367 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:54:22 crc kubenswrapper[4982]: I0122 07:54:22.040085 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-tdn6j"] Jan 22 07:54:22 crc kubenswrapper[4982]: E0122 07:54:22.041124 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58f845d6-63f8-420f-ab6d-deb415896e42" containerName="extract-utilities" Jan 22 07:54:22 crc kubenswrapper[4982]: I0122 07:54:22.041139 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="58f845d6-63f8-420f-ab6d-deb415896e42" containerName="extract-utilities" Jan 22 07:54:22 crc kubenswrapper[4982]: E0122 07:54:22.041162 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58f845d6-63f8-420f-ab6d-deb415896e42" containerName="extract-content" Jan 22 07:54:22 crc kubenswrapper[4982]: I0122 07:54:22.041170 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="58f845d6-63f8-420f-ab6d-deb415896e42" containerName="extract-content" Jan 22 07:54:22 crc kubenswrapper[4982]: E0122 07:54:22.041184 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58f845d6-63f8-420f-ab6d-deb415896e42" containerName="registry-server" Jan 22 07:54:22 crc kubenswrapper[4982]: I0122 07:54:22.041190 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="58f845d6-63f8-420f-ab6d-deb415896e42" containerName="registry-server" Jan 22 07:54:22 crc kubenswrapper[4982]: E0122 07:54:22.041215 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b3e9d65-16d2-49f1-8953-84799f33a04a" containerName="configure-os-openstack-openstack-cell1" Jan 22 07:54:22 crc kubenswrapper[4982]: I0122 07:54:22.041220 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b3e9d65-16d2-49f1-8953-84799f33a04a" containerName="configure-os-openstack-openstack-cell1" Jan 22 07:54:22 crc kubenswrapper[4982]: I0122 07:54:22.041419 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="58f845d6-63f8-420f-ab6d-deb415896e42" containerName="registry-server" Jan 22 07:54:22 crc kubenswrapper[4982]: I0122 07:54:22.041433 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b3e9d65-16d2-49f1-8953-84799f33a04a" containerName="configure-os-openstack-openstack-cell1" Jan 22 07:54:22 crc kubenswrapper[4982]: I0122 07:54:22.042209 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-tdn6j" Jan 22 07:54:22 crc kubenswrapper[4982]: I0122 07:54:22.044300 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 22 07:54:22 crc kubenswrapper[4982]: I0122 07:54:22.044466 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-mjzt9" Jan 22 07:54:22 crc kubenswrapper[4982]: I0122 07:54:22.044584 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 22 07:54:22 crc kubenswrapper[4982]: I0122 07:54:22.045410 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 22 07:54:22 crc kubenswrapper[4982]: I0122 07:54:22.054954 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-tdn6j"] Jan 22 07:54:22 crc kubenswrapper[4982]: I0122 07:54:22.065133 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ce78aa73-e7c6-49e6-ab7f-feea739308b3-ceph\") pod \"configure-os-openstack-openstack-cell1-tdn6j\" (UID: \"ce78aa73-e7c6-49e6-ab7f-feea739308b3\") " pod="openstack/configure-os-openstack-openstack-cell1-tdn6j" Jan 22 07:54:22 crc kubenswrapper[4982]: I0122 07:54:22.065449 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4mnw\" (UniqueName: \"kubernetes.io/projected/ce78aa73-e7c6-49e6-ab7f-feea739308b3-kube-api-access-g4mnw\") pod \"configure-os-openstack-openstack-cell1-tdn6j\" (UID: \"ce78aa73-e7c6-49e6-ab7f-feea739308b3\") " pod="openstack/configure-os-openstack-openstack-cell1-tdn6j" Jan 22 07:54:22 crc kubenswrapper[4982]: I0122 07:54:22.065632 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/ce78aa73-e7c6-49e6-ab7f-feea739308b3-ssh-key-openstack-cell1\") pod \"configure-os-openstack-openstack-cell1-tdn6j\" (UID: \"ce78aa73-e7c6-49e6-ab7f-feea739308b3\") " pod="openstack/configure-os-openstack-openstack-cell1-tdn6j" Jan 22 07:54:22 crc kubenswrapper[4982]: I0122 07:54:22.065802 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ce78aa73-e7c6-49e6-ab7f-feea739308b3-inventory\") pod \"configure-os-openstack-openstack-cell1-tdn6j\" (UID: \"ce78aa73-e7c6-49e6-ab7f-feea739308b3\") " pod="openstack/configure-os-openstack-openstack-cell1-tdn6j" Jan 22 07:54:22 crc kubenswrapper[4982]: I0122 07:54:22.167888 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/ce78aa73-e7c6-49e6-ab7f-feea739308b3-ssh-key-openstack-cell1\") pod \"configure-os-openstack-openstack-cell1-tdn6j\" (UID: \"ce78aa73-e7c6-49e6-ab7f-feea739308b3\") " pod="openstack/configure-os-openstack-openstack-cell1-tdn6j" Jan 22 07:54:22 crc kubenswrapper[4982]: I0122 07:54:22.167980 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ce78aa73-e7c6-49e6-ab7f-feea739308b3-inventory\") pod \"configure-os-openstack-openstack-cell1-tdn6j\" (UID: \"ce78aa73-e7c6-49e6-ab7f-feea739308b3\") " 
pod="openstack/configure-os-openstack-openstack-cell1-tdn6j" Jan 22 07:54:22 crc kubenswrapper[4982]: I0122 07:54:22.168064 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ce78aa73-e7c6-49e6-ab7f-feea739308b3-ceph\") pod \"configure-os-openstack-openstack-cell1-tdn6j\" (UID: \"ce78aa73-e7c6-49e6-ab7f-feea739308b3\") " pod="openstack/configure-os-openstack-openstack-cell1-tdn6j" Jan 22 07:54:22 crc kubenswrapper[4982]: I0122 07:54:22.168152 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4mnw\" (UniqueName: \"kubernetes.io/projected/ce78aa73-e7c6-49e6-ab7f-feea739308b3-kube-api-access-g4mnw\") pod \"configure-os-openstack-openstack-cell1-tdn6j\" (UID: \"ce78aa73-e7c6-49e6-ab7f-feea739308b3\") " pod="openstack/configure-os-openstack-openstack-cell1-tdn6j" Jan 22 07:54:22 crc kubenswrapper[4982]: I0122 07:54:22.173525 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ce78aa73-e7c6-49e6-ab7f-feea739308b3-ceph\") pod \"configure-os-openstack-openstack-cell1-tdn6j\" (UID: \"ce78aa73-e7c6-49e6-ab7f-feea739308b3\") " pod="openstack/configure-os-openstack-openstack-cell1-tdn6j" Jan 22 07:54:22 crc kubenswrapper[4982]: I0122 07:54:22.173542 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ce78aa73-e7c6-49e6-ab7f-feea739308b3-inventory\") pod \"configure-os-openstack-openstack-cell1-tdn6j\" (UID: \"ce78aa73-e7c6-49e6-ab7f-feea739308b3\") " pod="openstack/configure-os-openstack-openstack-cell1-tdn6j" Jan 22 07:54:22 crc kubenswrapper[4982]: I0122 07:54:22.180402 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/ce78aa73-e7c6-49e6-ab7f-feea739308b3-ssh-key-openstack-cell1\") pod \"configure-os-openstack-openstack-cell1-tdn6j\" (UID: \"ce78aa73-e7c6-49e6-ab7f-feea739308b3\") " pod="openstack/configure-os-openstack-openstack-cell1-tdn6j" Jan 22 07:54:22 crc kubenswrapper[4982]: I0122 07:54:22.184434 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4mnw\" (UniqueName: \"kubernetes.io/projected/ce78aa73-e7c6-49e6-ab7f-feea739308b3-kube-api-access-g4mnw\") pod \"configure-os-openstack-openstack-cell1-tdn6j\" (UID: \"ce78aa73-e7c6-49e6-ab7f-feea739308b3\") " pod="openstack/configure-os-openstack-openstack-cell1-tdn6j" Jan 22 07:54:22 crc kubenswrapper[4982]: I0122 07:54:22.369530 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-tdn6j" Jan 22 07:54:22 crc kubenswrapper[4982]: I0122 07:54:22.927263 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-tdn6j"] Jan 22 07:54:23 crc kubenswrapper[4982]: I0122 07:54:23.058653 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-tdn6j" event={"ID":"ce78aa73-e7c6-49e6-ab7f-feea739308b3","Type":"ContainerStarted","Data":"f00d0c0505c54c342f75c014cc6f3852663b8a4fec2dcd81067110c2ae37e82d"} Jan 22 07:54:24 crc kubenswrapper[4982]: I0122 07:54:24.069304 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-tdn6j" event={"ID":"ce78aa73-e7c6-49e6-ab7f-feea739308b3","Type":"ContainerStarted","Data":"1cd42b83007f8b39c128fa93271ccac642894fc91ebe03f8f665151d23a46cf3"} Jan 22 07:54:24 crc kubenswrapper[4982]: I0122 07:54:24.103195 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-openstack-openstack-cell1-tdn6j" podStartSLOduration=1.6047216180000001 podStartE2EDuration="2.103160703s" podCreationTimestamp="2026-01-22 07:54:22 +0000 UTC" firstStartedPulling="2026-01-22 07:54:22.931282291 +0000 UTC m=+7723.769920294" lastFinishedPulling="2026-01-22 07:54:23.429721366 +0000 UTC m=+7724.268359379" observedRunningTime="2026-01-22 07:54:24.086646815 +0000 UTC m=+7724.925284818" watchObservedRunningTime="2026-01-22 07:54:24.103160703 +0000 UTC m=+7724.941798726" Jan 22 07:54:30 crc kubenswrapper[4982]: I0122 07:54:30.719280 4982 scope.go:117] "RemoveContainer" containerID="4292384f8792d1496c63f3056a29f7893d807144bd86466effcdeb4f3e397538" Jan 22 07:54:30 crc kubenswrapper[4982]: E0122 07:54:30.720036 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:54:44 crc kubenswrapper[4982]: I0122 07:54:44.719949 4982 scope.go:117] "RemoveContainer" containerID="4292384f8792d1496c63f3056a29f7893d807144bd86466effcdeb4f3e397538" Jan 22 07:54:44 crc kubenswrapper[4982]: E0122 07:54:44.721075 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 07:54:55 crc kubenswrapper[4982]: I0122 07:54:55.720585 4982 scope.go:117] "RemoveContainer" containerID="4292384f8792d1496c63f3056a29f7893d807144bd86466effcdeb4f3e397538" Jan 22 07:54:56 crc kubenswrapper[4982]: I0122 07:54:56.394449 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"f9d81a771d17c161adc09d5e14cec4966d777d5975a06b7fbd1e7851fd0738c7"} Jan 22 07:55:00 crc kubenswrapper[4982]: I0122 07:55:00.429010 4982 generic.go:334] "Generic (PLEG): container 
finished" podID="ce78aa73-e7c6-49e6-ab7f-feea739308b3" containerID="1cd42b83007f8b39c128fa93271ccac642894fc91ebe03f8f665151d23a46cf3" exitCode=2 Jan 22 07:55:00 crc kubenswrapper[4982]: I0122 07:55:00.429218 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-tdn6j" event={"ID":"ce78aa73-e7c6-49e6-ab7f-feea739308b3","Type":"ContainerDied","Data":"1cd42b83007f8b39c128fa93271ccac642894fc91ebe03f8f665151d23a46cf3"} Jan 22 07:55:01 crc kubenswrapper[4982]: I0122 07:55:01.928029 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-tdn6j" Jan 22 07:55:02 crc kubenswrapper[4982]: I0122 07:55:02.029890 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g4mnw\" (UniqueName: \"kubernetes.io/projected/ce78aa73-e7c6-49e6-ab7f-feea739308b3-kube-api-access-g4mnw\") pod \"ce78aa73-e7c6-49e6-ab7f-feea739308b3\" (UID: \"ce78aa73-e7c6-49e6-ab7f-feea739308b3\") " Jan 22 07:55:02 crc kubenswrapper[4982]: I0122 07:55:02.029944 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ce78aa73-e7c6-49e6-ab7f-feea739308b3-ceph\") pod \"ce78aa73-e7c6-49e6-ab7f-feea739308b3\" (UID: \"ce78aa73-e7c6-49e6-ab7f-feea739308b3\") " Jan 22 07:55:02 crc kubenswrapper[4982]: I0122 07:55:02.030092 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/ce78aa73-e7c6-49e6-ab7f-feea739308b3-ssh-key-openstack-cell1\") pod \"ce78aa73-e7c6-49e6-ab7f-feea739308b3\" (UID: \"ce78aa73-e7c6-49e6-ab7f-feea739308b3\") " Jan 22 07:55:02 crc kubenswrapper[4982]: I0122 07:55:02.030153 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ce78aa73-e7c6-49e6-ab7f-feea739308b3-inventory\") pod \"ce78aa73-e7c6-49e6-ab7f-feea739308b3\" (UID: \"ce78aa73-e7c6-49e6-ab7f-feea739308b3\") " Jan 22 07:55:02 crc kubenswrapper[4982]: I0122 07:55:02.035923 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce78aa73-e7c6-49e6-ab7f-feea739308b3-ceph" (OuterVolumeSpecName: "ceph") pod "ce78aa73-e7c6-49e6-ab7f-feea739308b3" (UID: "ce78aa73-e7c6-49e6-ab7f-feea739308b3"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:55:02 crc kubenswrapper[4982]: I0122 07:55:02.036450 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce78aa73-e7c6-49e6-ab7f-feea739308b3-kube-api-access-g4mnw" (OuterVolumeSpecName: "kube-api-access-g4mnw") pod "ce78aa73-e7c6-49e6-ab7f-feea739308b3" (UID: "ce78aa73-e7c6-49e6-ab7f-feea739308b3"). InnerVolumeSpecName "kube-api-access-g4mnw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:55:02 crc kubenswrapper[4982]: I0122 07:55:02.063949 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce78aa73-e7c6-49e6-ab7f-feea739308b3-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "ce78aa73-e7c6-49e6-ab7f-feea739308b3" (UID: "ce78aa73-e7c6-49e6-ab7f-feea739308b3"). InnerVolumeSpecName "ssh-key-openstack-cell1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:55:02 crc kubenswrapper[4982]: I0122 07:55:02.066069 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce78aa73-e7c6-49e6-ab7f-feea739308b3-inventory" (OuterVolumeSpecName: "inventory") pod "ce78aa73-e7c6-49e6-ab7f-feea739308b3" (UID: "ce78aa73-e7c6-49e6-ab7f-feea739308b3"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:55:02 crc kubenswrapper[4982]: I0122 07:55:02.133017 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g4mnw\" (UniqueName: \"kubernetes.io/projected/ce78aa73-e7c6-49e6-ab7f-feea739308b3-kube-api-access-g4mnw\") on node \"crc\" DevicePath \"\"" Jan 22 07:55:02 crc kubenswrapper[4982]: I0122 07:55:02.133384 4982 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ce78aa73-e7c6-49e6-ab7f-feea739308b3-ceph\") on node \"crc\" DevicePath \"\"" Jan 22 07:55:02 crc kubenswrapper[4982]: I0122 07:55:02.133399 4982 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/ce78aa73-e7c6-49e6-ab7f-feea739308b3-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 22 07:55:02 crc kubenswrapper[4982]: I0122 07:55:02.133414 4982 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ce78aa73-e7c6-49e6-ab7f-feea739308b3-inventory\") on node \"crc\" DevicePath \"\"" Jan 22 07:55:02 crc kubenswrapper[4982]: I0122 07:55:02.452903 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-tdn6j" event={"ID":"ce78aa73-e7c6-49e6-ab7f-feea739308b3","Type":"ContainerDied","Data":"f00d0c0505c54c342f75c014cc6f3852663b8a4fec2dcd81067110c2ae37e82d"} Jan 22 07:55:02 crc kubenswrapper[4982]: I0122 07:55:02.452964 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f00d0c0505c54c342f75c014cc6f3852663b8a4fec2dcd81067110c2ae37e82d" Jan 22 07:55:02 crc kubenswrapper[4982]: I0122 07:55:02.452997 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-tdn6j" Jan 22 07:55:39 crc kubenswrapper[4982]: I0122 07:55:39.044930 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-4q4lz"] Jan 22 07:55:39 crc kubenswrapper[4982]: E0122 07:55:39.046154 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce78aa73-e7c6-49e6-ab7f-feea739308b3" containerName="configure-os-openstack-openstack-cell1" Jan 22 07:55:39 crc kubenswrapper[4982]: I0122 07:55:39.046176 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce78aa73-e7c6-49e6-ab7f-feea739308b3" containerName="configure-os-openstack-openstack-cell1" Jan 22 07:55:39 crc kubenswrapper[4982]: I0122 07:55:39.046417 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce78aa73-e7c6-49e6-ab7f-feea739308b3" containerName="configure-os-openstack-openstack-cell1" Jan 22 07:55:39 crc kubenswrapper[4982]: I0122 07:55:39.047443 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-4q4lz" Jan 22 07:55:39 crc kubenswrapper[4982]: I0122 07:55:39.050585 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 22 07:55:39 crc kubenswrapper[4982]: I0122 07:55:39.050619 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 22 07:55:39 crc kubenswrapper[4982]: I0122 07:55:39.050805 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-mjzt9" Jan 22 07:55:39 crc kubenswrapper[4982]: I0122 07:55:39.052314 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 22 07:55:39 crc kubenswrapper[4982]: I0122 07:55:39.066272 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6549173b-9ad0-4d54-b384-8eec92047777-ceph\") pod \"configure-os-openstack-openstack-cell1-4q4lz\" (UID: \"6549173b-9ad0-4d54-b384-8eec92047777\") " pod="openstack/configure-os-openstack-openstack-cell1-4q4lz" Jan 22 07:55:39 crc kubenswrapper[4982]: I0122 07:55:39.066362 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/6549173b-9ad0-4d54-b384-8eec92047777-ssh-key-openstack-cell1\") pod \"configure-os-openstack-openstack-cell1-4q4lz\" (UID: \"6549173b-9ad0-4d54-b384-8eec92047777\") " pod="openstack/configure-os-openstack-openstack-cell1-4q4lz" Jan 22 07:55:39 crc kubenswrapper[4982]: I0122 07:55:39.066441 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6549173b-9ad0-4d54-b384-8eec92047777-inventory\") pod \"configure-os-openstack-openstack-cell1-4q4lz\" (UID: \"6549173b-9ad0-4d54-b384-8eec92047777\") " pod="openstack/configure-os-openstack-openstack-cell1-4q4lz" Jan 22 07:55:39 crc kubenswrapper[4982]: I0122 07:55:39.066544 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2zbc\" (UniqueName: \"kubernetes.io/projected/6549173b-9ad0-4d54-b384-8eec92047777-kube-api-access-z2zbc\") pod \"configure-os-openstack-openstack-cell1-4q4lz\" (UID: \"6549173b-9ad0-4d54-b384-8eec92047777\") " pod="openstack/configure-os-openstack-openstack-cell1-4q4lz" Jan 22 07:55:39 crc kubenswrapper[4982]: I0122 07:55:39.079329 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-4q4lz"] Jan 22 07:55:39 crc kubenswrapper[4982]: I0122 07:55:39.169354 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6549173b-9ad0-4d54-b384-8eec92047777-inventory\") pod \"configure-os-openstack-openstack-cell1-4q4lz\" (UID: \"6549173b-9ad0-4d54-b384-8eec92047777\") " pod="openstack/configure-os-openstack-openstack-cell1-4q4lz" Jan 22 07:55:39 crc kubenswrapper[4982]: I0122 07:55:39.169427 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2zbc\" (UniqueName: \"kubernetes.io/projected/6549173b-9ad0-4d54-b384-8eec92047777-kube-api-access-z2zbc\") pod \"configure-os-openstack-openstack-cell1-4q4lz\" (UID: \"6549173b-9ad0-4d54-b384-8eec92047777\") " 
pod="openstack/configure-os-openstack-openstack-cell1-4q4lz" Jan 22 07:55:39 crc kubenswrapper[4982]: I0122 07:55:39.169632 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6549173b-9ad0-4d54-b384-8eec92047777-ceph\") pod \"configure-os-openstack-openstack-cell1-4q4lz\" (UID: \"6549173b-9ad0-4d54-b384-8eec92047777\") " pod="openstack/configure-os-openstack-openstack-cell1-4q4lz" Jan 22 07:55:39 crc kubenswrapper[4982]: I0122 07:55:39.169740 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/6549173b-9ad0-4d54-b384-8eec92047777-ssh-key-openstack-cell1\") pod \"configure-os-openstack-openstack-cell1-4q4lz\" (UID: \"6549173b-9ad0-4d54-b384-8eec92047777\") " pod="openstack/configure-os-openstack-openstack-cell1-4q4lz" Jan 22 07:55:39 crc kubenswrapper[4982]: I0122 07:55:39.176744 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/6549173b-9ad0-4d54-b384-8eec92047777-ssh-key-openstack-cell1\") pod \"configure-os-openstack-openstack-cell1-4q4lz\" (UID: \"6549173b-9ad0-4d54-b384-8eec92047777\") " pod="openstack/configure-os-openstack-openstack-cell1-4q4lz" Jan 22 07:55:39 crc kubenswrapper[4982]: I0122 07:55:39.177491 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6549173b-9ad0-4d54-b384-8eec92047777-ceph\") pod \"configure-os-openstack-openstack-cell1-4q4lz\" (UID: \"6549173b-9ad0-4d54-b384-8eec92047777\") " pod="openstack/configure-os-openstack-openstack-cell1-4q4lz" Jan 22 07:55:39 crc kubenswrapper[4982]: I0122 07:55:39.178565 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6549173b-9ad0-4d54-b384-8eec92047777-inventory\") pod \"configure-os-openstack-openstack-cell1-4q4lz\" (UID: \"6549173b-9ad0-4d54-b384-8eec92047777\") " pod="openstack/configure-os-openstack-openstack-cell1-4q4lz" Jan 22 07:55:39 crc kubenswrapper[4982]: I0122 07:55:39.186381 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2zbc\" (UniqueName: \"kubernetes.io/projected/6549173b-9ad0-4d54-b384-8eec92047777-kube-api-access-z2zbc\") pod \"configure-os-openstack-openstack-cell1-4q4lz\" (UID: \"6549173b-9ad0-4d54-b384-8eec92047777\") " pod="openstack/configure-os-openstack-openstack-cell1-4q4lz" Jan 22 07:55:39 crc kubenswrapper[4982]: I0122 07:55:39.374095 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-4q4lz" Jan 22 07:55:39 crc kubenswrapper[4982]: I0122 07:55:39.942968 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-4q4lz"] Jan 22 07:55:40 crc kubenswrapper[4982]: I0122 07:55:40.376373 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 22 07:55:40 crc kubenswrapper[4982]: I0122 07:55:40.836682 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-4q4lz" event={"ID":"6549173b-9ad0-4d54-b384-8eec92047777","Type":"ContainerStarted","Data":"e8bcf57752a9ad624da6cf493c0a23c0d4a06811eb9b5f4bd9fa2c59072d40ba"} Jan 22 07:55:40 crc kubenswrapper[4982]: I0122 07:55:40.836979 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-4q4lz" event={"ID":"6549173b-9ad0-4d54-b384-8eec92047777","Type":"ContainerStarted","Data":"9bab4adfce1a2d55be4749a7111b088b60cf081d1e291128f76632518526670e"} Jan 22 07:55:40 crc kubenswrapper[4982]: I0122 07:55:40.866318 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-openstack-openstack-cell1-4q4lz" podStartSLOduration=1.442972417 podStartE2EDuration="1.866297807s" podCreationTimestamp="2026-01-22 07:55:39 +0000 UTC" firstStartedPulling="2026-01-22 07:55:39.950053271 +0000 UTC m=+7800.788691284" lastFinishedPulling="2026-01-22 07:55:40.373378661 +0000 UTC m=+7801.212016674" observedRunningTime="2026-01-22 07:55:40.861600539 +0000 UTC m=+7801.700238572" watchObservedRunningTime="2026-01-22 07:55:40.866297807 +0000 UTC m=+7801.704935820" Jan 22 07:56:19 crc kubenswrapper[4982]: I0122 07:56:19.253987 4982 generic.go:334] "Generic (PLEG): container finished" podID="6549173b-9ad0-4d54-b384-8eec92047777" containerID="e8bcf57752a9ad624da6cf493c0a23c0d4a06811eb9b5f4bd9fa2c59072d40ba" exitCode=2 Jan 22 07:56:19 crc kubenswrapper[4982]: I0122 07:56:19.254088 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-4q4lz" event={"ID":"6549173b-9ad0-4d54-b384-8eec92047777","Type":"ContainerDied","Data":"e8bcf57752a9ad624da6cf493c0a23c0d4a06811eb9b5f4bd9fa2c59072d40ba"} Jan 22 07:56:20 crc kubenswrapper[4982]: I0122 07:56:20.712026 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-4q4lz" Jan 22 07:56:20 crc kubenswrapper[4982]: I0122 07:56:20.814413 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6549173b-9ad0-4d54-b384-8eec92047777-ceph\") pod \"6549173b-9ad0-4d54-b384-8eec92047777\" (UID: \"6549173b-9ad0-4d54-b384-8eec92047777\") " Jan 22 07:56:20 crc kubenswrapper[4982]: I0122 07:56:20.814468 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/6549173b-9ad0-4d54-b384-8eec92047777-ssh-key-openstack-cell1\") pod \"6549173b-9ad0-4d54-b384-8eec92047777\" (UID: \"6549173b-9ad0-4d54-b384-8eec92047777\") " Jan 22 07:56:20 crc kubenswrapper[4982]: I0122 07:56:20.814599 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6549173b-9ad0-4d54-b384-8eec92047777-inventory\") pod \"6549173b-9ad0-4d54-b384-8eec92047777\" (UID: \"6549173b-9ad0-4d54-b384-8eec92047777\") " Jan 22 07:56:20 crc kubenswrapper[4982]: I0122 07:56:20.814668 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z2zbc\" (UniqueName: \"kubernetes.io/projected/6549173b-9ad0-4d54-b384-8eec92047777-kube-api-access-z2zbc\") pod \"6549173b-9ad0-4d54-b384-8eec92047777\" (UID: \"6549173b-9ad0-4d54-b384-8eec92047777\") " Jan 22 07:56:20 crc kubenswrapper[4982]: I0122 07:56:20.821149 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6549173b-9ad0-4d54-b384-8eec92047777-kube-api-access-z2zbc" (OuterVolumeSpecName: "kube-api-access-z2zbc") pod "6549173b-9ad0-4d54-b384-8eec92047777" (UID: "6549173b-9ad0-4d54-b384-8eec92047777"). InnerVolumeSpecName "kube-api-access-z2zbc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:56:20 crc kubenswrapper[4982]: I0122 07:56:20.821243 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6549173b-9ad0-4d54-b384-8eec92047777-ceph" (OuterVolumeSpecName: "ceph") pod "6549173b-9ad0-4d54-b384-8eec92047777" (UID: "6549173b-9ad0-4d54-b384-8eec92047777"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:56:20 crc kubenswrapper[4982]: I0122 07:56:20.850274 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6549173b-9ad0-4d54-b384-8eec92047777-inventory" (OuterVolumeSpecName: "inventory") pod "6549173b-9ad0-4d54-b384-8eec92047777" (UID: "6549173b-9ad0-4d54-b384-8eec92047777"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:56:20 crc kubenswrapper[4982]: I0122 07:56:20.850630 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6549173b-9ad0-4d54-b384-8eec92047777-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "6549173b-9ad0-4d54-b384-8eec92047777" (UID: "6549173b-9ad0-4d54-b384-8eec92047777"). InnerVolumeSpecName "ssh-key-openstack-cell1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 07:56:20 crc kubenswrapper[4982]: I0122 07:56:20.916743 4982 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6549173b-9ad0-4d54-b384-8eec92047777-inventory\") on node \"crc\" DevicePath \"\"" Jan 22 07:56:20 crc kubenswrapper[4982]: I0122 07:56:20.916779 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z2zbc\" (UniqueName: \"kubernetes.io/projected/6549173b-9ad0-4d54-b384-8eec92047777-kube-api-access-z2zbc\") on node \"crc\" DevicePath \"\"" Jan 22 07:56:20 crc kubenswrapper[4982]: I0122 07:56:20.916791 4982 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6549173b-9ad0-4d54-b384-8eec92047777-ceph\") on node \"crc\" DevicePath \"\"" Jan 22 07:56:20 crc kubenswrapper[4982]: I0122 07:56:20.916803 4982 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/6549173b-9ad0-4d54-b384-8eec92047777-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 22 07:56:21 crc kubenswrapper[4982]: I0122 07:56:21.275224 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-4q4lz" event={"ID":"6549173b-9ad0-4d54-b384-8eec92047777","Type":"ContainerDied","Data":"9bab4adfce1a2d55be4749a7111b088b60cf081d1e291128f76632518526670e"} Jan 22 07:56:21 crc kubenswrapper[4982]: I0122 07:56:21.275270 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9bab4adfce1a2d55be4749a7111b088b60cf081d1e291128f76632518526670e" Jan 22 07:56:21 crc kubenswrapper[4982]: I0122 07:56:21.275331 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-4q4lz" Jan 22 07:57:18 crc kubenswrapper[4982]: I0122 07:57:18.974475 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 07:57:18 crc kubenswrapper[4982]: I0122 07:57:18.975058 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 07:57:23 crc kubenswrapper[4982]: I0122 07:57:23.067818 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-gtxqn/must-gather-7npgj"] Jan 22 07:57:23 crc kubenswrapper[4982]: E0122 07:57:23.071835 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6549173b-9ad0-4d54-b384-8eec92047777" containerName="configure-os-openstack-openstack-cell1" Jan 22 07:57:23 crc kubenswrapper[4982]: I0122 07:57:23.071953 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="6549173b-9ad0-4d54-b384-8eec92047777" containerName="configure-os-openstack-openstack-cell1" Jan 22 07:57:23 crc kubenswrapper[4982]: I0122 07:57:23.073479 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="6549173b-9ad0-4d54-b384-8eec92047777" containerName="configure-os-openstack-openstack-cell1" Jan 22 07:57:23 crc kubenswrapper[4982]: I0122 07:57:23.074761 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gtxqn/must-gather-7npgj" Jan 22 07:57:23 crc kubenswrapper[4982]: I0122 07:57:23.079377 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-gtxqn"/"default-dockercfg-qvpl2" Jan 22 07:57:23 crc kubenswrapper[4982]: I0122 07:57:23.079533 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-gtxqn"/"kube-root-ca.crt" Jan 22 07:57:23 crc kubenswrapper[4982]: I0122 07:57:23.079651 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-gtxqn"/"openshift-service-ca.crt" Jan 22 07:57:23 crc kubenswrapper[4982]: I0122 07:57:23.082646 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-gtxqn/must-gather-7npgj"] Jan 22 07:57:23 crc kubenswrapper[4982]: I0122 07:57:23.268950 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v682f\" (UniqueName: \"kubernetes.io/projected/8b9e5cb3-60c7-4e05-8150-49dc526a5700-kube-api-access-v682f\") pod \"must-gather-7npgj\" (UID: \"8b9e5cb3-60c7-4e05-8150-49dc526a5700\") " pod="openshift-must-gather-gtxqn/must-gather-7npgj" Jan 22 07:57:23 crc kubenswrapper[4982]: I0122 07:57:23.269100 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/8b9e5cb3-60c7-4e05-8150-49dc526a5700-must-gather-output\") pod \"must-gather-7npgj\" (UID: \"8b9e5cb3-60c7-4e05-8150-49dc526a5700\") " pod="openshift-must-gather-gtxqn/must-gather-7npgj" Jan 22 07:57:23 crc kubenswrapper[4982]: I0122 07:57:23.371246 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v682f\" (UniqueName: \"kubernetes.io/projected/8b9e5cb3-60c7-4e05-8150-49dc526a5700-kube-api-access-v682f\") pod \"must-gather-7npgj\" (UID: \"8b9e5cb3-60c7-4e05-8150-49dc526a5700\") " pod="openshift-must-gather-gtxqn/must-gather-7npgj" Jan 22 07:57:23 crc kubenswrapper[4982]: I0122 07:57:23.371464 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/8b9e5cb3-60c7-4e05-8150-49dc526a5700-must-gather-output\") pod \"must-gather-7npgj\" (UID: \"8b9e5cb3-60c7-4e05-8150-49dc526a5700\") " pod="openshift-must-gather-gtxqn/must-gather-7npgj" Jan 22 07:57:23 crc kubenswrapper[4982]: I0122 07:57:23.371887 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/8b9e5cb3-60c7-4e05-8150-49dc526a5700-must-gather-output\") pod \"must-gather-7npgj\" (UID: \"8b9e5cb3-60c7-4e05-8150-49dc526a5700\") " pod="openshift-must-gather-gtxqn/must-gather-7npgj" Jan 22 07:57:23 crc kubenswrapper[4982]: I0122 07:57:23.391892 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v682f\" (UniqueName: \"kubernetes.io/projected/8b9e5cb3-60c7-4e05-8150-49dc526a5700-kube-api-access-v682f\") pod \"must-gather-7npgj\" (UID: \"8b9e5cb3-60c7-4e05-8150-49dc526a5700\") " pod="openshift-must-gather-gtxqn/must-gather-7npgj" Jan 22 07:57:23 crc kubenswrapper[4982]: I0122 07:57:23.399703 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gtxqn/must-gather-7npgj" Jan 22 07:57:23 crc kubenswrapper[4982]: I0122 07:57:23.869030 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-gtxqn/must-gather-7npgj"] Jan 22 07:57:23 crc kubenswrapper[4982]: I0122 07:57:23.872792 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 07:57:23 crc kubenswrapper[4982]: I0122 07:57:23.934808 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gtxqn/must-gather-7npgj" event={"ID":"8b9e5cb3-60c7-4e05-8150-49dc526a5700","Type":"ContainerStarted","Data":"c37be6cb17b18b94efdca792c306581d4dbf58a06af982306f3b3ffa85d02c32"} Jan 22 07:57:34 crc kubenswrapper[4982]: I0122 07:57:34.068583 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gtxqn/must-gather-7npgj" event={"ID":"8b9e5cb3-60c7-4e05-8150-49dc526a5700","Type":"ContainerStarted","Data":"6da092500a4f9c929a1dc40765b006482e67f89cffcab487873a7ca6120a0eee"} Jan 22 07:57:34 crc kubenswrapper[4982]: I0122 07:57:34.069316 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gtxqn/must-gather-7npgj" event={"ID":"8b9e5cb3-60c7-4e05-8150-49dc526a5700","Type":"ContainerStarted","Data":"5d384a6d9ad770c6a14eb52615014c7712d996607aab689a6f2d419415c7831c"} Jan 22 07:57:34 crc kubenswrapper[4982]: I0122 07:57:34.103470 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-gtxqn/must-gather-7npgj" podStartSLOduration=2.155789305 podStartE2EDuration="11.103449164s" podCreationTimestamp="2026-01-22 07:57:23 +0000 UTC" firstStartedPulling="2026-01-22 07:57:23.872572516 +0000 UTC m=+7904.711210519" lastFinishedPulling="2026-01-22 07:57:32.820232335 +0000 UTC m=+7913.658870378" observedRunningTime="2026-01-22 07:57:34.09078976 +0000 UTC m=+7914.929427773" watchObservedRunningTime="2026-01-22 07:57:34.103449164 +0000 UTC m=+7914.942087177" Jan 22 07:57:38 crc kubenswrapper[4982]: I0122 07:57:38.184037 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-gtxqn/crc-debug-8z9hv"] Jan 22 07:57:38 crc kubenswrapper[4982]: I0122 07:57:38.186664 4982 util.go:30] "No sandbox for pod can be found. 
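The pod_startup_latency_tracker entry above appears to report the startup SLO as the end-to-end duration minus the image-pull window: the monotonic (m=) offsets it carries reproduce podStartSLOduration exactly. A minimal Go check with the numbers copied from the log; the subtraction itself is our inference from these values, not something the log states:

    package main

    import "fmt"

    func main() {
            // m= offsets (seconds) from the "Observed pod startup duration" entry
            firstStartedPulling := 7904.711210519
            lastFinishedPulling := 7913.658870378
            e2e := 11.103449164 // podStartE2EDuration

            pull := lastFinishedPulling - firstStartedPulling
            fmt.Printf("pull=%.9fs slo=%.9fs\n", pull, e2e-pull)
            // prints pull=8.947659859s slo=2.155789305s, matching podStartSLOduration
    }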
Need to start a new one" pod="openshift-must-gather-gtxqn/crc-debug-8z9hv" Jan 22 07:57:38 crc kubenswrapper[4982]: I0122 07:57:38.331158 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b13456a5-efa6-4576-9dea-05577a6d6ba7-host\") pod \"crc-debug-8z9hv\" (UID: \"b13456a5-efa6-4576-9dea-05577a6d6ba7\") " pod="openshift-must-gather-gtxqn/crc-debug-8z9hv" Jan 22 07:57:38 crc kubenswrapper[4982]: I0122 07:57:38.331514 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4656z\" (UniqueName: \"kubernetes.io/projected/b13456a5-efa6-4576-9dea-05577a6d6ba7-kube-api-access-4656z\") pod \"crc-debug-8z9hv\" (UID: \"b13456a5-efa6-4576-9dea-05577a6d6ba7\") " pod="openshift-must-gather-gtxqn/crc-debug-8z9hv" Jan 22 07:57:38 crc kubenswrapper[4982]: I0122 07:57:38.433946 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4656z\" (UniqueName: \"kubernetes.io/projected/b13456a5-efa6-4576-9dea-05577a6d6ba7-kube-api-access-4656z\") pod \"crc-debug-8z9hv\" (UID: \"b13456a5-efa6-4576-9dea-05577a6d6ba7\") " pod="openshift-must-gather-gtxqn/crc-debug-8z9hv" Jan 22 07:57:38 crc kubenswrapper[4982]: I0122 07:57:38.434123 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b13456a5-efa6-4576-9dea-05577a6d6ba7-host\") pod \"crc-debug-8z9hv\" (UID: \"b13456a5-efa6-4576-9dea-05577a6d6ba7\") " pod="openshift-must-gather-gtxqn/crc-debug-8z9hv" Jan 22 07:57:38 crc kubenswrapper[4982]: I0122 07:57:38.434238 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b13456a5-efa6-4576-9dea-05577a6d6ba7-host\") pod \"crc-debug-8z9hv\" (UID: \"b13456a5-efa6-4576-9dea-05577a6d6ba7\") " pod="openshift-must-gather-gtxqn/crc-debug-8z9hv" Jan 22 07:57:38 crc kubenswrapper[4982]: I0122 07:57:38.452456 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4656z\" (UniqueName: \"kubernetes.io/projected/b13456a5-efa6-4576-9dea-05577a6d6ba7-kube-api-access-4656z\") pod \"crc-debug-8z9hv\" (UID: \"b13456a5-efa6-4576-9dea-05577a6d6ba7\") " pod="openshift-must-gather-gtxqn/crc-debug-8z9hv" Jan 22 07:57:38 crc kubenswrapper[4982]: I0122 07:57:38.509542 4982 util.go:30] "No sandbox for pod can be found. 
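The two volumes reconciled here are the usual shape of an `oc debug`-style pod: a hostPath volume named "host" exposing the node's root filesystem, plus the projected service-account token (the kube-api-access-* volume, which the API server injects rather than the pod spec declaring it). A sketch of such a pod in client-go terms; the /host mount path and the image are assumptions, since neither appears in the log:

    package main

    import (
            "fmt"

            corev1 "k8s.io/api/core/v1"
            metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    )

    func main() {
            dir := corev1.HostPathDirectory
            pod := &corev1.Pod{
                    ObjectMeta: metav1.ObjectMeta{
                            Name:      "crc-debug-8z9hv",
                            Namespace: "openshift-must-gather-gtxqn",
                    },
                    Spec: corev1.PodSpec{
                            NodeName: "crc", // pinned to the node being debugged
                            Volumes: []corev1.Volume{{
                                    Name: "host", // the hostPath volume mounted above
                                    VolumeSource: corev1.VolumeSource{
                                            HostPath: &corev1.HostPathVolumeSource{Path: "/", Type: &dir},
                                    },
                            }},
                            Containers: []corev1.Container{{
                                    Name:  "container-00", // container name seen later in this log
                                    Image: "example.invalid/support-tools:latest", // placeholder image
                                    VolumeMounts: []corev1.VolumeMount{{
                                            Name:      "host",
                                            MountPath: "/host", // conventional debug mount point; assumed
                                    }},
                            }},
                    },
            }
            fmt.Println(pod.Namespace + "/" + pod.Name)
    }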
Need to start a new one" pod="openshift-must-gather-gtxqn/crc-debug-8z9hv" Jan 22 07:57:39 crc kubenswrapper[4982]: I0122 07:57:39.128498 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gtxqn/crc-debug-8z9hv" event={"ID":"b13456a5-efa6-4576-9dea-05577a6d6ba7","Type":"ContainerStarted","Data":"0c9e86404554053095309e30bf13ef25ad2bfe22852b7065c832c8bf01d80752"} Jan 22 07:57:40 crc kubenswrapper[4982]: I0122 07:57:40.836116 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_4b519ad5-12f9-4681-851d-a7da821a20f2/alertmanager/0.log" Jan 22 07:57:40 crc kubenswrapper[4982]: I0122 07:57:40.842023 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_4b519ad5-12f9-4681-851d-a7da821a20f2/config-reloader/0.log" Jan 22 07:57:40 crc kubenswrapper[4982]: I0122 07:57:40.857037 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_4b519ad5-12f9-4681-851d-a7da821a20f2/init-config-reloader/0.log" Jan 22 07:57:40 crc kubenswrapper[4982]: I0122 07:57:40.885842 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_8a8a4564-d7d6-4801-8bdd-e4554fcf08e0/aodh-api/0.log" Jan 22 07:57:40 crc kubenswrapper[4982]: I0122 07:57:40.900188 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_8a8a4564-d7d6-4801-8bdd-e4554fcf08e0/aodh-evaluator/0.log" Jan 22 07:57:40 crc kubenswrapper[4982]: I0122 07:57:40.905747 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_8a8a4564-d7d6-4801-8bdd-e4554fcf08e0/aodh-notifier/0.log" Jan 22 07:57:40 crc kubenswrapper[4982]: I0122 07:57:40.915965 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_8a8a4564-d7d6-4801-8bdd-e4554fcf08e0/aodh-listener/0.log" Jan 22 07:57:40 crc kubenswrapper[4982]: I0122 07:57:40.953077 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-856577cd68-rkqcj_1be85ddb-0864-4dc7-bd72-8f3cc1443bd5/barbican-api-log/0.log" Jan 22 07:57:40 crc kubenswrapper[4982]: I0122 07:57:40.965214 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-856577cd68-rkqcj_1be85ddb-0864-4dc7-bd72-8f3cc1443bd5/barbican-api/0.log" Jan 22 07:57:40 crc kubenswrapper[4982]: I0122 07:57:40.996357 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-775476cb98-9sq5r_8741cfe8-0ee7-41ac-9740-b58be69eb376/barbican-keystone-listener-log/0.log" Jan 22 07:57:41 crc kubenswrapper[4982]: I0122 07:57:41.003589 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-775476cb98-9sq5r_8741cfe8-0ee7-41ac-9740-b58be69eb376/barbican-keystone-listener/0.log" Jan 22 07:57:41 crc kubenswrapper[4982]: I0122 07:57:41.030837 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6d5cd9b87c-dt272_ceadad66-b472-4bf5-ba62-2d91e7eb8b9f/barbican-worker-log/0.log" Jan 22 07:57:41 crc kubenswrapper[4982]: I0122 07:57:41.047794 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6d5cd9b87c-dt272_ceadad66-b472-4bf5-ba62-2d91e7eb8b9f/barbican-worker/0.log" Jan 22 07:57:41 crc kubenswrapper[4982]: I0122 07:57:41.092816 4982 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_bootstrap-openstack-openstack-cell1-98c2s_961e9f4d-7835-4d04-bdde-dbcdb6f54c2b/bootstrap-openstack-openstack-cell1/0.log" Jan 22 07:57:41 crc kubenswrapper[4982]: I0122 07:57:41.125114 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_34fb3c90-dfaf-44b1-a53a-b07bca95e3b3/ceilometer-central-agent/0.log" Jan 22 07:57:41 crc kubenswrapper[4982]: I0122 07:57:41.147969 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_34fb3c90-dfaf-44b1-a53a-b07bca95e3b3/ceilometer-notification-agent/0.log" Jan 22 07:57:41 crc kubenswrapper[4982]: I0122 07:57:41.157062 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_34fb3c90-dfaf-44b1-a53a-b07bca95e3b3/sg-core/0.log" Jan 22 07:57:41 crc kubenswrapper[4982]: I0122 07:57:41.170962 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_34fb3c90-dfaf-44b1-a53a-b07bca95e3b3/proxy-httpd/0.log" Jan 22 07:57:41 crc kubenswrapper[4982]: I0122 07:57:41.316802 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_333f6482-96da-4fae-889a-b4fb57b0b089/cinder-api-log/0.log" Jan 22 07:57:41 crc kubenswrapper[4982]: I0122 07:57:41.386950 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_333f6482-96da-4fae-889a-b4fb57b0b089/cinder-api/0.log" Jan 22 07:57:41 crc kubenswrapper[4982]: I0122 07:57:41.555024 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_bbb464cf-fa45-4bec-b621-87a818a02156/cinder-backup/0.log" Jan 22 07:57:41 crc kubenswrapper[4982]: I0122 07:57:41.638372 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_bbb464cf-fa45-4bec-b621-87a818a02156/probe/0.log" Jan 22 07:57:41 crc kubenswrapper[4982]: I0122 07:57:41.685035 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_1e736e92-0f22-4a90-bc7a-7110b5d234a3/cinder-scheduler/0.log" Jan 22 07:57:41 crc kubenswrapper[4982]: I0122 07:57:41.744821 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_1e736e92-0f22-4a90-bc7a-7110b5d234a3/probe/0.log" Jan 22 07:57:41 crc kubenswrapper[4982]: I0122 07:57:41.826550 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_d12a7451-3edb-4f27-ac05-9bf90e55a4c8/cinder-volume/0.log" Jan 22 07:57:41 crc kubenswrapper[4982]: I0122 07:57:41.930657 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_d12a7451-3edb-4f27-ac05-9bf90e55a4c8/probe/0.log" Jan 22 07:57:41 crc kubenswrapper[4982]: I0122 07:57:41.951078 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-openstack-openstack-cell1-zbmlr_1f1ea1bc-d8d8-4cfb-93bf-86884d573976/configure-network-openstack-openstack-cell1/0.log" Jan 22 07:57:41 crc kubenswrapper[4982]: I0122 07:57:41.973950 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-openstack-openstack-cell1-4q4lz_6549173b-9ad0-4d54-b384-8eec92047777/configure-os-openstack-openstack-cell1/0.log" Jan 22 07:57:42 crc kubenswrapper[4982]: I0122 07:57:42.002227 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-openstack-openstack-cell1-c8gjr_2b3e9d65-16d2-49f1-8953-84799f33a04a/configure-os-openstack-openstack-cell1/0.log" Jan 22 07:57:42 crc kubenswrapper[4982]: I0122 07:57:42.023604 
4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-openstack-openstack-cell1-ntxdv_4349a4d4-0264-4986-b1ba-02ee440c913b/configure-os-openstack-openstack-cell1/0.log" Jan 22 07:57:42 crc kubenswrapper[4982]: I0122 07:57:42.047391 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-openstack-openstack-cell1-tdn6j_ce78aa73-e7c6-49e6-ab7f-feea739308b3/configure-os-openstack-openstack-cell1/0.log" Jan 22 07:57:42 crc kubenswrapper[4982]: I0122 07:57:42.059078 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-79c985c4bf-l49md_00dfc991-c4f4-48d0-8cba-e45257f5fbf9/dnsmasq-dns/0.log" Jan 22 07:57:42 crc kubenswrapper[4982]: I0122 07:57:42.063734 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-79c985c4bf-l49md_00dfc991-c4f4-48d0-8cba-e45257f5fbf9/init/0.log" Jan 22 07:57:42 crc kubenswrapper[4982]: I0122 07:57:42.093423 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-openstack-openstack-cell1-68mv5_aaff985b-bf0d-4d70-90d3-4cac37e68578/download-cache-openstack-openstack-cell1/0.log" Jan 22 07:57:42 crc kubenswrapper[4982]: I0122 07:57:42.103294 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_6b55cf19-d25f-4825-b61e-78123b248e23/glance-log/0.log" Jan 22 07:57:42 crc kubenswrapper[4982]: I0122 07:57:42.121087 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_6b55cf19-d25f-4825-b61e-78123b248e23/glance-httpd/0.log" Jan 22 07:57:42 crc kubenswrapper[4982]: I0122 07:57:42.133411 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_61476283-6be6-45db-b4dd-e4cb19492799/glance-log/0.log" Jan 22 07:57:42 crc kubenswrapper[4982]: I0122 07:57:42.154204 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_61476283-6be6-45db-b4dd-e4cb19492799/glance-httpd/0.log" Jan 22 07:57:42 crc kubenswrapper[4982]: I0122 07:57:42.228344 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-api-5bf47d4589-c46dt_f1905031-b59c-4a3d-86a1-356d3e836819/heat-api/0.log" Jan 22 07:57:42 crc kubenswrapper[4982]: I0122 07:57:42.291054 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-cfnapi-7b7b667b8-cwq46_a02ed1aa-0801-4568-b3dd-e6b12107f7e4/heat-cfnapi/0.log" Jan 22 07:57:42 crc kubenswrapper[4982]: I0122 07:57:42.306673 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-engine-6dd64c5f44-z9ffh_d2e332c1-9410-467f-9149-368f1c250c4a/heat-engine/0.log" Jan 22 07:57:42 crc kubenswrapper[4982]: I0122 07:57:42.347589 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-5cb8668ff7-jmj7z_3e687c53-388f-46e5-ae9e-b42960a66206/horizon-log/0.log" Jan 22 07:57:42 crc kubenswrapper[4982]: I0122 07:57:42.431770 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-5cb8668ff7-jmj7z_3e687c53-388f-46e5-ae9e-b42960a66206/horizon/0.log" Jan 22 07:57:42 crc kubenswrapper[4982]: I0122 07:57:42.475306 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-openstack-openstack-cell1-gpklq_6fc5f931-2ac0-4cf7-9938-532c8e526580/install-os-openstack-openstack-cell1/0.log" Jan 22 07:57:42 crc kubenswrapper[4982]: I0122 07:57:42.664230 4982 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_keystone-757b4696-655wn_47389627-50a0-42af-b9df-efef6b63429f/keystone-api/0.log" Jan 22 07:57:42 crc kubenswrapper[4982]: I0122 07:57:42.671779 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_e4b2d188-afb5-4d60-8376-b187161abe16/kube-state-metrics/0.log" Jan 22 07:57:42 crc kubenswrapper[4982]: I0122 07:57:42.683802 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_630f9011-553f-445e-97a7-35448f3bf607/manila-api-log/0.log" Jan 22 07:57:42 crc kubenswrapper[4982]: I0122 07:57:42.777244 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_630f9011-553f-445e-97a7-35448f3bf607/manila-api/0.log" Jan 22 07:57:42 crc kubenswrapper[4982]: I0122 07:57:42.864468 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3/manila-scheduler/0.log" Jan 22 07:57:42 crc kubenswrapper[4982]: I0122 07:57:42.872403 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_5e3e77b2-4dfc-4461-a0d9-c3ddc906e4f3/probe/0.log" Jan 22 07:57:42 crc kubenswrapper[4982]: I0122 07:57:42.908946 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_39694168-7dd0-449e-b3f7-fdc47ba2689e/manila-share/0.log" Jan 22 07:57:42 crc kubenswrapper[4982]: I0122 07:57:42.917188 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_39694168-7dd0-449e-b3f7-fdc47ba2689e/probe/0.log" Jan 22 07:57:42 crc kubenswrapper[4982]: I0122 07:57:42.927309 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-copy-data_7b376af2-ced4-4234-a949-f3c81e220a11/adoption/0.log" Jan 22 07:57:48 crc kubenswrapper[4982]: I0122 07:57:48.973373 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 07:57:48 crc kubenswrapper[4982]: I0122 07:57:48.973910 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 07:57:50 crc kubenswrapper[4982]: I0122 07:57:50.663348 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_b227581a-1612-4b75-849d-b7318781a24b/memcached/0.log" Jan 22 07:57:50 crc kubenswrapper[4982]: I0122 07:57:50.743378 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-57b5cdf6b5-f7gh7_b2809521-5cf5-49c3-bf1c-b8eefa79cd1b/neutron-api/0.log" Jan 22 07:57:50 crc kubenswrapper[4982]: I0122 07:57:50.794928 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-57b5cdf6b5-f7gh7_b2809521-5cf5-49c3-bf1c-b8eefa79cd1b/neutron-httpd/0.log" Jan 22 07:57:50 crc kubenswrapper[4982]: I0122 07:57:50.987498 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_ad9cd174-57bf-44d9-b5aa-abeccc5bcac8/nova-api-log/0.log" Jan 22 07:57:51 crc kubenswrapper[4982]: I0122 07:57:51.069651 4982 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_nova-api-0_ad9cd174-57bf-44d9-b5aa-abeccc5bcac8/nova-api-api/0.log" Jan 22 07:57:51 crc kubenswrapper[4982]: I0122 07:57:51.157261 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_6ec5622e-1965-4e4e-ab7d-104745c8a065/nova-cell0-conductor-conductor/0.log" Jan 22 07:57:51 crc kubenswrapper[4982]: I0122 07:57:51.240718 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_5720f611-259b-475d-b62b-12e2121dc041/nova-cell1-conductor-conductor/0.log" Jan 22 07:57:51 crc kubenswrapper[4982]: I0122 07:57:51.669404 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_8db70abd-afc0-4f5b-90bc-d146bcdb076c/nova-cell1-novncproxy-novncproxy/0.log" Jan 22 07:57:51 crc kubenswrapper[4982]: I0122 07:57:51.732798 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_272317b4-c796-4e4f-9867-1d37a2bf3ac7/nova-metadata-log/0.log" Jan 22 07:57:51 crc kubenswrapper[4982]: I0122 07:57:51.836641 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_272317b4-c796-4e4f-9867-1d37a2bf3ac7/nova-metadata-metadata/0.log" Jan 22 07:57:51 crc kubenswrapper[4982]: I0122 07:57:51.915053 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_e492a7ae-fef5-4822-ba8f-2d01628d8d89/nova-scheduler-scheduler/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.103237 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-6747f58b9b-2xqnm_4e1c0c86-a174-408f-bc49-d4cfca199c8a/octavia-api/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.120255 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-6747f58b9b-2xqnm_4e1c0c86-a174-408f-bc49-d4cfca199c8a/octavia-api-provider-agent/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.128643 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-6747f58b9b-2xqnm_4e1c0c86-a174-408f-bc49-d4cfca199c8a/init/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.181417 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-k7x9x_a6ef98c3-bddb-469d-b06d-7457555f44b9/octavia-healthmanager/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.196307 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-k7x9x_a6ef98c3-bddb-469d-b06d-7457555f44b9/init/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.222749 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-housekeeping-zn9sp_e995cc53-c18b-4c88-b635-c5ea5798b026/octavia-housekeeping/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.233735 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-housekeeping-zn9sp_e995cc53-c18b-4c88-b635-c5ea5798b026/init/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.244872 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-rsyslog-5jnkz_4475df5c-51e9-47ec-9ff8-6030b5b7377e/octavia-rsyslog/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.255323 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-rsyslog-5jnkz_4475df5c-51e9-47ec-9ff8-6030b5b7377e/init/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.298520 4982 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-must-gather-gtxqn/crc-debug-8z9hv" event={"ID":"b13456a5-efa6-4576-9dea-05577a6d6ba7","Type":"ContainerStarted","Data":"06bbadccfbfa508b88145f8e0b737df9e9d4f7f6d076a61eab483cf27c7c8ea4"} Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.315011 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-gtxqn/crc-debug-8z9hv" podStartSLOduration=1.110456239 podStartE2EDuration="14.314992369s" podCreationTimestamp="2026-01-22 07:57:38 +0000 UTC" firstStartedPulling="2026-01-22 07:57:38.581399874 +0000 UTC m=+7919.420037877" lastFinishedPulling="2026-01-22 07:57:51.785936004 +0000 UTC m=+7932.624574007" observedRunningTime="2026-01-22 07:57:52.310365054 +0000 UTC m=+7933.149003057" watchObservedRunningTime="2026-01-22 07:57:52.314992369 +0000 UTC m=+7933.153630372" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.387630 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-worker-9swd4_ce14677e-7c96-4c5c-a9a8-76747e002f27/octavia-worker/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.395329 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-worker-9swd4_ce14677e-7c96-4c5c-a9a8-76747e002f27/init/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.417526 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0/galera/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.432830 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_8221fbb5-ceb8-43a9-b0b8-5ee893afc8f0/mysql-bootstrap/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.467832 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_02fcef15-a945-4200-8b5f-9697d0a2695f/galera/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.478749 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_02fcef15-a945-4200-8b5f-9697d0a2695f/mysql-bootstrap/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.488662 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_a473cc16-7761-429e-b28a-b2d502832ef9/openstackclient/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.498616 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-tzznv_060cb2fa-6324-4520-a00d-6345ef4126fb/openstack-network-exporter/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.517095 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-qmkb2_da84874a-a3bf-4585-b0ab-08ac6143cd32/ovsdb-server/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.532755 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-qmkb2_da84874a-a3bf-4585-b0ab-08ac6143cd32/ovs-vswitchd/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.541554 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-qmkb2_da84874a-a3bf-4585-b0ab-08ac6143cd32/ovsdb-server-init/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.554203 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-twjrm_4b1018e3-51c0-493b-b135-aac6ea8d246c/ovn-controller/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.563951 4982 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack_ovn-copy-data_219b433b-fe06-4df4-856a-faf3851da20f/adoption/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.579080 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_63d7dcb7-9570-45b9-8b62-46af00032f19/ovn-northd/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.587060 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_63d7dcb7-9570-45b9-8b62-46af00032f19/openstack-network-exporter/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.601464 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_002ade31-2880-47c6-9ed7-5bb12d2bcda9/ovsdbserver-nb/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.605539 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_002ade31-2880-47c6-9ed7-5bb12d2bcda9/openstack-network-exporter/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.622594 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5/ovsdbserver-nb/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.627179 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_79ec12c1-c68b-4ca4-8ac1-3a3ef740eda5/openstack-network-exporter/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.645576 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_bded9234-3e82-419e-82b4-a43809301833/ovsdbserver-nb/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.653258 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_bded9234-3e82-419e-82b4-a43809301833/openstack-network-exporter/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.677766 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_d077ea28-6c68-4940-b841-7d113bcd8394/ovsdbserver-sb/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.684069 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_d077ea28-6c68-4940-b841-7d113bcd8394/openstack-network-exporter/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.704769 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_895d6b93-d214-4982-9246-43d77890c2f3/ovsdbserver-sb/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.714842 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_895d6b93-d214-4982-9246-43d77890c2f3/openstack-network-exporter/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.734543 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_ef8a87c1-3a5f-4159-af3f-f854b1b16ef6/ovsdbserver-sb/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.739506 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_ef8a87c1-3a5f-4159-af3f-f854b1b16ef6/openstack-network-exporter/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.779385 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-85d8bfd68d-h9vnb_733b14c4-1832-4215-8748-228dd7e5ceab/placement-log/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.806725 4982 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_placement-85d8bfd68d-h9vnb_733b14c4-1832-4215-8748-228dd7e5ceab/placement-api/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.822179 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_pre-adoption-validation-openstack-pre-adoption-openstack-cmxq6f_d2cafe5d-fd32-4e7b-befb-77bb775043da/pre-adoption-validation-openstack-pre-adoption-openstack-cell1/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.841319 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_a4d1e87c-2e5d-4f07-b5aa-f56d61651468/prometheus/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.850934 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_a4d1e87c-2e5d-4f07-b5aa-f56d61651468/config-reloader/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.857789 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_a4d1e87c-2e5d-4f07-b5aa-f56d61651468/thanos-sidecar/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.867077 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_a4d1e87c-2e5d-4f07-b5aa-f56d61651468/init-config-reloader/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.889428 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_7214d601-3cd6-4365-981f-2d254e99d620/rabbitmq/0.log" Jan 22 07:57:52 crc kubenswrapper[4982]: I0122 07:57:52.894648 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_7214d601-3cd6-4365-981f-2d254e99d620/setup-container/0.log" Jan 22 07:57:53 crc kubenswrapper[4982]: I0122 07:57:53.166507 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_0e26bf89-f462-40be-8b10-8fb6e1507bf8/rabbitmq/0.log" Jan 22 07:57:53 crc kubenswrapper[4982]: I0122 07:57:53.171996 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_0e26bf89-f462-40be-8b10-8fb6e1507bf8/setup-container/0.log" Jan 22 07:57:53 crc kubenswrapper[4982]: I0122 07:57:53.301960 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tripleo-cleanup-tripleo-cleanup-openstack-cell1-hmjhp_a0001918-c9c5-4f77-8d8a-c0021c280883/tripleo-cleanup-tripleo-cleanup-openstack-cell1/0.log" Jan 22 07:57:53 crc kubenswrapper[4982]: I0122 07:57:53.313136 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-openstack-openstack-cell1-zcf8c_8be015a3-c616-41d3-af20-ffcfb571653a/validate-network-openstack-openstack-cell1/0.log" Jan 22 07:58:05 crc kubenswrapper[4982]: I0122 07:58:05.962677 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-z65sv_d8de638b-d0f4-419d-8df5-d0892e3ffc6d/controller/0.log" Jan 22 07:58:05 crc kubenswrapper[4982]: I0122 07:58:05.976732 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-z65sv_d8de638b-d0f4-419d-8df5-d0892e3ffc6d/kube-rbac-proxy/0.log" Jan 22 07:58:06 crc kubenswrapper[4982]: I0122 07:58:06.007945 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4v5kf_8b5087c1-3eb3-46f7-8013-66ffbf2e6f69/controller/0.log" Jan 22 07:58:09 crc kubenswrapper[4982]: I0122 07:58:09.363644 4982 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-4v5kf_8b5087c1-3eb3-46f7-8013-66ffbf2e6f69/frr/0.log" Jan 22 07:58:09 crc kubenswrapper[4982]: I0122 07:58:09.374823 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4v5kf_8b5087c1-3eb3-46f7-8013-66ffbf2e6f69/reloader/0.log" Jan 22 07:58:09 crc kubenswrapper[4982]: I0122 07:58:09.379759 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4v5kf_8b5087c1-3eb3-46f7-8013-66ffbf2e6f69/frr-metrics/0.log" Jan 22 07:58:09 crc kubenswrapper[4982]: I0122 07:58:09.387166 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4v5kf_8b5087c1-3eb3-46f7-8013-66ffbf2e6f69/kube-rbac-proxy/0.log" Jan 22 07:58:09 crc kubenswrapper[4982]: I0122 07:58:09.396141 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4v5kf_8b5087c1-3eb3-46f7-8013-66ffbf2e6f69/kube-rbac-proxy-frr/0.log" Jan 22 07:58:09 crc kubenswrapper[4982]: I0122 07:58:09.401490 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4v5kf_8b5087c1-3eb3-46f7-8013-66ffbf2e6f69/cp-frr-files/0.log" Jan 22 07:58:09 crc kubenswrapper[4982]: I0122 07:58:09.408372 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4v5kf_8b5087c1-3eb3-46f7-8013-66ffbf2e6f69/cp-reloader/0.log" Jan 22 07:58:09 crc kubenswrapper[4982]: I0122 07:58:09.416273 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4v5kf_8b5087c1-3eb3-46f7-8013-66ffbf2e6f69/cp-metrics/0.log" Jan 22 07:58:09 crc kubenswrapper[4982]: I0122 07:58:09.427152 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-5f6hr_14e2cb27-a2f0-45fe-8d0a-3be2c34c57aa/frr-k8s-webhook-server/0.log" Jan 22 07:58:09 crc kubenswrapper[4982]: I0122 07:58:09.456022 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-5bc8cc58c6-wjczj_44543400-72e1-4ae7-976e-901b6ebac4b2/manager/0.log" Jan 22 07:58:09 crc kubenswrapper[4982]: I0122 07:58:09.467831 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-579c7888b9-9pz77_2b28eec2-730e-422f-b384-8b60cb00a45f/webhook-server/0.log" Jan 22 07:58:09 crc kubenswrapper[4982]: I0122 07:58:09.472099 4982 generic.go:334] "Generic (PLEG): container finished" podID="b13456a5-efa6-4576-9dea-05577a6d6ba7" containerID="06bbadccfbfa508b88145f8e0b737df9e9d4f7f6d076a61eab483cf27c7c8ea4" exitCode=0 Jan 22 07:58:09 crc kubenswrapper[4982]: I0122 07:58:09.472139 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gtxqn/crc-debug-8z9hv" event={"ID":"b13456a5-efa6-4576-9dea-05577a6d6ba7","Type":"ContainerDied","Data":"06bbadccfbfa508b88145f8e0b737df9e9d4f7f6d076a61eab483cf27c7c8ea4"} Jan 22 07:58:10 crc kubenswrapper[4982]: I0122 07:58:10.072235 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-7w7hm_2df7325f-3ac5-49eb-9c91-adc20d7b3e80/speaker/0.log" Jan 22 07:58:10 crc kubenswrapper[4982]: I0122 07:58:10.078104 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-7w7hm_2df7325f-3ac5-49eb-9c91-adc20d7b3e80/kube-rbac-proxy/0.log" Jan 22 07:58:10 crc kubenswrapper[4982]: I0122 07:58:10.613528 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gtxqn/crc-debug-8z9hv" Jan 22 07:58:10 crc kubenswrapper[4982]: I0122 07:58:10.656474 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-gtxqn/crc-debug-8z9hv"] Jan 22 07:58:10 crc kubenswrapper[4982]: I0122 07:58:10.666633 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-gtxqn/crc-debug-8z9hv"] Jan 22 07:58:10 crc kubenswrapper[4982]: I0122 07:58:10.675060 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4656z\" (UniqueName: \"kubernetes.io/projected/b13456a5-efa6-4576-9dea-05577a6d6ba7-kube-api-access-4656z\") pod \"b13456a5-efa6-4576-9dea-05577a6d6ba7\" (UID: \"b13456a5-efa6-4576-9dea-05577a6d6ba7\") " Jan 22 07:58:10 crc kubenswrapper[4982]: I0122 07:58:10.675151 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b13456a5-efa6-4576-9dea-05577a6d6ba7-host\") pod \"b13456a5-efa6-4576-9dea-05577a6d6ba7\" (UID: \"b13456a5-efa6-4576-9dea-05577a6d6ba7\") " Jan 22 07:58:10 crc kubenswrapper[4982]: I0122 07:58:10.675249 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b13456a5-efa6-4576-9dea-05577a6d6ba7-host" (OuterVolumeSpecName: "host") pod "b13456a5-efa6-4576-9dea-05577a6d6ba7" (UID: "b13456a5-efa6-4576-9dea-05577a6d6ba7"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 07:58:10 crc kubenswrapper[4982]: I0122 07:58:10.675741 4982 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b13456a5-efa6-4576-9dea-05577a6d6ba7-host\") on node \"crc\" DevicePath \"\"" Jan 22 07:58:10 crc kubenswrapper[4982]: I0122 07:58:10.685245 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b13456a5-efa6-4576-9dea-05577a6d6ba7-kube-api-access-4656z" (OuterVolumeSpecName: "kube-api-access-4656z") pod "b13456a5-efa6-4576-9dea-05577a6d6ba7" (UID: "b13456a5-efa6-4576-9dea-05577a6d6ba7"). InnerVolumeSpecName "kube-api-access-4656z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:58:10 crc kubenswrapper[4982]: I0122 07:58:10.777196 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4656z\" (UniqueName: \"kubernetes.io/projected/b13456a5-efa6-4576-9dea-05577a6d6ba7-kube-api-access-4656z\") on node \"crc\" DevicePath \"\"" Jan 22 07:58:11 crc kubenswrapper[4982]: I0122 07:58:11.496040 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0c9e86404554053095309e30bf13ef25ad2bfe22852b7065c832c8bf01d80752" Jan 22 07:58:11 crc kubenswrapper[4982]: I0122 07:58:11.496088 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gtxqn/crc-debug-8z9hv" Jan 22 07:58:11 crc kubenswrapper[4982]: I0122 07:58:11.729683 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b13456a5-efa6-4576-9dea-05577a6d6ba7" path="/var/lib/kubelet/pods/b13456a5-efa6-4576-9dea-05577a6d6ba7/volumes" Jan 22 07:58:11 crc kubenswrapper[4982]: I0122 07:58:11.842640 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-gtxqn/crc-debug-gj8rd"] Jan 22 07:58:11 crc kubenswrapper[4982]: E0122 07:58:11.843175 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b13456a5-efa6-4576-9dea-05577a6d6ba7" containerName="container-00" Jan 22 07:58:11 crc kubenswrapper[4982]: I0122 07:58:11.843195 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b13456a5-efa6-4576-9dea-05577a6d6ba7" containerName="container-00" Jan 22 07:58:11 crc kubenswrapper[4982]: I0122 07:58:11.843463 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b13456a5-efa6-4576-9dea-05577a6d6ba7" containerName="container-00" Jan 22 07:58:11 crc kubenswrapper[4982]: I0122 07:58:11.844644 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-gtxqn/crc-debug-gj8rd" Jan 22 07:58:12 crc kubenswrapper[4982]: I0122 07:58:12.003795 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/32476bfd-d76d-4f89-8db4-1c04385f141e-host\") pod \"crc-debug-gj8rd\" (UID: \"32476bfd-d76d-4f89-8db4-1c04385f141e\") " pod="openshift-must-gather-gtxqn/crc-debug-gj8rd" Jan 22 07:58:12 crc kubenswrapper[4982]: I0122 07:58:12.004011 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9ftt\" (UniqueName: \"kubernetes.io/projected/32476bfd-d76d-4f89-8db4-1c04385f141e-kube-api-access-x9ftt\") pod \"crc-debug-gj8rd\" (UID: \"32476bfd-d76d-4f89-8db4-1c04385f141e\") " pod="openshift-must-gather-gtxqn/crc-debug-gj8rd" Jan 22 07:58:12 crc kubenswrapper[4982]: I0122 07:58:12.105842 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/32476bfd-d76d-4f89-8db4-1c04385f141e-host\") pod \"crc-debug-gj8rd\" (UID: \"32476bfd-d76d-4f89-8db4-1c04385f141e\") " pod="openshift-must-gather-gtxqn/crc-debug-gj8rd" Jan 22 07:58:12 crc kubenswrapper[4982]: I0122 07:58:12.106241 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9ftt\" (UniqueName: \"kubernetes.io/projected/32476bfd-d76d-4f89-8db4-1c04385f141e-kube-api-access-x9ftt\") pod \"crc-debug-gj8rd\" (UID: \"32476bfd-d76d-4f89-8db4-1c04385f141e\") " pod="openshift-must-gather-gtxqn/crc-debug-gj8rd" Jan 22 07:58:12 crc kubenswrapper[4982]: I0122 07:58:12.106035 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/32476bfd-d76d-4f89-8db4-1c04385f141e-host\") pod \"crc-debug-gj8rd\" (UID: \"32476bfd-d76d-4f89-8db4-1c04385f141e\") " pod="openshift-must-gather-gtxqn/crc-debug-gj8rd" Jan 22 07:58:12 crc kubenswrapper[4982]: I0122 07:58:12.135577 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9ftt\" (UniqueName: \"kubernetes.io/projected/32476bfd-d76d-4f89-8db4-1c04385f141e-kube-api-access-x9ftt\") pod \"crc-debug-gj8rd\" (UID: \"32476bfd-d76d-4f89-8db4-1c04385f141e\") " 
pod="openshift-must-gather-gtxqn/crc-debug-gj8rd" Jan 22 07:58:12 crc kubenswrapper[4982]: I0122 07:58:12.162761 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-gtxqn/crc-debug-gj8rd" Jan 22 07:58:12 crc kubenswrapper[4982]: I0122 07:58:12.505964 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gtxqn/crc-debug-gj8rd" event={"ID":"32476bfd-d76d-4f89-8db4-1c04385f141e","Type":"ContainerStarted","Data":"63a2f9c61b22229f6957d5d64f8bc802f7c7003b5033c74b8ca2ad5fd0466703"} Jan 22 07:58:12 crc kubenswrapper[4982]: I0122 07:58:12.507347 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gtxqn/crc-debug-gj8rd" event={"ID":"32476bfd-d76d-4f89-8db4-1c04385f141e","Type":"ContainerStarted","Data":"2bc63dba8d7989ac6493fc5707768f05179f68b731c129bd1877fd9718ce1a4e"} Jan 22 07:58:12 crc kubenswrapper[4982]: I0122 07:58:12.580726 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-gtxqn/crc-debug-gj8rd" podStartSLOduration=1.5807004820000001 podStartE2EDuration="1.580700482s" podCreationTimestamp="2026-01-22 07:58:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 07:58:12.566874497 +0000 UTC m=+7953.405512500" watchObservedRunningTime="2026-01-22 07:58:12.580700482 +0000 UTC m=+7953.419338495" Jan 22 07:58:13 crc kubenswrapper[4982]: I0122 07:58:13.519864 4982 generic.go:334] "Generic (PLEG): container finished" podID="32476bfd-d76d-4f89-8db4-1c04385f141e" containerID="63a2f9c61b22229f6957d5d64f8bc802f7c7003b5033c74b8ca2ad5fd0466703" exitCode=1 Jan 22 07:58:13 crc kubenswrapper[4982]: I0122 07:58:13.519978 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gtxqn/crc-debug-gj8rd" event={"ID":"32476bfd-d76d-4f89-8db4-1c04385f141e","Type":"ContainerDied","Data":"63a2f9c61b22229f6957d5d64f8bc802f7c7003b5033c74b8ca2ad5fd0466703"} Jan 22 07:58:14 crc kubenswrapper[4982]: I0122 07:58:14.637795 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gtxqn/crc-debug-gj8rd" Jan 22 07:58:14 crc kubenswrapper[4982]: I0122 07:58:14.677294 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-gtxqn/crc-debug-gj8rd"] Jan 22 07:58:14 crc kubenswrapper[4982]: I0122 07:58:14.686572 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-gtxqn/crc-debug-gj8rd"] Jan 22 07:58:14 crc kubenswrapper[4982]: I0122 07:58:14.765411 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/32476bfd-d76d-4f89-8db4-1c04385f141e-host\") pod \"32476bfd-d76d-4f89-8db4-1c04385f141e\" (UID: \"32476bfd-d76d-4f89-8db4-1c04385f141e\") " Jan 22 07:58:14 crc kubenswrapper[4982]: I0122 07:58:14.765700 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x9ftt\" (UniqueName: \"kubernetes.io/projected/32476bfd-d76d-4f89-8db4-1c04385f141e-kube-api-access-x9ftt\") pod \"32476bfd-d76d-4f89-8db4-1c04385f141e\" (UID: \"32476bfd-d76d-4f89-8db4-1c04385f141e\") " Jan 22 07:58:14 crc kubenswrapper[4982]: I0122 07:58:14.767116 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/32476bfd-d76d-4f89-8db4-1c04385f141e-host" (OuterVolumeSpecName: "host") pod "32476bfd-d76d-4f89-8db4-1c04385f141e" (UID: "32476bfd-d76d-4f89-8db4-1c04385f141e"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 07:58:14 crc kubenswrapper[4982]: I0122 07:58:14.772482 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32476bfd-d76d-4f89-8db4-1c04385f141e-kube-api-access-x9ftt" (OuterVolumeSpecName: "kube-api-access-x9ftt") pod "32476bfd-d76d-4f89-8db4-1c04385f141e" (UID: "32476bfd-d76d-4f89-8db4-1c04385f141e"). InnerVolumeSpecName "kube-api-access-x9ftt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 07:58:14 crc kubenswrapper[4982]: I0122 07:58:14.868465 4982 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/32476bfd-d76d-4f89-8db4-1c04385f141e-host\") on node \"crc\" DevicePath \"\"" Jan 22 07:58:14 crc kubenswrapper[4982]: I0122 07:58:14.868751 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x9ftt\" (UniqueName: \"kubernetes.io/projected/32476bfd-d76d-4f89-8db4-1c04385f141e-kube-api-access-x9ftt\") on node \"crc\" DevicePath \"\"" Jan 22 07:58:15 crc kubenswrapper[4982]: I0122 07:58:15.544446 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2bc63dba8d7989ac6493fc5707768f05179f68b731c129bd1877fd9718ce1a4e" Jan 22 07:58:15 crc kubenswrapper[4982]: I0122 07:58:15.544777 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gtxqn/crc-debug-gj8rd" Jan 22 07:58:15 crc kubenswrapper[4982]: I0122 07:58:15.603402 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc_29d8df82-73c3-42da-adea-9c703a1ed81a/extract/0.log" Jan 22 07:58:15 crc kubenswrapper[4982]: I0122 07:58:15.615522 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc_29d8df82-73c3-42da-adea-9c703a1ed81a/util/0.log" Jan 22 07:58:15 crc kubenswrapper[4982]: I0122 07:58:15.626379 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc_29d8df82-73c3-42da-adea-9c703a1ed81a/pull/0.log" Jan 22 07:58:15 crc kubenswrapper[4982]: I0122 07:58:15.732494 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-59dd8b7cbf-7rhfh_18f9bc75-69ce-4299-ab4a-c280781b056c/manager/0.log" Jan 22 07:58:15 crc kubenswrapper[4982]: I0122 07:58:15.735060 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32476bfd-d76d-4f89-8db4-1c04385f141e" path="/var/lib/kubelet/pods/32476bfd-d76d-4f89-8db4-1c04385f141e/volumes" Jan 22 07:58:15 crc kubenswrapper[4982]: I0122 07:58:15.807627 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-69cf5d4557-22wjh_a7fe3154-ef99-4ce9-9151-605f734269f1/manager/0.log" Jan 22 07:58:15 crc kubenswrapper[4982]: I0122 07:58:15.818679 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-b45d7bf98-hstbj_ca35f48e-eddd-46f2-8369-f1e642432834/manager/0.log" Jan 22 07:58:15 crc kubenswrapper[4982]: I0122 07:58:15.954200 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-78fdd796fd-hjbqs_e884b772-f17f-410c-9a2a-1b87fcda735b/manager/0.log" Jan 22 07:58:15 crc kubenswrapper[4982]: I0122 07:58:15.990698 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-594c8c9d5d-f4l4h_bca8ffde-e486-4000-8d0b-39a275b64803/manager/0.log" Jan 22 07:58:16 crc kubenswrapper[4982]: I0122 07:58:16.012840 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-77d5c5b54f-kxwmj_ef693655-09f3-4809-a4b2-8930551fb3f1/manager/0.log" Jan 22 07:58:16 crc kubenswrapper[4982]: I0122 07:58:16.558293 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-54ccf4f85d-6zh6c_84a649d7-4c8c-4da7-93b3-9e537b0207ee/manager/0.log" Jan 22 07:58:16 crc kubenswrapper[4982]: I0122 07:58:16.569984 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-69d6c9f5b8-7zk22_a1825b14-129b-459a-b08b-7e62c7f2414a/manager/0.log" Jan 22 07:58:16 crc kubenswrapper[4982]: I0122 07:58:16.679144 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-b8b6d4659-vkn5h_d6afaf94-bf6d-4559-ab50-c2320aade035/manager/0.log" Jan 22 07:58:16 crc kubenswrapper[4982]: I0122 07:58:16.708949 4982 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_manila-operator-controller-manager-78c6999f6f-t276t_e63b183e-a84a-4964-a082-5f7768d8c472/manager/0.log" Jan 22 07:58:16 crc kubenswrapper[4982]: I0122 07:58:16.773787 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-c87fff755-tklld_6d6c68f5-4111-471b-875c-0d498c4b046d/manager/0.log" Jan 22 07:58:16 crc kubenswrapper[4982]: I0122 07:58:16.849594 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5d8f59fb49-6pr97_99845353-4c2d-4caa-bf12-c396d6b91a82/manager/0.log" Jan 22 07:58:17 crc kubenswrapper[4982]: I0122 07:58:17.027093 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-6b8bc8d87d-rk69w_7cbf8f35-c5cf-4e1c-8718-023380d9ac26/manager/0.log" Jan 22 07:58:17 crc kubenswrapper[4982]: I0122 07:58:17.079755 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7bd9774b6-f44cg_4c78936d-aa34-45c2-8e85-67f6de306d0f/manager/0.log" Jan 22 07:58:17 crc kubenswrapper[4982]: I0122 07:58:17.097447 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g_47ed3df3-a23e-4021-b786-b99d1b710639/manager/0.log" Jan 22 07:58:17 crc kubenswrapper[4982]: I0122 07:58:17.289105 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-init-5cd76577f9-ghpnv_4423d43b-5bd0-4e58-adc1-b428a3254273/operator/0.log" Jan 22 07:58:18 crc kubenswrapper[4982]: I0122 07:58:18.987250 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 07:58:18 crc kubenswrapper[4982]: I0122 07:58:18.987802 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 07:58:18 crc kubenswrapper[4982]: I0122 07:58:18.987842 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 07:58:18 crc kubenswrapper[4982]: I0122 07:58:18.988661 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f9d81a771d17c161adc09d5e14cec4966d777d5975a06b7fbd1e7851fd0738c7"} pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 07:58:18 crc kubenswrapper[4982]: I0122 07:58:18.988714 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" containerID="cri-o://f9d81a771d17c161adc09d5e14cec4966d777d5975a06b7fbd1e7851fd0738c7" gracePeriod=600 Jan 22 07:58:19 crc kubenswrapper[4982]: I0122 07:58:19.443777 4982 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-647bb87bbd-fvz54_d6294b08-62b7-465e-a5af-08f9bf1e5ff8/manager/0.log" Jan 22 07:58:19 crc kubenswrapper[4982]: I0122 07:58:19.553102 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-cvnbh_2e0bb805-5881-4548-aebd-0924b927cbdd/registry-server/0.log" Jan 22 07:58:19 crc kubenswrapper[4982]: I0122 07:58:19.625514 4982 generic.go:334] "Generic (PLEG): container finished" podID="2829369e-72ba-4637-853b-88f5cf242a0e" containerID="f9d81a771d17c161adc09d5e14cec4966d777d5975a06b7fbd1e7851fd0738c7" exitCode=0 Jan 22 07:58:19 crc kubenswrapper[4982]: I0122 07:58:19.625799 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerDied","Data":"f9d81a771d17c161adc09d5e14cec4966d777d5975a06b7fbd1e7851fd0738c7"} Jan 22 07:58:19 crc kubenswrapper[4982]: I0122 07:58:19.625832 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"ad35a75a6ffc855bc5d4d1f61c3fce87b1d0fa2160bc99b21b1ee14203e9b724"} Jan 22 07:58:19 crc kubenswrapper[4982]: I0122 07:58:19.625900 4982 scope.go:117] "RemoveContainer" containerID="4292384f8792d1496c63f3056a29f7893d807144bd86466effcdeb4f3e397538" Jan 22 07:58:19 crc kubenswrapper[4982]: I0122 07:58:19.677665 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-55db956ddc-284c4_099ae039-177c-4335-a611-990dcdf9c655/manager/0.log" Jan 22 07:58:19 crc kubenswrapper[4982]: I0122 07:58:19.712811 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5d646b7d76-9nfxp_1fa2993a-2231-445d-aa77-7190fb3a8fcb/manager/0.log" Jan 22 07:58:19 crc kubenswrapper[4982]: I0122 07:58:19.741140 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-4dbcc_bd17bdc3-15ba-47d5-88f9-56336faa71bf/operator/0.log" Jan 22 07:58:19 crc kubenswrapper[4982]: I0122 07:58:19.777043 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-547cbdb99f-qgxkb_3bde3f49-9b8e-4cb1-8eac-6eb047eda094/manager/0.log" Jan 22 07:58:19 crc kubenswrapper[4982]: I0122 07:58:19.907572 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-85cd9769bb-jmsqf_030e0626-7169-45ea-9981-78c910b04226/manager/0.log" Jan 22 07:58:19 crc kubenswrapper[4982]: I0122 07:58:19.917118 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-69797bbcbd-k7ffz_afdd5f3a-706e-4f4f-930e-c952e4b0c6dc/manager/0.log" Jan 22 07:58:19 crc kubenswrapper[4982]: I0122 07:58:19.930364 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-5ffb9c6597-prb7h_8b31e08d-b4f8-482f-b413-25897c734299/manager/0.log" Jan 22 07:58:22 crc kubenswrapper[4982]: I0122 07:58:22.882558 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-6zct7_1001b154-4839-4c44-a79b-2be8fcbfb706/control-plane-machine-set-operator/0.log" Jan 22 07:58:22 crc 
kubenswrapper[4982]: I0122 07:58:22.897307 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-wb9ck_3a5a9a72-e46b-4794-8141-04583a99a97a/kube-rbac-proxy/0.log" Jan 22 07:58:22 crc kubenswrapper[4982]: I0122 07:58:22.908516 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-wb9ck_3a5a9a72-e46b-4794-8141-04583a99a97a/machine-api-operator/0.log" Jan 22 07:59:43 crc kubenswrapper[4982]: I0122 07:59:43.594735 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-xvbh2"] Jan 22 07:59:43 crc kubenswrapper[4982]: E0122 07:59:43.596023 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32476bfd-d76d-4f89-8db4-1c04385f141e" containerName="container-00" Jan 22 07:59:43 crc kubenswrapper[4982]: I0122 07:59:43.596041 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="32476bfd-d76d-4f89-8db4-1c04385f141e" containerName="container-00" Jan 22 07:59:43 crc kubenswrapper[4982]: I0122 07:59:43.596308 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="32476bfd-d76d-4f89-8db4-1c04385f141e" containerName="container-00" Jan 22 07:59:43 crc kubenswrapper[4982]: I0122 07:59:43.598474 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xvbh2" Jan 22 07:59:43 crc kubenswrapper[4982]: I0122 07:59:43.616685 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xvbh2"] Jan 22 07:59:43 crc kubenswrapper[4982]: I0122 07:59:43.652756 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09dededd-3bc6-417c-8d16-40d6daa51049-utilities\") pod \"redhat-operators-xvbh2\" (UID: \"09dededd-3bc6-417c-8d16-40d6daa51049\") " pod="openshift-marketplace/redhat-operators-xvbh2" Jan 22 07:59:43 crc kubenswrapper[4982]: I0122 07:59:43.652910 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7rsz\" (UniqueName: \"kubernetes.io/projected/09dededd-3bc6-417c-8d16-40d6daa51049-kube-api-access-h7rsz\") pod \"redhat-operators-xvbh2\" (UID: \"09dededd-3bc6-417c-8d16-40d6daa51049\") " pod="openshift-marketplace/redhat-operators-xvbh2" Jan 22 07:59:43 crc kubenswrapper[4982]: I0122 07:59:43.652988 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09dededd-3bc6-417c-8d16-40d6daa51049-catalog-content\") pod \"redhat-operators-xvbh2\" (UID: \"09dededd-3bc6-417c-8d16-40d6daa51049\") " pod="openshift-marketplace/redhat-operators-xvbh2" Jan 22 07:59:43 crc kubenswrapper[4982]: I0122 07:59:43.755417 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09dededd-3bc6-417c-8d16-40d6daa51049-catalog-content\") pod \"redhat-operators-xvbh2\" (UID: \"09dededd-3bc6-417c-8d16-40d6daa51049\") " pod="openshift-marketplace/redhat-operators-xvbh2" Jan 22 07:59:43 crc kubenswrapper[4982]: I0122 07:59:43.755694 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09dededd-3bc6-417c-8d16-40d6daa51049-utilities\") pod \"redhat-operators-xvbh2\" (UID: \"09dededd-3bc6-417c-8d16-40d6daa51049\") " 
pod="openshift-marketplace/redhat-operators-xvbh2" Jan 22 07:59:43 crc kubenswrapper[4982]: I0122 07:59:43.755803 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7rsz\" (UniqueName: \"kubernetes.io/projected/09dededd-3bc6-417c-8d16-40d6daa51049-kube-api-access-h7rsz\") pod \"redhat-operators-xvbh2\" (UID: \"09dededd-3bc6-417c-8d16-40d6daa51049\") " pod="openshift-marketplace/redhat-operators-xvbh2" Jan 22 07:59:43 crc kubenswrapper[4982]: I0122 07:59:43.756684 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09dededd-3bc6-417c-8d16-40d6daa51049-utilities\") pod \"redhat-operators-xvbh2\" (UID: \"09dededd-3bc6-417c-8d16-40d6daa51049\") " pod="openshift-marketplace/redhat-operators-xvbh2" Jan 22 07:59:43 crc kubenswrapper[4982]: I0122 07:59:43.757300 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09dededd-3bc6-417c-8d16-40d6daa51049-catalog-content\") pod \"redhat-operators-xvbh2\" (UID: \"09dededd-3bc6-417c-8d16-40d6daa51049\") " pod="openshift-marketplace/redhat-operators-xvbh2" Jan 22 07:59:43 crc kubenswrapper[4982]: I0122 07:59:43.784031 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7rsz\" (UniqueName: \"kubernetes.io/projected/09dededd-3bc6-417c-8d16-40d6daa51049-kube-api-access-h7rsz\") pod \"redhat-operators-xvbh2\" (UID: \"09dededd-3bc6-417c-8d16-40d6daa51049\") " pod="openshift-marketplace/redhat-operators-xvbh2" Jan 22 07:59:43 crc kubenswrapper[4982]: I0122 07:59:43.923699 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xvbh2" Jan 22 07:59:44 crc kubenswrapper[4982]: I0122 07:59:44.440666 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xvbh2"] Jan 22 07:59:44 crc kubenswrapper[4982]: W0122 07:59:44.442589 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod09dededd_3bc6_417c_8d16_40d6daa51049.slice/crio-6c0808588f201e36f96f0ab1de953c10c8c12142cdd4d65e277415209460f08b WatchSource:0}: Error finding container 6c0808588f201e36f96f0ab1de953c10c8c12142cdd4d65e277415209460f08b: Status 404 returned error can't find the container with id 6c0808588f201e36f96f0ab1de953c10c8c12142cdd4d65e277415209460f08b Jan 22 07:59:44 crc kubenswrapper[4982]: I0122 07:59:44.510904 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xvbh2" event={"ID":"09dededd-3bc6-417c-8d16-40d6daa51049","Type":"ContainerStarted","Data":"6c0808588f201e36f96f0ab1de953c10c8c12142cdd4d65e277415209460f08b"} Jan 22 07:59:45 crc kubenswrapper[4982]: I0122 07:59:45.523069 4982 generic.go:334] "Generic (PLEG): container finished" podID="09dededd-3bc6-417c-8d16-40d6daa51049" containerID="81c44c2c648897a355012b6318627e4b1158ff9fd0516c9dcda4820bcfcbfbb9" exitCode=0 Jan 22 07:59:45 crc kubenswrapper[4982]: I0122 07:59:45.523139 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xvbh2" event={"ID":"09dededd-3bc6-417c-8d16-40d6daa51049","Type":"ContainerDied","Data":"81c44c2c648897a355012b6318627e4b1158ff9fd0516c9dcda4820bcfcbfbb9"} Jan 22 07:59:47 crc kubenswrapper[4982]: I0122 07:59:47.546713 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-xvbh2" event={"ID":"09dededd-3bc6-417c-8d16-40d6daa51049","Type":"ContainerStarted","Data":"fe592e37e5024ea15720c16e38c782b71568f5f7bac673b33dbd27a62337a88b"} Jan 22 07:59:51 crc kubenswrapper[4982]: I0122 07:59:51.595653 4982 generic.go:334] "Generic (PLEG): container finished" podID="09dededd-3bc6-417c-8d16-40d6daa51049" containerID="fe592e37e5024ea15720c16e38c782b71568f5f7bac673b33dbd27a62337a88b" exitCode=0 Jan 22 07:59:51 crc kubenswrapper[4982]: I0122 07:59:51.595771 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xvbh2" event={"ID":"09dededd-3bc6-417c-8d16-40d6daa51049","Type":"ContainerDied","Data":"fe592e37e5024ea15720c16e38c782b71568f5f7bac673b33dbd27a62337a88b"} Jan 22 07:59:52 crc kubenswrapper[4982]: I0122 07:59:52.609536 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xvbh2" event={"ID":"09dededd-3bc6-417c-8d16-40d6daa51049","Type":"ContainerStarted","Data":"05408b0824f8e14a12e3a05fac595838eb7431de2b3486b6eb8e7221765b3518"} Jan 22 07:59:52 crc kubenswrapper[4982]: I0122 07:59:52.632394 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-xvbh2" podStartSLOduration=2.9466632759999998 podStartE2EDuration="9.632373917s" podCreationTimestamp="2026-01-22 07:59:43 +0000 UTC" firstStartedPulling="2026-01-22 07:59:45.525200716 +0000 UTC m=+8046.363838719" lastFinishedPulling="2026-01-22 07:59:52.210911317 +0000 UTC m=+8053.049549360" observedRunningTime="2026-01-22 07:59:52.627679499 +0000 UTC m=+8053.466317522" watchObservedRunningTime="2026-01-22 07:59:52.632373917 +0000 UTC m=+8053.471011940" Jan 22 07:59:53 crc kubenswrapper[4982]: I0122 07:59:53.924723 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-xvbh2" Jan 22 07:59:53 crc kubenswrapper[4982]: I0122 07:59:53.925125 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-xvbh2" Jan 22 07:59:54 crc kubenswrapper[4982]: I0122 07:59:54.979148 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-xvbh2" podUID="09dededd-3bc6-417c-8d16-40d6daa51049" containerName="registry-server" probeResult="failure" output=< Jan 22 07:59:54 crc kubenswrapper[4982]: timeout: failed to connect service ":50051" within 1s Jan 22 07:59:54 crc kubenswrapper[4982]: > Jan 22 08:00:00 crc kubenswrapper[4982]: I0122 08:00:00.172246 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484480-tgw72"] Jan 22 08:00:00 crc kubenswrapper[4982]: I0122 08:00:00.174508 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484480-tgw72" Jan 22 08:00:00 crc kubenswrapper[4982]: I0122 08:00:00.177252 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 22 08:00:00 crc kubenswrapper[4982]: I0122 08:00:00.177823 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 22 08:00:00 crc kubenswrapper[4982]: I0122 08:00:00.182819 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484480-tgw72"] Jan 22 08:00:00 crc kubenswrapper[4982]: I0122 08:00:00.308136 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4-config-volume\") pod \"collect-profiles-29484480-tgw72\" (UID: \"f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484480-tgw72" Jan 22 08:00:00 crc kubenswrapper[4982]: I0122 08:00:00.308432 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2hls\" (UniqueName: \"kubernetes.io/projected/f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4-kube-api-access-g2hls\") pod \"collect-profiles-29484480-tgw72\" (UID: \"f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484480-tgw72" Jan 22 08:00:00 crc kubenswrapper[4982]: I0122 08:00:00.308602 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4-secret-volume\") pod \"collect-profiles-29484480-tgw72\" (UID: \"f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484480-tgw72" Jan 22 08:00:00 crc kubenswrapper[4982]: I0122 08:00:00.410951 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4-config-volume\") pod \"collect-profiles-29484480-tgw72\" (UID: \"f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484480-tgw72" Jan 22 08:00:00 crc kubenswrapper[4982]: I0122 08:00:00.411003 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2hls\" (UniqueName: \"kubernetes.io/projected/f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4-kube-api-access-g2hls\") pod \"collect-profiles-29484480-tgw72\" (UID: \"f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484480-tgw72" Jan 22 08:00:00 crc kubenswrapper[4982]: I0122 08:00:00.411064 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4-secret-volume\") pod \"collect-profiles-29484480-tgw72\" (UID: \"f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484480-tgw72" Jan 22 08:00:00 crc kubenswrapper[4982]: I0122 08:00:00.412896 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4-config-volume\") pod 
\"collect-profiles-29484480-tgw72\" (UID: \"f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484480-tgw72" Jan 22 08:00:00 crc kubenswrapper[4982]: I0122 08:00:00.418087 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4-secret-volume\") pod \"collect-profiles-29484480-tgw72\" (UID: \"f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484480-tgw72" Jan 22 08:00:00 crc kubenswrapper[4982]: I0122 08:00:00.427603 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2hls\" (UniqueName: \"kubernetes.io/projected/f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4-kube-api-access-g2hls\") pod \"collect-profiles-29484480-tgw72\" (UID: \"f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484480-tgw72" Jan 22 08:00:00 crc kubenswrapper[4982]: I0122 08:00:00.498951 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484480-tgw72" Jan 22 08:00:00 crc kubenswrapper[4982]: I0122 08:00:00.972193 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484480-tgw72"] Jan 22 08:00:01 crc kubenswrapper[4982]: I0122 08:00:01.709556 4982 generic.go:334] "Generic (PLEG): container finished" podID="f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4" containerID="89238474789a57e4783d160c7fcfffaed698443f1f53bf0e02f6f74abf9e63ca" exitCode=0 Jan 22 08:00:01 crc kubenswrapper[4982]: I0122 08:00:01.709614 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484480-tgw72" event={"ID":"f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4","Type":"ContainerDied","Data":"89238474789a57e4783d160c7fcfffaed698443f1f53bf0e02f6f74abf9e63ca"} Jan 22 08:00:01 crc kubenswrapper[4982]: I0122 08:00:01.711421 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484480-tgw72" event={"ID":"f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4","Type":"ContainerStarted","Data":"317952be639cabd9f2c016760514b730839564bd56ebea42b2db681a404a50dc"} Jan 22 08:00:03 crc kubenswrapper[4982]: I0122 08:00:03.110420 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484480-tgw72" Jan 22 08:00:03 crc kubenswrapper[4982]: I0122 08:00:03.276496 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4-secret-volume\") pod \"f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4\" (UID: \"f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4\") " Jan 22 08:00:03 crc kubenswrapper[4982]: I0122 08:00:03.276568 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4-config-volume\") pod \"f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4\" (UID: \"f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4\") " Jan 22 08:00:03 crc kubenswrapper[4982]: I0122 08:00:03.276659 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g2hls\" (UniqueName: \"kubernetes.io/projected/f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4-kube-api-access-g2hls\") pod \"f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4\" (UID: \"f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4\") " Jan 22 08:00:03 crc kubenswrapper[4982]: I0122 08:00:03.277575 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4-config-volume" (OuterVolumeSpecName: "config-volume") pod "f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4" (UID: "f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 08:00:03 crc kubenswrapper[4982]: I0122 08:00:03.282949 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4-kube-api-access-g2hls" (OuterVolumeSpecName: "kube-api-access-g2hls") pod "f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4" (UID: "f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4"). InnerVolumeSpecName "kube-api-access-g2hls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 08:00:03 crc kubenswrapper[4982]: I0122 08:00:03.295534 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4" (UID: "f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 08:00:03 crc kubenswrapper[4982]: I0122 08:00:03.379403 4982 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 22 08:00:03 crc kubenswrapper[4982]: I0122 08:00:03.379439 4982 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 08:00:03 crc kubenswrapper[4982]: I0122 08:00:03.379453 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g2hls\" (UniqueName: \"kubernetes.io/projected/f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4-kube-api-access-g2hls\") on node \"crc\" DevicePath \"\"" Jan 22 08:00:03 crc kubenswrapper[4982]: I0122 08:00:03.732600 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484480-tgw72" event={"ID":"f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4","Type":"ContainerDied","Data":"317952be639cabd9f2c016760514b730839564bd56ebea42b2db681a404a50dc"} Jan 22 08:00:03 crc kubenswrapper[4982]: I0122 08:00:03.734648 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="317952be639cabd9f2c016760514b730839564bd56ebea42b2db681a404a50dc" Jan 22 08:00:03 crc kubenswrapper[4982]: I0122 08:00:03.732689 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484480-tgw72" Jan 22 08:00:03 crc kubenswrapper[4982]: I0122 08:00:03.978231 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-xvbh2" Jan 22 08:00:04 crc kubenswrapper[4982]: I0122 08:00:04.031046 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-xvbh2" Jan 22 08:00:04 crc kubenswrapper[4982]: I0122 08:00:04.187359 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484435-wtgn8"] Jan 22 08:00:04 crc kubenswrapper[4982]: I0122 08:00:04.197704 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484435-wtgn8"] Jan 22 08:00:04 crc kubenswrapper[4982]: I0122 08:00:04.329047 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xvbh2"] Jan 22 08:00:05 crc kubenswrapper[4982]: I0122 08:00:05.731977 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bf985b4-36db-45ac-9483-052d61149d7f" path="/var/lib/kubelet/pods/4bf985b4-36db-45ac-9483-052d61149d7f/volumes" Jan 22 08:00:05 crc kubenswrapper[4982]: I0122 08:00:05.748172 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-xvbh2" podUID="09dededd-3bc6-417c-8d16-40d6daa51049" containerName="registry-server" containerID="cri-o://05408b0824f8e14a12e3a05fac595838eb7431de2b3486b6eb8e7221765b3518" gracePeriod=2 Jan 22 08:00:06 crc kubenswrapper[4982]: I0122 08:00:06.221441 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xvbh2" Jan 22 08:00:06 crc kubenswrapper[4982]: I0122 08:00:06.342711 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09dededd-3bc6-417c-8d16-40d6daa51049-utilities\") pod \"09dededd-3bc6-417c-8d16-40d6daa51049\" (UID: \"09dededd-3bc6-417c-8d16-40d6daa51049\") " Jan 22 08:00:06 crc kubenswrapper[4982]: I0122 08:00:06.342829 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7rsz\" (UniqueName: \"kubernetes.io/projected/09dededd-3bc6-417c-8d16-40d6daa51049-kube-api-access-h7rsz\") pod \"09dededd-3bc6-417c-8d16-40d6daa51049\" (UID: \"09dededd-3bc6-417c-8d16-40d6daa51049\") " Jan 22 08:00:06 crc kubenswrapper[4982]: I0122 08:00:06.343019 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09dededd-3bc6-417c-8d16-40d6daa51049-catalog-content\") pod \"09dededd-3bc6-417c-8d16-40d6daa51049\" (UID: \"09dededd-3bc6-417c-8d16-40d6daa51049\") " Jan 22 08:00:06 crc kubenswrapper[4982]: I0122 08:00:06.343809 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09dededd-3bc6-417c-8d16-40d6daa51049-utilities" (OuterVolumeSpecName: "utilities") pod "09dededd-3bc6-417c-8d16-40d6daa51049" (UID: "09dededd-3bc6-417c-8d16-40d6daa51049"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 08:00:06 crc kubenswrapper[4982]: I0122 08:00:06.349163 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09dededd-3bc6-417c-8d16-40d6daa51049-kube-api-access-h7rsz" (OuterVolumeSpecName: "kube-api-access-h7rsz") pod "09dededd-3bc6-417c-8d16-40d6daa51049" (UID: "09dededd-3bc6-417c-8d16-40d6daa51049"). InnerVolumeSpecName "kube-api-access-h7rsz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 08:00:06 crc kubenswrapper[4982]: I0122 08:00:06.445835 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09dededd-3bc6-417c-8d16-40d6daa51049-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 08:00:06 crc kubenswrapper[4982]: I0122 08:00:06.445888 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7rsz\" (UniqueName: \"kubernetes.io/projected/09dededd-3bc6-417c-8d16-40d6daa51049-kube-api-access-h7rsz\") on node \"crc\" DevicePath \"\"" Jan 22 08:00:06 crc kubenswrapper[4982]: I0122 08:00:06.488635 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09dededd-3bc6-417c-8d16-40d6daa51049-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "09dededd-3bc6-417c-8d16-40d6daa51049" (UID: "09dededd-3bc6-417c-8d16-40d6daa51049"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 08:00:06 crc kubenswrapper[4982]: I0122 08:00:06.547501 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09dededd-3bc6-417c-8d16-40d6daa51049-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 08:00:06 crc kubenswrapper[4982]: I0122 08:00:06.760386 4982 generic.go:334] "Generic (PLEG): container finished" podID="09dededd-3bc6-417c-8d16-40d6daa51049" containerID="05408b0824f8e14a12e3a05fac595838eb7431de2b3486b6eb8e7221765b3518" exitCode=0 Jan 22 08:00:06 crc kubenswrapper[4982]: I0122 08:00:06.760480 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xvbh2" event={"ID":"09dededd-3bc6-417c-8d16-40d6daa51049","Type":"ContainerDied","Data":"05408b0824f8e14a12e3a05fac595838eb7431de2b3486b6eb8e7221765b3518"} Jan 22 08:00:06 crc kubenswrapper[4982]: I0122 08:00:06.760725 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xvbh2" event={"ID":"09dededd-3bc6-417c-8d16-40d6daa51049","Type":"ContainerDied","Data":"6c0808588f201e36f96f0ab1de953c10c8c12142cdd4d65e277415209460f08b"} Jan 22 08:00:06 crc kubenswrapper[4982]: I0122 08:00:06.760750 4982 scope.go:117] "RemoveContainer" containerID="05408b0824f8e14a12e3a05fac595838eb7431de2b3486b6eb8e7221765b3518" Jan 22 08:00:06 crc kubenswrapper[4982]: I0122 08:00:06.760493 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xvbh2" Jan 22 08:00:06 crc kubenswrapper[4982]: I0122 08:00:06.791077 4982 scope.go:117] "RemoveContainer" containerID="fe592e37e5024ea15720c16e38c782b71568f5f7bac673b33dbd27a62337a88b" Jan 22 08:00:06 crc kubenswrapper[4982]: I0122 08:00:06.801396 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xvbh2"] Jan 22 08:00:06 crc kubenswrapper[4982]: I0122 08:00:06.814761 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-xvbh2"] Jan 22 08:00:06 crc kubenswrapper[4982]: I0122 08:00:06.816205 4982 scope.go:117] "RemoveContainer" containerID="81c44c2c648897a355012b6318627e4b1158ff9fd0516c9dcda4820bcfcbfbb9" Jan 22 08:00:06 crc kubenswrapper[4982]: I0122 08:00:06.861962 4982 scope.go:117] "RemoveContainer" containerID="05408b0824f8e14a12e3a05fac595838eb7431de2b3486b6eb8e7221765b3518" Jan 22 08:00:06 crc kubenswrapper[4982]: E0122 08:00:06.862384 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"05408b0824f8e14a12e3a05fac595838eb7431de2b3486b6eb8e7221765b3518\": container with ID starting with 05408b0824f8e14a12e3a05fac595838eb7431de2b3486b6eb8e7221765b3518 not found: ID does not exist" containerID="05408b0824f8e14a12e3a05fac595838eb7431de2b3486b6eb8e7221765b3518" Jan 22 08:00:06 crc kubenswrapper[4982]: I0122 08:00:06.862423 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05408b0824f8e14a12e3a05fac595838eb7431de2b3486b6eb8e7221765b3518"} err="failed to get container status \"05408b0824f8e14a12e3a05fac595838eb7431de2b3486b6eb8e7221765b3518\": rpc error: code = NotFound desc = could not find container \"05408b0824f8e14a12e3a05fac595838eb7431de2b3486b6eb8e7221765b3518\": container with ID starting with 05408b0824f8e14a12e3a05fac595838eb7431de2b3486b6eb8e7221765b3518 not found: ID does not exist" Jan 22 08:00:06 crc 
kubenswrapper[4982]: I0122 08:00:06.862452 4982 scope.go:117] "RemoveContainer" containerID="fe592e37e5024ea15720c16e38c782b71568f5f7bac673b33dbd27a62337a88b" Jan 22 08:00:06 crc kubenswrapper[4982]: E0122 08:00:06.862711 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe592e37e5024ea15720c16e38c782b71568f5f7bac673b33dbd27a62337a88b\": container with ID starting with fe592e37e5024ea15720c16e38c782b71568f5f7bac673b33dbd27a62337a88b not found: ID does not exist" containerID="fe592e37e5024ea15720c16e38c782b71568f5f7bac673b33dbd27a62337a88b" Jan 22 08:00:06 crc kubenswrapper[4982]: I0122 08:00:06.862754 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe592e37e5024ea15720c16e38c782b71568f5f7bac673b33dbd27a62337a88b"} err="failed to get container status \"fe592e37e5024ea15720c16e38c782b71568f5f7bac673b33dbd27a62337a88b\": rpc error: code = NotFound desc = could not find container \"fe592e37e5024ea15720c16e38c782b71568f5f7bac673b33dbd27a62337a88b\": container with ID starting with fe592e37e5024ea15720c16e38c782b71568f5f7bac673b33dbd27a62337a88b not found: ID does not exist" Jan 22 08:00:06 crc kubenswrapper[4982]: I0122 08:00:06.862780 4982 scope.go:117] "RemoveContainer" containerID="81c44c2c648897a355012b6318627e4b1158ff9fd0516c9dcda4820bcfcbfbb9" Jan 22 08:00:06 crc kubenswrapper[4982]: E0122 08:00:06.863821 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81c44c2c648897a355012b6318627e4b1158ff9fd0516c9dcda4820bcfcbfbb9\": container with ID starting with 81c44c2c648897a355012b6318627e4b1158ff9fd0516c9dcda4820bcfcbfbb9 not found: ID does not exist" containerID="81c44c2c648897a355012b6318627e4b1158ff9fd0516c9dcda4820bcfcbfbb9" Jan 22 08:00:06 crc kubenswrapper[4982]: I0122 08:00:06.863934 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81c44c2c648897a355012b6318627e4b1158ff9fd0516c9dcda4820bcfcbfbb9"} err="failed to get container status \"81c44c2c648897a355012b6318627e4b1158ff9fd0516c9dcda4820bcfcbfbb9\": rpc error: code = NotFound desc = could not find container \"81c44c2c648897a355012b6318627e4b1158ff9fd0516c9dcda4820bcfcbfbb9\": container with ID starting with 81c44c2c648897a355012b6318627e4b1158ff9fd0516c9dcda4820bcfcbfbb9 not found: ID does not exist" Jan 22 08:00:07 crc kubenswrapper[4982]: I0122 08:00:07.731042 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09dededd-3bc6-417c-8d16-40d6daa51049" path="/var/lib/kubelet/pods/09dededd-3bc6-417c-8d16-40d6daa51049/volumes" Jan 22 08:00:10 crc kubenswrapper[4982]: I0122 08:00:10.061162 4982 scope.go:117] "RemoveContainer" containerID="0507c399863ce1a2250627b10f426ab3b64919c3a79c12c0181e9b0efdf1a149" Jan 22 08:00:29 crc kubenswrapper[4982]: I0122 08:00:29.008815 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-644t8_97482988-28a9-41f6-90f9-8ab820afdb27/cert-manager-controller/0.log" Jan 22 08:00:29 crc kubenswrapper[4982]: I0122 08:00:29.038486 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-sgz2n_cc5f2133-2f6a-4e37-9224-c109867618ec/cert-manager-cainjector/0.log" Jan 22 08:00:29 crc kubenswrapper[4982]: I0122 08:00:29.048709 4982 log.go:25] "Finished parsing log file" 
path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-jbph5_aeb8c181-493c-4f85-8500-f2109edc40f7/cert-manager-webhook/0.log" Jan 22 08:00:34 crc kubenswrapper[4982]: I0122 08:00:34.640192 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7754f76f8b-kg7hd_7c97357b-daac-4b3b-836c-ccd1710cb6bd/nmstate-console-plugin/0.log" Jan 22 08:00:34 crc kubenswrapper[4982]: I0122 08:00:34.665463 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-q2hd9_2d9b1a0a-d7ca-4e76-ab0d-40cc990e4fb8/nmstate-handler/0.log" Jan 22 08:00:34 crc kubenswrapper[4982]: I0122 08:00:34.688423 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-gmdj6_d7966747-6b58-4d2a-b9c6-f85627f187d3/nmstate-metrics/0.log" Jan 22 08:00:34 crc kubenswrapper[4982]: I0122 08:00:34.698257 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-gmdj6_d7966747-6b58-4d2a-b9c6-f85627f187d3/kube-rbac-proxy/0.log" Jan 22 08:00:34 crc kubenswrapper[4982]: I0122 08:00:34.716286 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-646758c888-nv5l8_a58871fe-63e9-47ab-8f67-b9ec61f7b14a/nmstate-operator/0.log" Jan 22 08:00:34 crc kubenswrapper[4982]: I0122 08:00:34.727533 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-8474b5b9d8-d5tbt_695d8ee6-67a2-42de-a169-5b823f7dce2b/nmstate-webhook/0.log" Jan 22 08:00:40 crc kubenswrapper[4982]: I0122 08:00:40.756452 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-gm2ks_d1776698-995c-47f7-bc78-c0123382daca/prometheus-operator/0.log" Jan 22 08:00:40 crc kubenswrapper[4982]: I0122 08:00:40.773344 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6d86744544-9cb44_a1faaf21-b27e-4428-af7c-d3be126a9ed7/prometheus-operator-admission-webhook/0.log" Jan 22 08:00:40 crc kubenswrapper[4982]: I0122 08:00:40.786384 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6d86744544-tkv9d_0c419053-764e-456d-a3e0-86cd79f71c24/prometheus-operator-admission-webhook/0.log" Jan 22 08:00:40 crc kubenswrapper[4982]: I0122 08:00:40.841117 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-m4rnp_624f63e9-4045-48b7-9352-85b78572c5ec/operator/0.log" Jan 22 08:00:40 crc kubenswrapper[4982]: I0122 08:00:40.850639 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-hvlnj_fc4a27dd-6f09-4624-bde7-fdb23f8dbaa8/perses-operator/0.log" Jan 22 08:00:46 crc kubenswrapper[4982]: I0122 08:00:46.899059 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-z65sv_d8de638b-d0f4-419d-8df5-d0892e3ffc6d/controller/0.log" Jan 22 08:00:46 crc kubenswrapper[4982]: I0122 08:00:46.907839 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-z65sv_d8de638b-d0f4-419d-8df5-d0892e3ffc6d/kube-rbac-proxy/0.log" Jan 22 08:00:46 crc kubenswrapper[4982]: I0122 08:00:46.927723 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4v5kf_8b5087c1-3eb3-46f7-8013-66ffbf2e6f69/controller/0.log" Jan 22 
08:00:48 crc kubenswrapper[4982]: I0122 08:00:48.973418 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 08:00:48 crc kubenswrapper[4982]: I0122 08:00:48.974055 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 08:00:49 crc kubenswrapper[4982]: I0122 08:00:49.666588 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4v5kf_8b5087c1-3eb3-46f7-8013-66ffbf2e6f69/frr/0.log" Jan 22 08:00:49 crc kubenswrapper[4982]: I0122 08:00:49.677520 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4v5kf_8b5087c1-3eb3-46f7-8013-66ffbf2e6f69/reloader/0.log" Jan 22 08:00:49 crc kubenswrapper[4982]: I0122 08:00:49.683190 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4v5kf_8b5087c1-3eb3-46f7-8013-66ffbf2e6f69/frr-metrics/0.log" Jan 22 08:00:49 crc kubenswrapper[4982]: I0122 08:00:49.690817 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4v5kf_8b5087c1-3eb3-46f7-8013-66ffbf2e6f69/kube-rbac-proxy/0.log" Jan 22 08:00:49 crc kubenswrapper[4982]: I0122 08:00:49.701754 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4v5kf_8b5087c1-3eb3-46f7-8013-66ffbf2e6f69/kube-rbac-proxy-frr/0.log" Jan 22 08:00:49 crc kubenswrapper[4982]: I0122 08:00:49.710973 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4v5kf_8b5087c1-3eb3-46f7-8013-66ffbf2e6f69/cp-frr-files/0.log" Jan 22 08:00:49 crc kubenswrapper[4982]: I0122 08:00:49.718637 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4v5kf_8b5087c1-3eb3-46f7-8013-66ffbf2e6f69/cp-reloader/0.log" Jan 22 08:00:49 crc kubenswrapper[4982]: I0122 08:00:49.734610 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4v5kf_8b5087c1-3eb3-46f7-8013-66ffbf2e6f69/cp-metrics/0.log" Jan 22 08:00:49 crc kubenswrapper[4982]: I0122 08:00:49.747917 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-5f6hr_14e2cb27-a2f0-45fe-8d0a-3be2c34c57aa/frr-k8s-webhook-server/0.log" Jan 22 08:00:49 crc kubenswrapper[4982]: I0122 08:00:49.778301 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-5bc8cc58c6-wjczj_44543400-72e1-4ae7-976e-901b6ebac4b2/manager/0.log" Jan 22 08:00:49 crc kubenswrapper[4982]: I0122 08:00:49.790119 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-579c7888b9-9pz77_2b28eec2-730e-422f-b384-8b60cb00a45f/webhook-server/0.log" Jan 22 08:00:50 crc kubenswrapper[4982]: I0122 08:00:50.404501 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-7w7hm_2df7325f-3ac5-49eb-9c91-adc20d7b3e80/speaker/0.log" Jan 22 08:00:50 crc kubenswrapper[4982]: I0122 08:00:50.414063 4982 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_speaker-7w7hm_2df7325f-3ac5-49eb-9c91-adc20d7b3e80/kube-rbac-proxy/0.log" Jan 22 08:00:52 crc kubenswrapper[4982]: I0122 08:00:52.691654 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t_e7d6e3ce-1d35-44bd-9677-001c19062a90/extract/0.log" Jan 22 08:00:52 crc kubenswrapper[4982]: I0122 08:00:52.698961 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t_e7d6e3ce-1d35-44bd-9677-001c19062a90/util/0.log" Jan 22 08:00:52 crc kubenswrapper[4982]: I0122 08:00:52.729496 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931asqm8t_e7d6e3ce-1d35-44bd-9677-001c19062a90/pull/0.log" Jan 22 08:00:52 crc kubenswrapper[4982]: I0122 08:00:52.739122 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr_20024115-1f58-4a08-93de-543612c86368/extract/0.log" Jan 22 08:00:52 crc kubenswrapper[4982]: I0122 08:00:52.745956 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr_20024115-1f58-4a08-93de-543612c86368/util/0.log" Jan 22 08:00:52 crc kubenswrapper[4982]: I0122 08:00:52.756955 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6j4hr_20024115-1f58-4a08-93de-543612c86368/pull/0.log" Jan 22 08:00:52 crc kubenswrapper[4982]: I0122 08:00:52.767182 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f_b2fc3062-08c7-4f2f-afda-05b7402cd2dd/extract/0.log" Jan 22 08:00:52 crc kubenswrapper[4982]: I0122 08:00:52.773849 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f_b2fc3062-08c7-4f2f-afda-05b7402cd2dd/util/0.log" Jan 22 08:00:52 crc kubenswrapper[4982]: I0122 08:00:52.781039 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713trp9f_b2fc3062-08c7-4f2f-afda-05b7402cd2dd/pull/0.log" Jan 22 08:00:52 crc kubenswrapper[4982]: I0122 08:00:52.791803 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx_b9612f2b-06bd-4536-ad46-c74dfcd8d517/extract/0.log" Jan 22 08:00:52 crc kubenswrapper[4982]: I0122 08:00:52.801560 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx_b9612f2b-06bd-4536-ad46-c74dfcd8d517/util/0.log" Jan 22 08:00:52 crc kubenswrapper[4982]: I0122 08:00:52.815325 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0866wnx_b9612f2b-06bd-4536-ad46-c74dfcd8d517/pull/0.log" Jan 22 08:00:53 crc kubenswrapper[4982]: I0122 08:00:53.995436 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nh679_702f20a9-dc10-4e71-8ba8-1f8f180af1be/registry-server/0.log" Jan 22 08:00:54 crc kubenswrapper[4982]: I0122 
08:00:54.001075 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nh679_702f20a9-dc10-4e71-8ba8-1f8f180af1be/extract-utilities/0.log" Jan 22 08:00:54 crc kubenswrapper[4982]: I0122 08:00:54.009208 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nh679_702f20a9-dc10-4e71-8ba8-1f8f180af1be/extract-content/0.log" Jan 22 08:00:54 crc kubenswrapper[4982]: I0122 08:00:54.236438 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dmtbp_0e5bfcba-8e81-47f0-8fe9-7607e9cbc9e2/registry-server/0.log" Jan 22 08:00:54 crc kubenswrapper[4982]: I0122 08:00:54.240919 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dmtbp_0e5bfcba-8e81-47f0-8fe9-7607e9cbc9e2/extract-utilities/0.log" Jan 22 08:00:54 crc kubenswrapper[4982]: I0122 08:00:54.247356 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dmtbp_0e5bfcba-8e81-47f0-8fe9-7607e9cbc9e2/extract-content/0.log" Jan 22 08:00:54 crc kubenswrapper[4982]: I0122 08:00:54.261010 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-ghnt5_4f40a1f2-f50f-4898-b3d1-c2b00fd0606c/marketplace-operator/0.log" Jan 22 08:00:54 crc kubenswrapper[4982]: I0122 08:00:54.587771 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nqnzs_7bf4b1c6-51f5-483c-a175-d197751eeb6d/registry-server/0.log" Jan 22 08:00:54 crc kubenswrapper[4982]: I0122 08:00:54.591933 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nqnzs_7bf4b1c6-51f5-483c-a175-d197751eeb6d/extract-utilities/0.log" Jan 22 08:00:54 crc kubenswrapper[4982]: I0122 08:00:54.598805 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nqnzs_7bf4b1c6-51f5-483c-a175-d197751eeb6d/extract-content/0.log" Jan 22 08:00:55 crc kubenswrapper[4982]: I0122 08:00:55.712119 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2jtbg_1b6817e1-b502-4ace-aa86-0de7561ae865/registry-server/0.log" Jan 22 08:00:55 crc kubenswrapper[4982]: I0122 08:00:55.718006 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2jtbg_1b6817e1-b502-4ace-aa86-0de7561ae865/extract-utilities/0.log" Jan 22 08:00:55 crc kubenswrapper[4982]: I0122 08:00:55.724820 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2jtbg_1b6817e1-b502-4ace-aa86-0de7561ae865/extract-content/0.log" Jan 22 08:00:58 crc kubenswrapper[4982]: I0122 08:00:58.996950 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-gm2ks_d1776698-995c-47f7-bc78-c0123382daca/prometheus-operator/0.log" Jan 22 08:00:59 crc kubenswrapper[4982]: I0122 08:00:59.008290 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6d86744544-9cb44_a1faaf21-b27e-4428-af7c-d3be126a9ed7/prometheus-operator-admission-webhook/0.log" Jan 22 08:00:59 crc kubenswrapper[4982]: I0122 08:00:59.025563 4982 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6d86744544-tkv9d_0c419053-764e-456d-a3e0-86cd79f71c24/prometheus-operator-admission-webhook/0.log" Jan 22 08:00:59 crc kubenswrapper[4982]: I0122 08:00:59.057220 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-m4rnp_624f63e9-4045-48b7-9352-85b78572c5ec/operator/0.log" Jan 22 08:00:59 crc kubenswrapper[4982]: I0122 08:00:59.068212 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-hvlnj_fc4a27dd-6f09-4624-bde7-fdb23f8dbaa8/perses-operator/0.log" Jan 22 08:01:00 crc kubenswrapper[4982]: I0122 08:01:00.157590 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29484481-qb826"] Jan 22 08:01:00 crc kubenswrapper[4982]: E0122 08:01:00.158378 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4" containerName="collect-profiles" Jan 22 08:01:00 crc kubenswrapper[4982]: I0122 08:01:00.158395 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4" containerName="collect-profiles" Jan 22 08:01:00 crc kubenswrapper[4982]: E0122 08:01:00.158417 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09dededd-3bc6-417c-8d16-40d6daa51049" containerName="registry-server" Jan 22 08:01:00 crc kubenswrapper[4982]: I0122 08:01:00.158425 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="09dededd-3bc6-417c-8d16-40d6daa51049" containerName="registry-server" Jan 22 08:01:00 crc kubenswrapper[4982]: E0122 08:01:00.158438 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09dededd-3bc6-417c-8d16-40d6daa51049" containerName="extract-utilities" Jan 22 08:01:00 crc kubenswrapper[4982]: I0122 08:01:00.158449 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="09dededd-3bc6-417c-8d16-40d6daa51049" containerName="extract-utilities" Jan 22 08:01:00 crc kubenswrapper[4982]: E0122 08:01:00.158485 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09dededd-3bc6-417c-8d16-40d6daa51049" containerName="extract-content" Jan 22 08:01:00 crc kubenswrapper[4982]: I0122 08:01:00.158493 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="09dededd-3bc6-417c-8d16-40d6daa51049" containerName="extract-content" Jan 22 08:01:00 crc kubenswrapper[4982]: I0122 08:01:00.158735 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="09dededd-3bc6-417c-8d16-40d6daa51049" containerName="registry-server" Jan 22 08:01:00 crc kubenswrapper[4982]: I0122 08:01:00.158764 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4" containerName="collect-profiles" Jan 22 08:01:00 crc kubenswrapper[4982]: I0122 08:01:00.159626 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29484481-qb826" Jan 22 08:01:00 crc kubenswrapper[4982]: I0122 08:01:00.182662 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29484481-qb826"] Jan 22 08:01:00 crc kubenswrapper[4982]: I0122 08:01:00.312791 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5bhq\" (UniqueName: \"kubernetes.io/projected/238cc454-61e6-48fb-8658-8e27bedc24e8-kube-api-access-d5bhq\") pod \"keystone-cron-29484481-qb826\" (UID: \"238cc454-61e6-48fb-8658-8e27bedc24e8\") " pod="openstack/keystone-cron-29484481-qb826" Jan 22 08:01:00 crc kubenswrapper[4982]: I0122 08:01:00.312893 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/238cc454-61e6-48fb-8658-8e27bedc24e8-fernet-keys\") pod \"keystone-cron-29484481-qb826\" (UID: \"238cc454-61e6-48fb-8658-8e27bedc24e8\") " pod="openstack/keystone-cron-29484481-qb826" Jan 22 08:01:00 crc kubenswrapper[4982]: I0122 08:01:00.312934 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/238cc454-61e6-48fb-8658-8e27bedc24e8-config-data\") pod \"keystone-cron-29484481-qb826\" (UID: \"238cc454-61e6-48fb-8658-8e27bedc24e8\") " pod="openstack/keystone-cron-29484481-qb826" Jan 22 08:01:00 crc kubenswrapper[4982]: I0122 08:01:00.312958 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/238cc454-61e6-48fb-8658-8e27bedc24e8-combined-ca-bundle\") pod \"keystone-cron-29484481-qb826\" (UID: \"238cc454-61e6-48fb-8658-8e27bedc24e8\") " pod="openstack/keystone-cron-29484481-qb826" Jan 22 08:01:00 crc kubenswrapper[4982]: I0122 08:01:00.414449 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5bhq\" (UniqueName: \"kubernetes.io/projected/238cc454-61e6-48fb-8658-8e27bedc24e8-kube-api-access-d5bhq\") pod \"keystone-cron-29484481-qb826\" (UID: \"238cc454-61e6-48fb-8658-8e27bedc24e8\") " pod="openstack/keystone-cron-29484481-qb826" Jan 22 08:01:00 crc kubenswrapper[4982]: I0122 08:01:00.414586 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/238cc454-61e6-48fb-8658-8e27bedc24e8-fernet-keys\") pod \"keystone-cron-29484481-qb826\" (UID: \"238cc454-61e6-48fb-8658-8e27bedc24e8\") " pod="openstack/keystone-cron-29484481-qb826" Jan 22 08:01:00 crc kubenswrapper[4982]: I0122 08:01:00.414622 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/238cc454-61e6-48fb-8658-8e27bedc24e8-config-data\") pod \"keystone-cron-29484481-qb826\" (UID: \"238cc454-61e6-48fb-8658-8e27bedc24e8\") " pod="openstack/keystone-cron-29484481-qb826" Jan 22 08:01:00 crc kubenswrapper[4982]: I0122 08:01:00.414657 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/238cc454-61e6-48fb-8658-8e27bedc24e8-combined-ca-bundle\") pod \"keystone-cron-29484481-qb826\" (UID: \"238cc454-61e6-48fb-8658-8e27bedc24e8\") " pod="openstack/keystone-cron-29484481-qb826" Jan 22 08:01:00 crc kubenswrapper[4982]: I0122 08:01:00.423143 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/238cc454-61e6-48fb-8658-8e27bedc24e8-config-data\") pod \"keystone-cron-29484481-qb826\" (UID: \"238cc454-61e6-48fb-8658-8e27bedc24e8\") " pod="openstack/keystone-cron-29484481-qb826" Jan 22 08:01:00 crc kubenswrapper[4982]: I0122 08:01:00.423727 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/238cc454-61e6-48fb-8658-8e27bedc24e8-fernet-keys\") pod \"keystone-cron-29484481-qb826\" (UID: \"238cc454-61e6-48fb-8658-8e27bedc24e8\") " pod="openstack/keystone-cron-29484481-qb826" Jan 22 08:01:00 crc kubenswrapper[4982]: I0122 08:01:00.426116 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/238cc454-61e6-48fb-8658-8e27bedc24e8-combined-ca-bundle\") pod \"keystone-cron-29484481-qb826\" (UID: \"238cc454-61e6-48fb-8658-8e27bedc24e8\") " pod="openstack/keystone-cron-29484481-qb826" Jan 22 08:01:00 crc kubenswrapper[4982]: I0122 08:01:00.431418 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5bhq\" (UniqueName: \"kubernetes.io/projected/238cc454-61e6-48fb-8658-8e27bedc24e8-kube-api-access-d5bhq\") pod \"keystone-cron-29484481-qb826\" (UID: \"238cc454-61e6-48fb-8658-8e27bedc24e8\") " pod="openstack/keystone-cron-29484481-qb826" Jan 22 08:01:00 crc kubenswrapper[4982]: I0122 08:01:00.476792 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29484481-qb826" Jan 22 08:01:00 crc kubenswrapper[4982]: I0122 08:01:00.957212 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29484481-qb826"] Jan 22 08:01:00 crc kubenswrapper[4982]: W0122 08:01:00.958950 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod238cc454_61e6_48fb_8658_8e27bedc24e8.slice/crio-ffe35cddd78dd0e0f15866f2226102e65595f9ff5bc2a26b9bc122b19de8fc5f WatchSource:0}: Error finding container ffe35cddd78dd0e0f15866f2226102e65595f9ff5bc2a26b9bc122b19de8fc5f: Status 404 returned error can't find the container with id ffe35cddd78dd0e0f15866f2226102e65595f9ff5bc2a26b9bc122b19de8fc5f Jan 22 08:01:01 crc kubenswrapper[4982]: I0122 08:01:01.367989 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29484481-qb826" event={"ID":"238cc454-61e6-48fb-8658-8e27bedc24e8","Type":"ContainerStarted","Data":"c45d2e1d74b204cda19e73d9ea0121eab9a33e4a7a400847c6b0dd5948e0357e"} Jan 22 08:01:01 crc kubenswrapper[4982]: I0122 08:01:01.368248 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29484481-qb826" event={"ID":"238cc454-61e6-48fb-8658-8e27bedc24e8","Type":"ContainerStarted","Data":"ffe35cddd78dd0e0f15866f2226102e65595f9ff5bc2a26b9bc122b19de8fc5f"} Jan 22 08:01:01 crc kubenswrapper[4982]: I0122 08:01:01.397684 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29484481-qb826" podStartSLOduration=1.397664336 podStartE2EDuration="1.397664336s" podCreationTimestamp="2026-01-22 08:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 08:01:01.39120242 +0000 UTC m=+8122.229840443" watchObservedRunningTime="2026-01-22 08:01:01.397664336 +0000 UTC m=+8122.236302339" Jan 22 08:01:04 crc kubenswrapper[4982]: I0122 08:01:04.400733 4982 
generic.go:334] "Generic (PLEG): container finished" podID="238cc454-61e6-48fb-8658-8e27bedc24e8" containerID="c45d2e1d74b204cda19e73d9ea0121eab9a33e4a7a400847c6b0dd5948e0357e" exitCode=0 Jan 22 08:01:04 crc kubenswrapper[4982]: I0122 08:01:04.400821 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29484481-qb826" event={"ID":"238cc454-61e6-48fb-8658-8e27bedc24e8","Type":"ContainerDied","Data":"c45d2e1d74b204cda19e73d9ea0121eab9a33e4a7a400847c6b0dd5948e0357e"} Jan 22 08:01:05 crc kubenswrapper[4982]: I0122 08:01:05.797292 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29484481-qb826" Jan 22 08:01:05 crc kubenswrapper[4982]: I0122 08:01:05.933195 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/238cc454-61e6-48fb-8658-8e27bedc24e8-combined-ca-bundle\") pod \"238cc454-61e6-48fb-8658-8e27bedc24e8\" (UID: \"238cc454-61e6-48fb-8658-8e27bedc24e8\") " Jan 22 08:01:05 crc kubenswrapper[4982]: I0122 08:01:05.933611 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d5bhq\" (UniqueName: \"kubernetes.io/projected/238cc454-61e6-48fb-8658-8e27bedc24e8-kube-api-access-d5bhq\") pod \"238cc454-61e6-48fb-8658-8e27bedc24e8\" (UID: \"238cc454-61e6-48fb-8658-8e27bedc24e8\") " Jan 22 08:01:05 crc kubenswrapper[4982]: I0122 08:01:05.933691 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/238cc454-61e6-48fb-8658-8e27bedc24e8-config-data\") pod \"238cc454-61e6-48fb-8658-8e27bedc24e8\" (UID: \"238cc454-61e6-48fb-8658-8e27bedc24e8\") " Jan 22 08:01:05 crc kubenswrapper[4982]: I0122 08:01:05.933802 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/238cc454-61e6-48fb-8658-8e27bedc24e8-fernet-keys\") pod \"238cc454-61e6-48fb-8658-8e27bedc24e8\" (UID: \"238cc454-61e6-48fb-8658-8e27bedc24e8\") " Jan 22 08:01:05 crc kubenswrapper[4982]: I0122 08:01:05.939480 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/238cc454-61e6-48fb-8658-8e27bedc24e8-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "238cc454-61e6-48fb-8658-8e27bedc24e8" (UID: "238cc454-61e6-48fb-8658-8e27bedc24e8"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 08:01:05 crc kubenswrapper[4982]: I0122 08:01:05.943047 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/238cc454-61e6-48fb-8658-8e27bedc24e8-kube-api-access-d5bhq" (OuterVolumeSpecName: "kube-api-access-d5bhq") pod "238cc454-61e6-48fb-8658-8e27bedc24e8" (UID: "238cc454-61e6-48fb-8658-8e27bedc24e8"). InnerVolumeSpecName "kube-api-access-d5bhq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 08:01:05 crc kubenswrapper[4982]: I0122 08:01:05.969617 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/238cc454-61e6-48fb-8658-8e27bedc24e8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "238cc454-61e6-48fb-8658-8e27bedc24e8" (UID: "238cc454-61e6-48fb-8658-8e27bedc24e8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 08:01:06 crc kubenswrapper[4982]: I0122 08:01:06.001703 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/238cc454-61e6-48fb-8658-8e27bedc24e8-config-data" (OuterVolumeSpecName: "config-data") pod "238cc454-61e6-48fb-8658-8e27bedc24e8" (UID: "238cc454-61e6-48fb-8658-8e27bedc24e8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 08:01:06 crc kubenswrapper[4982]: I0122 08:01:06.039621 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d5bhq\" (UniqueName: \"kubernetes.io/projected/238cc454-61e6-48fb-8658-8e27bedc24e8-kube-api-access-d5bhq\") on node \"crc\" DevicePath \"\"" Jan 22 08:01:06 crc kubenswrapper[4982]: I0122 08:01:06.039654 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/238cc454-61e6-48fb-8658-8e27bedc24e8-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 08:01:06 crc kubenswrapper[4982]: I0122 08:01:06.039665 4982 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/238cc454-61e6-48fb-8658-8e27bedc24e8-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 22 08:01:06 crc kubenswrapper[4982]: I0122 08:01:06.039674 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/238cc454-61e6-48fb-8658-8e27bedc24e8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 08:01:06 crc kubenswrapper[4982]: I0122 08:01:06.420582 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29484481-qb826" event={"ID":"238cc454-61e6-48fb-8658-8e27bedc24e8","Type":"ContainerDied","Data":"ffe35cddd78dd0e0f15866f2226102e65595f9ff5bc2a26b9bc122b19de8fc5f"} Jan 22 08:01:06 crc kubenswrapper[4982]: I0122 08:01:06.420626 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29484481-qb826" Jan 22 08:01:06 crc kubenswrapper[4982]: I0122 08:01:06.420636 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ffe35cddd78dd0e0f15866f2226102e65595f9ff5bc2a26b9bc122b19de8fc5f" Jan 22 08:01:18 crc kubenswrapper[4982]: I0122 08:01:18.974273 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 08:01:18 crc kubenswrapper[4982]: I0122 08:01:18.974704 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 08:01:48 crc kubenswrapper[4982]: I0122 08:01:48.973468 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 08:01:48 crc kubenswrapper[4982]: I0122 08:01:48.974084 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 08:01:48 crc kubenswrapper[4982]: I0122 08:01:48.974135 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 08:01:48 crc kubenswrapper[4982]: I0122 08:01:48.975656 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ad35a75a6ffc855bc5d4d1f61c3fce87b1d0fa2160bc99b21b1ee14203e9b724"} pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 08:01:48 crc kubenswrapper[4982]: I0122 08:01:48.975730 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" containerID="cri-o://ad35a75a6ffc855bc5d4d1f61c3fce87b1d0fa2160bc99b21b1ee14203e9b724" gracePeriod=600 Jan 22 08:01:49 crc kubenswrapper[4982]: E0122 08:01:49.220079 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:01:49 crc kubenswrapper[4982]: I0122 08:01:49.818335 4982 generic.go:334] "Generic (PLEG): container finished" podID="2829369e-72ba-4637-853b-88f5cf242a0e" containerID="ad35a75a6ffc855bc5d4d1f61c3fce87b1d0fa2160bc99b21b1ee14203e9b724" 
exitCode=0 Jan 22 08:01:49 crc kubenswrapper[4982]: I0122 08:01:49.818385 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerDied","Data":"ad35a75a6ffc855bc5d4d1f61c3fce87b1d0fa2160bc99b21b1ee14203e9b724"} Jan 22 08:01:49 crc kubenswrapper[4982]: I0122 08:01:49.818422 4982 scope.go:117] "RemoveContainer" containerID="f9d81a771d17c161adc09d5e14cec4966d777d5975a06b7fbd1e7851fd0738c7" Jan 22 08:01:49 crc kubenswrapper[4982]: I0122 08:01:49.819239 4982 scope.go:117] "RemoveContainer" containerID="ad35a75a6ffc855bc5d4d1f61c3fce87b1d0fa2160bc99b21b1ee14203e9b724" Jan 22 08:01:49 crc kubenswrapper[4982]: E0122 08:01:49.819566 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:02:00 crc kubenswrapper[4982]: I0122 08:02:00.720627 4982 scope.go:117] "RemoveContainer" containerID="ad35a75a6ffc855bc5d4d1f61c3fce87b1d0fa2160bc99b21b1ee14203e9b724" Jan 22 08:02:00 crc kubenswrapper[4982]: E0122 08:02:00.721333 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:02:14 crc kubenswrapper[4982]: I0122 08:02:14.719908 4982 scope.go:117] "RemoveContainer" containerID="ad35a75a6ffc855bc5d4d1f61c3fce87b1d0fa2160bc99b21b1ee14203e9b724" Jan 22 08:02:14 crc kubenswrapper[4982]: E0122 08:02:14.720696 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:02:27 crc kubenswrapper[4982]: I0122 08:02:27.719435 4982 scope.go:117] "RemoveContainer" containerID="ad35a75a6ffc855bc5d4d1f61c3fce87b1d0fa2160bc99b21b1ee14203e9b724" Jan 22 08:02:27 crc kubenswrapper[4982]: E0122 08:02:27.720409 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:02:42 crc kubenswrapper[4982]: I0122 08:02:42.720608 4982 scope.go:117] "RemoveContainer" containerID="ad35a75a6ffc855bc5d4d1f61c3fce87b1d0fa2160bc99b21b1ee14203e9b724" Jan 22 08:02:42 crc kubenswrapper[4982]: E0122 08:02:42.721413 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
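The repeating RemoveContainer/CrashLoopBackOff pairs above are pod-worker resyncs bouncing off a restart back-off window that is still open. By default the kubelet's per-container back-off starts at 10s and doubles on each crash up to a 5m cap, which is the "back-off 5m0s" quoted in the errors. A sketch of that schedule, assuming the default constants:

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        // Default kubelet restart back-off: 10s initial, doubled per crash,
        // capped at 5m (the "back-off 5m0s" seen in the errors above).
        delay, limit := 10*time.Second, 5*time.Minute
        for i := 1; delay < limit; i++ {
            fmt.Printf("crash %d: wait %v\n", i, delay)
            delay *= 2
        }
        fmt.Println("later crashes: wait", limit)
    }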
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:02:44 crc kubenswrapper[4982]: I0122 08:02:44.128712 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-gm2ks_d1776698-995c-47f7-bc78-c0123382daca/prometheus-operator/0.log" Jan 22 08:02:44 crc kubenswrapper[4982]: I0122 08:02:44.145175 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6d86744544-9cb44_a1faaf21-b27e-4428-af7c-d3be126a9ed7/prometheus-operator-admission-webhook/0.log" Jan 22 08:02:44 crc kubenswrapper[4982]: I0122 08:02:44.156562 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6d86744544-tkv9d_0c419053-764e-456d-a3e0-86cd79f71c24/prometheus-operator-admission-webhook/0.log" Jan 22 08:02:44 crc kubenswrapper[4982]: I0122 08:02:44.177285 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-m4rnp_624f63e9-4045-48b7-9352-85b78572c5ec/operator/0.log" Jan 22 08:02:44 crc kubenswrapper[4982]: I0122 08:02:44.191196 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-hvlnj_fc4a27dd-6f09-4624-bde7-fdb23f8dbaa8/perses-operator/0.log" Jan 22 08:02:44 crc kubenswrapper[4982]: I0122 08:02:44.308940 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-644t8_97482988-28a9-41f6-90f9-8ab820afdb27/cert-manager-controller/0.log" Jan 22 08:02:44 crc kubenswrapper[4982]: I0122 08:02:44.331934 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-sgz2n_cc5f2133-2f6a-4e37-9224-c109867618ec/cert-manager-cainjector/0.log" Jan 22 08:02:44 crc kubenswrapper[4982]: I0122 08:02:44.342881 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-jbph5_aeb8c181-493c-4f85-8500-f2109edc40f7/cert-manager-webhook/0.log" Jan 22 08:02:45 crc kubenswrapper[4982]: I0122 08:02:45.133065 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc_29d8df82-73c3-42da-adea-9c703a1ed81a/extract/0.log" Jan 22 08:02:45 crc kubenswrapper[4982]: I0122 08:02:45.149707 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc_29d8df82-73c3-42da-adea-9c703a1ed81a/util/0.log" Jan 22 08:02:45 crc kubenswrapper[4982]: I0122 08:02:45.161246 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc_29d8df82-73c3-42da-adea-9c703a1ed81a/pull/0.log" Jan 22 08:02:45 crc kubenswrapper[4982]: I0122 08:02:45.342647 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-59dd8b7cbf-7rhfh_18f9bc75-69ce-4299-ab4a-c280781b056c/manager/0.log" Jan 22 08:02:45 crc kubenswrapper[4982]: I0122 08:02:45.498767 4982 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-69cf5d4557-22wjh_a7fe3154-ef99-4ce9-9151-605f734269f1/manager/0.log" Jan 22 08:02:45 crc kubenswrapper[4982]: I0122 08:02:45.517542 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-b45d7bf98-hstbj_ca35f48e-eddd-46f2-8369-f1e642432834/manager/0.log" Jan 22 08:02:45 crc kubenswrapper[4982]: I0122 08:02:45.706399 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-78fdd796fd-hjbqs_e884b772-f17f-410c-9a2a-1b87fcda735b/manager/0.log" Jan 22 08:02:45 crc kubenswrapper[4982]: I0122 08:02:45.788692 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-594c8c9d5d-f4l4h_bca8ffde-e486-4000-8d0b-39a275b64803/manager/0.log" Jan 22 08:02:45 crc kubenswrapper[4982]: I0122 08:02:45.814967 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-77d5c5b54f-kxwmj_ef693655-09f3-4809-a4b2-8930551fb3f1/manager/0.log" Jan 22 08:02:45 crc kubenswrapper[4982]: I0122 08:02:45.934127 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-z65sv_d8de638b-d0f4-419d-8df5-d0892e3ffc6d/controller/0.log" Jan 22 08:02:45 crc kubenswrapper[4982]: I0122 08:02:45.941648 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-z65sv_d8de638b-d0f4-419d-8df5-d0892e3ffc6d/kube-rbac-proxy/0.log" Jan 22 08:02:45 crc kubenswrapper[4982]: I0122 08:02:45.967307 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4v5kf_8b5087c1-3eb3-46f7-8013-66ffbf2e6f69/controller/0.log" Jan 22 08:02:46 crc kubenswrapper[4982]: I0122 08:02:46.698698 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-54ccf4f85d-6zh6c_84a649d7-4c8c-4da7-93b3-9e537b0207ee/manager/0.log" Jan 22 08:02:46 crc kubenswrapper[4982]: I0122 08:02:46.717471 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-69d6c9f5b8-7zk22_a1825b14-129b-459a-b08b-7e62c7f2414a/manager/0.log" Jan 22 08:02:46 crc kubenswrapper[4982]: I0122 08:02:46.904824 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-b8b6d4659-vkn5h_d6afaf94-bf6d-4559-ab50-c2320aade035/manager/0.log" Jan 22 08:02:46 crc kubenswrapper[4982]: I0122 08:02:46.956889 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-78c6999f6f-t276t_e63b183e-a84a-4964-a082-5f7768d8c472/manager/0.log" Jan 22 08:02:47 crc kubenswrapper[4982]: I0122 08:02:47.052979 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-c87fff755-tklld_6d6c68f5-4111-471b-875c-0d498c4b046d/manager/0.log" Jan 22 08:02:47 crc kubenswrapper[4982]: I0122 08:02:47.153995 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5d8f59fb49-6pr97_99845353-4c2d-4caa-bf12-c396d6b91a82/manager/0.log" Jan 22 08:02:47 crc kubenswrapper[4982]: I0122 08:02:47.414988 4982 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_nova-operator-controller-manager-6b8bc8d87d-rk69w_7cbf8f35-c5cf-4e1c-8718-023380d9ac26/manager/0.log" Jan 22 08:02:47 crc kubenswrapper[4982]: I0122 08:02:47.499226 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7bd9774b6-f44cg_4c78936d-aa34-45c2-8e85-67f6de306d0f/manager/0.log" Jan 22 08:02:47 crc kubenswrapper[4982]: I0122 08:02:47.518749 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g_47ed3df3-a23e-4021-b786-b99d1b710639/manager/0.log" Jan 22 08:02:47 crc kubenswrapper[4982]: I0122 08:02:47.818024 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-init-5cd76577f9-ghpnv_4423d43b-5bd0-4e58-adc1-b428a3254273/operator/0.log" Jan 22 08:02:50 crc kubenswrapper[4982]: I0122 08:02:50.597877 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4v5kf_8b5087c1-3eb3-46f7-8013-66ffbf2e6f69/frr/0.log" Jan 22 08:02:50 crc kubenswrapper[4982]: I0122 08:02:50.612510 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4v5kf_8b5087c1-3eb3-46f7-8013-66ffbf2e6f69/reloader/0.log" Jan 22 08:02:50 crc kubenswrapper[4982]: I0122 08:02:50.618291 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4v5kf_8b5087c1-3eb3-46f7-8013-66ffbf2e6f69/frr-metrics/0.log" Jan 22 08:02:50 crc kubenswrapper[4982]: I0122 08:02:50.623944 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4v5kf_8b5087c1-3eb3-46f7-8013-66ffbf2e6f69/kube-rbac-proxy/0.log" Jan 22 08:02:50 crc kubenswrapper[4982]: I0122 08:02:50.631914 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4v5kf_8b5087c1-3eb3-46f7-8013-66ffbf2e6f69/kube-rbac-proxy-frr/0.log" Jan 22 08:02:50 crc kubenswrapper[4982]: I0122 08:02:50.637481 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4v5kf_8b5087c1-3eb3-46f7-8013-66ffbf2e6f69/cp-frr-files/0.log" Jan 22 08:02:50 crc kubenswrapper[4982]: I0122 08:02:50.644460 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4v5kf_8b5087c1-3eb3-46f7-8013-66ffbf2e6f69/cp-reloader/0.log" Jan 22 08:02:50 crc kubenswrapper[4982]: I0122 08:02:50.655372 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4v5kf_8b5087c1-3eb3-46f7-8013-66ffbf2e6f69/cp-metrics/0.log" Jan 22 08:02:50 crc kubenswrapper[4982]: I0122 08:02:50.662255 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-5f6hr_14e2cb27-a2f0-45fe-8d0a-3be2c34c57aa/frr-k8s-webhook-server/0.log" Jan 22 08:02:50 crc kubenswrapper[4982]: I0122 08:02:50.695894 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-647bb87bbd-fvz54_d6294b08-62b7-465e-a5af-08f9bf1e5ff8/manager/0.log" Jan 22 08:02:50 crc kubenswrapper[4982]: I0122 08:02:50.698183 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-5bc8cc58c6-wjczj_44543400-72e1-4ae7-976e-901b6ebac4b2/manager/0.log" Jan 22 08:02:50 crc kubenswrapper[4982]: I0122 08:02:50.707017 4982 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_metallb-operator-webhook-server-579c7888b9-9pz77_2b28eec2-730e-422f-b384-8b60cb00a45f/webhook-server/0.log" Jan 22 08:02:50 crc kubenswrapper[4982]: I0122 08:02:50.912871 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-cvnbh_2e0bb805-5881-4548-aebd-0924b927cbdd/registry-server/0.log" Jan 22 08:02:51 crc kubenswrapper[4982]: I0122 08:02:51.052471 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-55db956ddc-284c4_099ae039-177c-4335-a611-990dcdf9c655/manager/0.log" Jan 22 08:02:51 crc kubenswrapper[4982]: I0122 08:02:51.092118 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5d646b7d76-9nfxp_1fa2993a-2231-445d-aa77-7190fb3a8fcb/manager/0.log" Jan 22 08:02:51 crc kubenswrapper[4982]: I0122 08:02:51.126091 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-4dbcc_bd17bdc3-15ba-47d5-88f9-56336faa71bf/operator/0.log" Jan 22 08:02:51 crc kubenswrapper[4982]: I0122 08:02:51.163488 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-547cbdb99f-qgxkb_3bde3f49-9b8e-4cb1-8eac-6eb047eda094/manager/0.log" Jan 22 08:02:51 crc kubenswrapper[4982]: I0122 08:02:51.342965 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-85cd9769bb-jmsqf_030e0626-7169-45ea-9981-78c910b04226/manager/0.log" Jan 22 08:02:51 crc kubenswrapper[4982]: I0122 08:02:51.353881 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-69797bbcbd-k7ffz_afdd5f3a-706e-4f4f-930e-c952e4b0c6dc/manager/0.log" Jan 22 08:02:51 crc kubenswrapper[4982]: I0122 08:02:51.365463 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-5ffb9c6597-prb7h_8b31e08d-b4f8-482f-b413-25897c734299/manager/0.log" Jan 22 08:02:51 crc kubenswrapper[4982]: I0122 08:02:51.505119 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-7w7hm_2df7325f-3ac5-49eb-9c91-adc20d7b3e80/speaker/0.log" Jan 22 08:02:51 crc kubenswrapper[4982]: I0122 08:02:51.515829 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-7w7hm_2df7325f-3ac5-49eb-9c91-adc20d7b3e80/kube-rbac-proxy/0.log" Jan 22 08:02:52 crc kubenswrapper[4982]: I0122 08:02:52.282778 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-644t8_97482988-28a9-41f6-90f9-8ab820afdb27/cert-manager-controller/0.log" Jan 22 08:02:52 crc kubenswrapper[4982]: I0122 08:02:52.303632 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-sgz2n_cc5f2133-2f6a-4e37-9224-c109867618ec/cert-manager-cainjector/0.log" Jan 22 08:02:52 crc kubenswrapper[4982]: I0122 08:02:52.315360 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-jbph5_aeb8c181-493c-4f85-8500-f2109edc40f7/cert-manager-webhook/0.log" Jan 22 08:02:52 crc kubenswrapper[4982]: I0122 08:02:52.665238 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7754f76f8b-kg7hd_7c97357b-daac-4b3b-836c-ccd1710cb6bd/nmstate-console-plugin/0.log" Jan 22 
08:02:52 crc kubenswrapper[4982]: I0122 08:02:52.681976 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-q2hd9_2d9b1a0a-d7ca-4e76-ab0d-40cc990e4fb8/nmstate-handler/0.log" Jan 22 08:02:52 crc kubenswrapper[4982]: I0122 08:02:52.692048 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-gmdj6_d7966747-6b58-4d2a-b9c6-f85627f187d3/nmstate-metrics/0.log" Jan 22 08:02:52 crc kubenswrapper[4982]: I0122 08:02:52.699943 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-gmdj6_d7966747-6b58-4d2a-b9c6-f85627f187d3/kube-rbac-proxy/0.log" Jan 22 08:02:52 crc kubenswrapper[4982]: I0122 08:02:52.712815 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-646758c888-nv5l8_a58871fe-63e9-47ab-8f67-b9ec61f7b14a/nmstate-operator/0.log" Jan 22 08:02:52 crc kubenswrapper[4982]: I0122 08:02:52.725232 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-8474b5b9d8-d5tbt_695d8ee6-67a2-42de-a169-5b823f7dce2b/nmstate-webhook/0.log" Jan 22 08:02:52 crc kubenswrapper[4982]: I0122 08:02:52.849720 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-6zct7_1001b154-4839-4c44-a79b-2be8fcbfb706/control-plane-machine-set-operator/0.log" Jan 22 08:02:52 crc kubenswrapper[4982]: I0122 08:02:52.869224 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-wb9ck_3a5a9a72-e46b-4794-8141-04583a99a97a/kube-rbac-proxy/0.log" Jan 22 08:02:52 crc kubenswrapper[4982]: I0122 08:02:52.882346 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-wb9ck_3a5a9a72-e46b-4794-8141-04583a99a97a/machine-api-operator/0.log" Jan 22 08:02:53 crc kubenswrapper[4982]: I0122 08:02:53.501141 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc_29d8df82-73c3-42da-adea-9c703a1ed81a/extract/0.log" Jan 22 08:02:53 crc kubenswrapper[4982]: I0122 08:02:53.513458 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc_29d8df82-73c3-42da-adea-9c703a1ed81a/util/0.log" Jan 22 08:02:53 crc kubenswrapper[4982]: I0122 08:02:53.530577 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3078a1b977323d6d8f95a4018ef06f377c198eb1741282ac05e933b603qdnwc_29d8df82-73c3-42da-adea-9c703a1ed81a/pull/0.log" Jan 22 08:02:53 crc kubenswrapper[4982]: I0122 08:02:53.679549 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-59dd8b7cbf-7rhfh_18f9bc75-69ce-4299-ab4a-c280781b056c/manager/0.log" Jan 22 08:02:53 crc kubenswrapper[4982]: I0122 08:02:53.719156 4982 scope.go:117] "RemoveContainer" containerID="ad35a75a6ffc855bc5d4d1f61c3fce87b1d0fa2160bc99b21b1ee14203e9b724" Jan 22 08:02:53 crc kubenswrapper[4982]: E0122 08:02:53.723512 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:02:53 crc kubenswrapper[4982]: I0122 08:02:53.797144 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-69cf5d4557-22wjh_a7fe3154-ef99-4ce9-9151-605f734269f1/manager/0.log" Jan 22 08:02:53 crc kubenswrapper[4982]: I0122 08:02:53.810843 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-b45d7bf98-hstbj_ca35f48e-eddd-46f2-8369-f1e642432834/manager/0.log" Jan 22 08:02:53 crc kubenswrapper[4982]: I0122 08:02:53.962264 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-78fdd796fd-hjbqs_e884b772-f17f-410c-9a2a-1b87fcda735b/manager/0.log" Jan 22 08:02:54 crc kubenswrapper[4982]: I0122 08:02:54.003126 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-594c8c9d5d-f4l4h_bca8ffde-e486-4000-8d0b-39a275b64803/manager/0.log" Jan 22 08:02:54 crc kubenswrapper[4982]: I0122 08:02:54.019607 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-77d5c5b54f-kxwmj_ef693655-09f3-4809-a4b2-8930551fb3f1/manager/0.log" Jan 22 08:02:54 crc kubenswrapper[4982]: I0122 08:02:54.683968 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-54ccf4f85d-6zh6c_84a649d7-4c8c-4da7-93b3-9e537b0207ee/manager/0.log" Jan 22 08:02:54 crc kubenswrapper[4982]: I0122 08:02:54.695300 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-69d6c9f5b8-7zk22_a1825b14-129b-459a-b08b-7e62c7f2414a/manager/0.log" Jan 22 08:02:54 crc kubenswrapper[4982]: I0122 08:02:54.833394 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-b8b6d4659-vkn5h_d6afaf94-bf6d-4559-ab50-c2320aade035/manager/0.log" Jan 22 08:02:54 crc kubenswrapper[4982]: I0122 08:02:54.880702 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-78c6999f6f-t276t_e63b183e-a84a-4964-a082-5f7768d8c472/manager/0.log" Jan 22 08:02:54 crc kubenswrapper[4982]: I0122 08:02:54.962421 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-c87fff755-tklld_6d6c68f5-4111-471b-875c-0d498c4b046d/manager/0.log" Jan 22 08:02:55 crc kubenswrapper[4982]: I0122 08:02:55.049897 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5d8f59fb49-6pr97_99845353-4c2d-4caa-bf12-c396d6b91a82/manager/0.log" Jan 22 08:02:55 crc kubenswrapper[4982]: I0122 08:02:55.252656 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-6b8bc8d87d-rk69w_7cbf8f35-c5cf-4e1c-8718-023380d9ac26/manager/0.log" Jan 22 08:02:55 crc kubenswrapper[4982]: I0122 08:02:55.312221 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7bd9774b6-f44cg_4c78936d-aa34-45c2-8e85-67f6de306d0f/manager/0.log" Jan 22 08:02:55 crc kubenswrapper[4982]: I0122 08:02:55.323873 4982 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g_47ed3df3-a23e-4021-b786-b99d1b710639/manager/0.log" Jan 22 08:02:55 crc kubenswrapper[4982]: I0122 08:02:55.538333 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-init-5cd76577f9-ghpnv_4423d43b-5bd0-4e58-adc1-b428a3254273/operator/0.log" Jan 22 08:02:57 crc kubenswrapper[4982]: I0122 08:02:57.773495 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-647bb87bbd-fvz54_d6294b08-62b7-465e-a5af-08f9bf1e5ff8/manager/0.log" Jan 22 08:02:57 crc kubenswrapper[4982]: I0122 08:02:57.906679 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-cvnbh_2e0bb805-5881-4548-aebd-0924b927cbdd/registry-server/0.log" Jan 22 08:02:58 crc kubenswrapper[4982]: I0122 08:02:58.010589 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-55db956ddc-284c4_099ae039-177c-4335-a611-990dcdf9c655/manager/0.log" Jan 22 08:02:58 crc kubenswrapper[4982]: I0122 08:02:58.048074 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5d646b7d76-9nfxp_1fa2993a-2231-445d-aa77-7190fb3a8fcb/manager/0.log" Jan 22 08:02:58 crc kubenswrapper[4982]: I0122 08:02:58.079208 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-4dbcc_bd17bdc3-15ba-47d5-88f9-56336faa71bf/operator/0.log" Jan 22 08:02:58 crc kubenswrapper[4982]: I0122 08:02:58.111636 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-547cbdb99f-qgxkb_3bde3f49-9b8e-4cb1-8eac-6eb047eda094/manager/0.log" Jan 22 08:02:58 crc kubenswrapper[4982]: I0122 08:02:58.314605 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-85cd9769bb-jmsqf_030e0626-7169-45ea-9981-78c910b04226/manager/0.log" Jan 22 08:02:58 crc kubenswrapper[4982]: I0122 08:02:58.323321 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-69797bbcbd-k7ffz_afdd5f3a-706e-4f4f-930e-c952e4b0c6dc/manager/0.log" Jan 22 08:02:58 crc kubenswrapper[4982]: I0122 08:02:58.335047 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-5ffb9c6597-prb7h_8b31e08d-b4f8-482f-b413-25897c734299/manager/0.log" Jan 22 08:02:59 crc kubenswrapper[4982]: I0122 08:02:59.993296 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4xrw6_f70d53ec-9c73-45bf-b6b4-ec45565ef1e6/kube-multus/2.log" Jan 22 08:03:00 crc kubenswrapper[4982]: I0122 08:03:00.179379 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4xrw6_f70d53ec-9c73-45bf-b6b4-ec45565ef1e6/kube-multus/3.log" Jan 22 08:03:00 crc kubenswrapper[4982]: I0122 08:03:00.191438 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-f98dk_64ec5ffc-3edf-4820-bc21-58b1ace5615c/kube-multus-additional-cni-plugins/0.log" Jan 22 08:03:00 crc kubenswrapper[4982]: I0122 08:03:00.201395 4982 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-f98dk_64ec5ffc-3edf-4820-bc21-58b1ace5615c/egress-router-binary-copy/0.log" Jan 22 08:03:00 crc kubenswrapper[4982]: I0122 08:03:00.212503 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-f98dk_64ec5ffc-3edf-4820-bc21-58b1ace5615c/cni-plugins/0.log" Jan 22 08:03:00 crc kubenswrapper[4982]: I0122 08:03:00.222600 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-f98dk_64ec5ffc-3edf-4820-bc21-58b1ace5615c/bond-cni-plugin/0.log" Jan 22 08:03:00 crc kubenswrapper[4982]: I0122 08:03:00.229373 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-f98dk_64ec5ffc-3edf-4820-bc21-58b1ace5615c/routeoverride-cni/0.log" Jan 22 08:03:00 crc kubenswrapper[4982]: I0122 08:03:00.236249 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-f98dk_64ec5ffc-3edf-4820-bc21-58b1ace5615c/whereabouts-cni-bincopy/0.log" Jan 22 08:03:00 crc kubenswrapper[4982]: I0122 08:03:00.244516 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-f98dk_64ec5ffc-3edf-4820-bc21-58b1ace5615c/whereabouts-cni/0.log" Jan 22 08:03:00 crc kubenswrapper[4982]: I0122 08:03:00.286901 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-admission-controller-857f4d67dd-xrrnp_0c85d6b4-ae35-4226-ae70-8ee0f059fb80/multus-admission-controller/0.log" Jan 22 08:03:00 crc kubenswrapper[4982]: I0122 08:03:00.292107 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-admission-controller-857f4d67dd-xrrnp_0c85d6b4-ae35-4226-ae70-8ee0f059fb80/kube-rbac-proxy/0.log" Jan 22 08:03:00 crc kubenswrapper[4982]: I0122 08:03:00.356447 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_network-metrics-daemon-gxwx2_377cd579-2ade-48ea-ad2a-44d1546fd5fb/network-metrics-daemon/0.log" Jan 22 08:03:00 crc kubenswrapper[4982]: I0122 08:03:00.362412 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_network-metrics-daemon-gxwx2_377cd579-2ade-48ea-ad2a-44d1546fd5fb/kube-rbac-proxy/0.log" Jan 22 08:03:04 crc kubenswrapper[4982]: I0122 08:03:04.720042 4982 scope.go:117] "RemoveContainer" containerID="ad35a75a6ffc855bc5d4d1f61c3fce87b1d0fa2160bc99b21b1ee14203e9b724" Jan 22 08:03:04 crc kubenswrapper[4982]: E0122 08:03:04.721002 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:03:06 crc kubenswrapper[4982]: I0122 08:03:06.337494 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-k5jvn"] Jan 22 08:03:06 crc kubenswrapper[4982]: E0122 08:03:06.338391 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="238cc454-61e6-48fb-8658-8e27bedc24e8" containerName="keystone-cron" Jan 22 08:03:06 crc kubenswrapper[4982]: I0122 08:03:06.338410 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="238cc454-61e6-48fb-8658-8e27bedc24e8" 
containerName="keystone-cron" Jan 22 08:03:06 crc kubenswrapper[4982]: I0122 08:03:06.338750 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="238cc454-61e6-48fb-8658-8e27bedc24e8" containerName="keystone-cron" Jan 22 08:03:06 crc kubenswrapper[4982]: I0122 08:03:06.341005 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k5jvn" Jan 22 08:03:06 crc kubenswrapper[4982]: I0122 08:03:06.361421 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-k5jvn"] Jan 22 08:03:06 crc kubenswrapper[4982]: I0122 08:03:06.391389 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a-catalog-content\") pod \"redhat-marketplace-k5jvn\" (UID: \"1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a\") " pod="openshift-marketplace/redhat-marketplace-k5jvn" Jan 22 08:03:06 crc kubenswrapper[4982]: I0122 08:03:06.391581 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a-utilities\") pod \"redhat-marketplace-k5jvn\" (UID: \"1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a\") " pod="openshift-marketplace/redhat-marketplace-k5jvn" Jan 22 08:03:06 crc kubenswrapper[4982]: I0122 08:03:06.391761 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cl7d6\" (UniqueName: \"kubernetes.io/projected/1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a-kube-api-access-cl7d6\") pod \"redhat-marketplace-k5jvn\" (UID: \"1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a\") " pod="openshift-marketplace/redhat-marketplace-k5jvn" Jan 22 08:03:06 crc kubenswrapper[4982]: I0122 08:03:06.492609 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cl7d6\" (UniqueName: \"kubernetes.io/projected/1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a-kube-api-access-cl7d6\") pod \"redhat-marketplace-k5jvn\" (UID: \"1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a\") " pod="openshift-marketplace/redhat-marketplace-k5jvn" Jan 22 08:03:06 crc kubenswrapper[4982]: I0122 08:03:06.492682 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a-catalog-content\") pod \"redhat-marketplace-k5jvn\" (UID: \"1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a\") " pod="openshift-marketplace/redhat-marketplace-k5jvn" Jan 22 08:03:06 crc kubenswrapper[4982]: I0122 08:03:06.492743 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a-utilities\") pod \"redhat-marketplace-k5jvn\" (UID: \"1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a\") " pod="openshift-marketplace/redhat-marketplace-k5jvn" Jan 22 08:03:06 crc kubenswrapper[4982]: I0122 08:03:06.493291 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a-utilities\") pod \"redhat-marketplace-k5jvn\" (UID: \"1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a\") " pod="openshift-marketplace/redhat-marketplace-k5jvn" Jan 22 08:03:06 crc kubenswrapper[4982]: I0122 08:03:06.493741 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a-catalog-content\") pod \"redhat-marketplace-k5jvn\" (UID: \"1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a\") " pod="openshift-marketplace/redhat-marketplace-k5jvn" Jan 22 08:03:06 crc kubenswrapper[4982]: I0122 08:03:06.514199 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cl7d6\" (UniqueName: \"kubernetes.io/projected/1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a-kube-api-access-cl7d6\") pod \"redhat-marketplace-k5jvn\" (UID: \"1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a\") " pod="openshift-marketplace/redhat-marketplace-k5jvn" Jan 22 08:03:06 crc kubenswrapper[4982]: I0122 08:03:06.666119 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k5jvn" Jan 22 08:03:07 crc kubenswrapper[4982]: I0122 08:03:07.155320 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-k5jvn"] Jan 22 08:03:07 crc kubenswrapper[4982]: I0122 08:03:07.594144 4982 generic.go:334] "Generic (PLEG): container finished" podID="1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a" containerID="a125740a39770df6a2fa64f9d1e4a42a97475190d5cdbf78208303e0412b3fba" exitCode=0 Jan 22 08:03:07 crc kubenswrapper[4982]: I0122 08:03:07.594640 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k5jvn" event={"ID":"1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a","Type":"ContainerDied","Data":"a125740a39770df6a2fa64f9d1e4a42a97475190d5cdbf78208303e0412b3fba"} Jan 22 08:03:07 crc kubenswrapper[4982]: I0122 08:03:07.594677 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k5jvn" event={"ID":"1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a","Type":"ContainerStarted","Data":"ae1317a0368b9ac85b8d2e3e3f05e972d8c45227b87854040f2fe193aa8b4602"} Jan 22 08:03:07 crc kubenswrapper[4982]: I0122 08:03:07.596904 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 08:03:08 crc kubenswrapper[4982]: I0122 08:03:08.605438 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k5jvn" event={"ID":"1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a","Type":"ContainerStarted","Data":"f52b5bada754ed2a34872d2ab1e7bfdccbf0632aaf49da470bc5b4dfbdd18e9e"} Jan 22 08:03:09 crc kubenswrapper[4982]: I0122 08:03:09.523034 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2jx68"] Jan 22 08:03:09 crc kubenswrapper[4982]: I0122 08:03:09.526367 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2jx68" Jan 22 08:03:09 crc kubenswrapper[4982]: I0122 08:03:09.569345 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2jx68"] Jan 22 08:03:09 crc kubenswrapper[4982]: I0122 08:03:09.617425 4982 generic.go:334] "Generic (PLEG): container finished" podID="1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a" containerID="f52b5bada754ed2a34872d2ab1e7bfdccbf0632aaf49da470bc5b4dfbdd18e9e" exitCode=0 Jan 22 08:03:09 crc kubenswrapper[4982]: I0122 08:03:09.617490 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k5jvn" event={"ID":"1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a","Type":"ContainerDied","Data":"f52b5bada754ed2a34872d2ab1e7bfdccbf0632aaf49da470bc5b4dfbdd18e9e"} Jan 22 08:03:09 crc kubenswrapper[4982]: I0122 08:03:09.674213 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5wv6\" (UniqueName: \"kubernetes.io/projected/69db0c85-de98-47c2-b179-7e36442ee1ac-kube-api-access-s5wv6\") pod \"certified-operators-2jx68\" (UID: \"69db0c85-de98-47c2-b179-7e36442ee1ac\") " pod="openshift-marketplace/certified-operators-2jx68" Jan 22 08:03:09 crc kubenswrapper[4982]: I0122 08:03:09.674328 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69db0c85-de98-47c2-b179-7e36442ee1ac-catalog-content\") pod \"certified-operators-2jx68\" (UID: \"69db0c85-de98-47c2-b179-7e36442ee1ac\") " pod="openshift-marketplace/certified-operators-2jx68" Jan 22 08:03:09 crc kubenswrapper[4982]: I0122 08:03:09.674355 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69db0c85-de98-47c2-b179-7e36442ee1ac-utilities\") pod \"certified-operators-2jx68\" (UID: \"69db0c85-de98-47c2-b179-7e36442ee1ac\") " pod="openshift-marketplace/certified-operators-2jx68" Jan 22 08:03:09 crc kubenswrapper[4982]: I0122 08:03:09.776241 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69db0c85-de98-47c2-b179-7e36442ee1ac-catalog-content\") pod \"certified-operators-2jx68\" (UID: \"69db0c85-de98-47c2-b179-7e36442ee1ac\") " pod="openshift-marketplace/certified-operators-2jx68" Jan 22 08:03:09 crc kubenswrapper[4982]: I0122 08:03:09.777126 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69db0c85-de98-47c2-b179-7e36442ee1ac-utilities\") pod \"certified-operators-2jx68\" (UID: \"69db0c85-de98-47c2-b179-7e36442ee1ac\") " pod="openshift-marketplace/certified-operators-2jx68" Jan 22 08:03:09 crc kubenswrapper[4982]: I0122 08:03:09.777434 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5wv6\" (UniqueName: \"kubernetes.io/projected/69db0c85-de98-47c2-b179-7e36442ee1ac-kube-api-access-s5wv6\") pod \"certified-operators-2jx68\" (UID: \"69db0c85-de98-47c2-b179-7e36442ee1ac\") " pod="openshift-marketplace/certified-operators-2jx68" Jan 22 08:03:09 crc kubenswrapper[4982]: I0122 08:03:09.777071 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69db0c85-de98-47c2-b179-7e36442ee1ac-catalog-content\") pod \"certified-operators-2jx68\" 
(UID: \"69db0c85-de98-47c2-b179-7e36442ee1ac\") " pod="openshift-marketplace/certified-operators-2jx68" Jan 22 08:03:09 crc kubenswrapper[4982]: I0122 08:03:09.778102 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69db0c85-de98-47c2-b179-7e36442ee1ac-utilities\") pod \"certified-operators-2jx68\" (UID: \"69db0c85-de98-47c2-b179-7e36442ee1ac\") " pod="openshift-marketplace/certified-operators-2jx68" Jan 22 08:03:09 crc kubenswrapper[4982]: I0122 08:03:09.798509 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5wv6\" (UniqueName: \"kubernetes.io/projected/69db0c85-de98-47c2-b179-7e36442ee1ac-kube-api-access-s5wv6\") pod \"certified-operators-2jx68\" (UID: \"69db0c85-de98-47c2-b179-7e36442ee1ac\") " pod="openshift-marketplace/certified-operators-2jx68" Jan 22 08:03:09 crc kubenswrapper[4982]: I0122 08:03:09.869181 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2jx68" Jan 22 08:03:10 crc kubenswrapper[4982]: I0122 08:03:10.412446 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2jx68"] Jan 22 08:03:10 crc kubenswrapper[4982]: I0122 08:03:10.628013 4982 generic.go:334] "Generic (PLEG): container finished" podID="69db0c85-de98-47c2-b179-7e36442ee1ac" containerID="fb5204a0f5a6b6ecf4dac71d04d6617956ed585017e8f4126237134e7e7ab6b0" exitCode=0 Jan 22 08:03:10 crc kubenswrapper[4982]: I0122 08:03:10.628407 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2jx68" event={"ID":"69db0c85-de98-47c2-b179-7e36442ee1ac","Type":"ContainerDied","Data":"fb5204a0f5a6b6ecf4dac71d04d6617956ed585017e8f4126237134e7e7ab6b0"} Jan 22 08:03:10 crc kubenswrapper[4982]: I0122 08:03:10.628448 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2jx68" event={"ID":"69db0c85-de98-47c2-b179-7e36442ee1ac","Type":"ContainerStarted","Data":"a093156b507b95cafcf21ada18a458f574ba024375b430d8d20ae17db004ae5e"} Jan 22 08:03:10 crc kubenswrapper[4982]: I0122 08:03:10.631161 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k5jvn" event={"ID":"1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a","Type":"ContainerStarted","Data":"b1acf7f09bb044d3eeeed55ca0dc9eeeba13136dc1fb160644f8893154e420a5"} Jan 22 08:03:10 crc kubenswrapper[4982]: I0122 08:03:10.671562 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-k5jvn" podStartSLOduration=2.201117736 podStartE2EDuration="4.671542463s" podCreationTimestamp="2026-01-22 08:03:06 +0000 UTC" firstStartedPulling="2026-01-22 08:03:07.596336979 +0000 UTC m=+8248.434975002" lastFinishedPulling="2026-01-22 08:03:10.066761726 +0000 UTC m=+8250.905399729" observedRunningTime="2026-01-22 08:03:10.66629127 +0000 UTC m=+8251.504929263" watchObservedRunningTime="2026-01-22 08:03:10.671542463 +0000 UTC m=+8251.510180466" Jan 22 08:03:11 crc kubenswrapper[4982]: I0122 08:03:11.642211 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2jx68" event={"ID":"69db0c85-de98-47c2-b179-7e36442ee1ac","Type":"ContainerStarted","Data":"759bc5298ec6ac769cd4e1293c8313245e12e9b1e4d3bf5521b03f2312bbc993"} Jan 22 08:03:12 crc kubenswrapper[4982]: I0122 08:03:12.652387 4982 generic.go:334] "Generic (PLEG): container finished" 
podID="69db0c85-de98-47c2-b179-7e36442ee1ac" containerID="759bc5298ec6ac769cd4e1293c8313245e12e9b1e4d3bf5521b03f2312bbc993" exitCode=0 Jan 22 08:03:12 crc kubenswrapper[4982]: I0122 08:03:12.652474 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2jx68" event={"ID":"69db0c85-de98-47c2-b179-7e36442ee1ac","Type":"ContainerDied","Data":"759bc5298ec6ac769cd4e1293c8313245e12e9b1e4d3bf5521b03f2312bbc993"} Jan 22 08:03:13 crc kubenswrapper[4982]: I0122 08:03:13.667329 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2jx68" event={"ID":"69db0c85-de98-47c2-b179-7e36442ee1ac","Type":"ContainerStarted","Data":"a0e6a5707295bb6cf939dc603e69c3406479b46b18fea46a3a1baa93c3532f44"} Jan 22 08:03:13 crc kubenswrapper[4982]: I0122 08:03:13.697322 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2jx68" podStartSLOduration=2.235925735 podStartE2EDuration="4.697301796s" podCreationTimestamp="2026-01-22 08:03:09 +0000 UTC" firstStartedPulling="2026-01-22 08:03:10.629542825 +0000 UTC m=+8251.468180828" lastFinishedPulling="2026-01-22 08:03:13.090918876 +0000 UTC m=+8253.929556889" observedRunningTime="2026-01-22 08:03:13.68526124 +0000 UTC m=+8254.523899243" watchObservedRunningTime="2026-01-22 08:03:13.697301796 +0000 UTC m=+8254.535939799" Jan 22 08:03:16 crc kubenswrapper[4982]: I0122 08:03:16.666769 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-k5jvn" Jan 22 08:03:16 crc kubenswrapper[4982]: I0122 08:03:16.667355 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-k5jvn" Jan 22 08:03:16 crc kubenswrapper[4982]: I0122 08:03:16.716307 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-k5jvn" Jan 22 08:03:16 crc kubenswrapper[4982]: I0122 08:03:16.774870 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-k5jvn" Jan 22 08:03:17 crc kubenswrapper[4982]: I0122 08:03:17.912095 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-k5jvn"] Jan 22 08:03:18 crc kubenswrapper[4982]: I0122 08:03:18.714615 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-k5jvn" podUID="1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a" containerName="registry-server" containerID="cri-o://b1acf7f09bb044d3eeeed55ca0dc9eeeba13136dc1fb160644f8893154e420a5" gracePeriod=2 Jan 22 08:03:19 crc kubenswrapper[4982]: I0122 08:03:19.177979 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k5jvn" Jan 22 08:03:19 crc kubenswrapper[4982]: I0122 08:03:19.319779 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a-catalog-content\") pod \"1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a\" (UID: \"1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a\") " Jan 22 08:03:19 crc kubenswrapper[4982]: I0122 08:03:19.319899 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a-utilities\") pod \"1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a\" (UID: \"1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a\") " Jan 22 08:03:19 crc kubenswrapper[4982]: I0122 08:03:19.319963 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cl7d6\" (UniqueName: \"kubernetes.io/projected/1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a-kube-api-access-cl7d6\") pod \"1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a\" (UID: \"1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a\") " Jan 22 08:03:19 crc kubenswrapper[4982]: I0122 08:03:19.321936 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a-utilities" (OuterVolumeSpecName: "utilities") pod "1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a" (UID: "1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 08:03:19 crc kubenswrapper[4982]: I0122 08:03:19.328238 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a-kube-api-access-cl7d6" (OuterVolumeSpecName: "kube-api-access-cl7d6") pod "1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a" (UID: "1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a"). InnerVolumeSpecName "kube-api-access-cl7d6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 08:03:19 crc kubenswrapper[4982]: I0122 08:03:19.342481 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a" (UID: "1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 08:03:19 crc kubenswrapper[4982]: I0122 08:03:19.422812 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 08:03:19 crc kubenswrapper[4982]: I0122 08:03:19.422844 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 08:03:19 crc kubenswrapper[4982]: I0122 08:03:19.422869 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cl7d6\" (UniqueName: \"kubernetes.io/projected/1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a-kube-api-access-cl7d6\") on node \"crc\" DevicePath \"\"" Jan 22 08:03:19 crc kubenswrapper[4982]: I0122 08:03:19.759426 4982 scope.go:117] "RemoveContainer" containerID="ad35a75a6ffc855bc5d4d1f61c3fce87b1d0fa2160bc99b21b1ee14203e9b724" Jan 22 08:03:19 crc kubenswrapper[4982]: E0122 08:03:19.759934 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:03:19 crc kubenswrapper[4982]: I0122 08:03:19.764892 4982 generic.go:334] "Generic (PLEG): container finished" podID="1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a" containerID="b1acf7f09bb044d3eeeed55ca0dc9eeeba13136dc1fb160644f8893154e420a5" exitCode=0 Jan 22 08:03:19 crc kubenswrapper[4982]: I0122 08:03:19.764971 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k5jvn" event={"ID":"1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a","Type":"ContainerDied","Data":"b1acf7f09bb044d3eeeed55ca0dc9eeeba13136dc1fb160644f8893154e420a5"} Jan 22 08:03:19 crc kubenswrapper[4982]: I0122 08:03:19.765000 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k5jvn" Jan 22 08:03:19 crc kubenswrapper[4982]: I0122 08:03:19.765060 4982 scope.go:117] "RemoveContainer" containerID="b1acf7f09bb044d3eeeed55ca0dc9eeeba13136dc1fb160644f8893154e420a5" Jan 22 08:03:19 crc kubenswrapper[4982]: I0122 08:03:19.765024 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k5jvn" event={"ID":"1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a","Type":"ContainerDied","Data":"ae1317a0368b9ac85b8d2e3e3f05e972d8c45227b87854040f2fe193aa8b4602"} Jan 22 08:03:19 crc kubenswrapper[4982]: I0122 08:03:19.795132 4982 scope.go:117] "RemoveContainer" containerID="f52b5bada754ed2a34872d2ab1e7bfdccbf0632aaf49da470bc5b4dfbdd18e9e" Jan 22 08:03:19 crc kubenswrapper[4982]: I0122 08:03:19.817294 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-k5jvn"] Jan 22 08:03:19 crc kubenswrapper[4982]: I0122 08:03:19.831441 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-k5jvn"] Jan 22 08:03:19 crc kubenswrapper[4982]: I0122 08:03:19.836302 4982 scope.go:117] "RemoveContainer" containerID="a125740a39770df6a2fa64f9d1e4a42a97475190d5cdbf78208303e0412b3fba" Jan 22 08:03:19 crc kubenswrapper[4982]: I0122 08:03:19.869788 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2jx68" Jan 22 08:03:19 crc kubenswrapper[4982]: I0122 08:03:19.869838 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2jx68" Jan 22 08:03:19 crc kubenswrapper[4982]: I0122 08:03:19.899558 4982 scope.go:117] "RemoveContainer" containerID="b1acf7f09bb044d3eeeed55ca0dc9eeeba13136dc1fb160644f8893154e420a5" Jan 22 08:03:19 crc kubenswrapper[4982]: E0122 08:03:19.900195 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1acf7f09bb044d3eeeed55ca0dc9eeeba13136dc1fb160644f8893154e420a5\": container with ID starting with b1acf7f09bb044d3eeeed55ca0dc9eeeba13136dc1fb160644f8893154e420a5 not found: ID does not exist" containerID="b1acf7f09bb044d3eeeed55ca0dc9eeeba13136dc1fb160644f8893154e420a5" Jan 22 08:03:19 crc kubenswrapper[4982]: I0122 08:03:19.900259 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1acf7f09bb044d3eeeed55ca0dc9eeeba13136dc1fb160644f8893154e420a5"} err="failed to get container status \"b1acf7f09bb044d3eeeed55ca0dc9eeeba13136dc1fb160644f8893154e420a5\": rpc error: code = NotFound desc = could not find container \"b1acf7f09bb044d3eeeed55ca0dc9eeeba13136dc1fb160644f8893154e420a5\": container with ID starting with b1acf7f09bb044d3eeeed55ca0dc9eeeba13136dc1fb160644f8893154e420a5 not found: ID does not exist" Jan 22 08:03:19 crc kubenswrapper[4982]: I0122 08:03:19.900295 4982 scope.go:117] "RemoveContainer" containerID="f52b5bada754ed2a34872d2ab1e7bfdccbf0632aaf49da470bc5b4dfbdd18e9e" Jan 22 08:03:19 crc kubenswrapper[4982]: E0122 08:03:19.901188 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f52b5bada754ed2a34872d2ab1e7bfdccbf0632aaf49da470bc5b4dfbdd18e9e\": container with ID starting with f52b5bada754ed2a34872d2ab1e7bfdccbf0632aaf49da470bc5b4dfbdd18e9e not found: ID does not exist" containerID="f52b5bada754ed2a34872d2ab1e7bfdccbf0632aaf49da470bc5b4dfbdd18e9e" Jan 22 08:03:19 
crc kubenswrapper[4982]: I0122 08:03:19.901224 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f52b5bada754ed2a34872d2ab1e7bfdccbf0632aaf49da470bc5b4dfbdd18e9e"} err="failed to get container status \"f52b5bada754ed2a34872d2ab1e7bfdccbf0632aaf49da470bc5b4dfbdd18e9e\": rpc error: code = NotFound desc = could not find container \"f52b5bada754ed2a34872d2ab1e7bfdccbf0632aaf49da470bc5b4dfbdd18e9e\": container with ID starting with f52b5bada754ed2a34872d2ab1e7bfdccbf0632aaf49da470bc5b4dfbdd18e9e not found: ID does not exist" Jan 22 08:03:19 crc kubenswrapper[4982]: I0122 08:03:19.901256 4982 scope.go:117] "RemoveContainer" containerID="a125740a39770df6a2fa64f9d1e4a42a97475190d5cdbf78208303e0412b3fba" Jan 22 08:03:19 crc kubenswrapper[4982]: E0122 08:03:19.901539 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a125740a39770df6a2fa64f9d1e4a42a97475190d5cdbf78208303e0412b3fba\": container with ID starting with a125740a39770df6a2fa64f9d1e4a42a97475190d5cdbf78208303e0412b3fba not found: ID does not exist" containerID="a125740a39770df6a2fa64f9d1e4a42a97475190d5cdbf78208303e0412b3fba" Jan 22 08:03:19 crc kubenswrapper[4982]: I0122 08:03:19.901558 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a125740a39770df6a2fa64f9d1e4a42a97475190d5cdbf78208303e0412b3fba"} err="failed to get container status \"a125740a39770df6a2fa64f9d1e4a42a97475190d5cdbf78208303e0412b3fba\": rpc error: code = NotFound desc = could not find container \"a125740a39770df6a2fa64f9d1e4a42a97475190d5cdbf78208303e0412b3fba\": container with ID starting with a125740a39770df6a2fa64f9d1e4a42a97475190d5cdbf78208303e0412b3fba not found: ID does not exist" Jan 22 08:03:19 crc kubenswrapper[4982]: I0122 08:03:19.927509 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2jx68" Jan 22 08:03:20 crc kubenswrapper[4982]: I0122 08:03:20.863249 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2jx68" Jan 22 08:03:21 crc kubenswrapper[4982]: I0122 08:03:21.734005 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a" path="/var/lib/kubelet/pods/1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a/volumes" Jan 22 08:03:22 crc kubenswrapper[4982]: I0122 08:03:22.306063 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2jx68"] Jan 22 08:03:22 crc kubenswrapper[4982]: I0122 08:03:22.816812 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2jx68" podUID="69db0c85-de98-47c2-b179-7e36442ee1ac" containerName="registry-server" containerID="cri-o://a0e6a5707295bb6cf939dc603e69c3406479b46b18fea46a3a1baa93c3532f44" gracePeriod=2 Jan 22 08:03:23 crc kubenswrapper[4982]: I0122 08:03:23.391945 4982 util.go:48] "No ready sandbox for pod can be found. 
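The repeated "ContainerStatus from runtime service failed ... NotFound" / "DeleteContainer returned error" pairs above are benign: by the time the kubelet retries the cleanup, CRI-O has already removed the container, so the runtime answers the ContainerStatus RPC with gRPC code NotFound. A minimal sketch of how a CRI client can treat that code as "already deleted" rather than a failure follows; the statusGetter interface and removeIfPresent helper are illustrative assumptions, not the kubelet's actual code.

package crihelpers

import (
	"context"
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// statusGetter stands in for the CRI runtime client; only the single call
// used here is modeled. The interface is hypothetical, for illustration.
type statusGetter interface {
	ContainerStatus(ctx context.Context, id string) (string, error)
}

// removeIfPresent asks the runtime for the container's status and treats
// gRPC NotFound as success: the container is already gone, so there is
// nothing left to delete and no error worth surfacing.
func removeIfPresent(ctx context.Context, rt statusGetter, id string) error {
	if _, err := rt.ContainerStatus(ctx, id); err != nil {
		if status.Code(err) == codes.NotFound {
			return nil // idempotent delete: absence is the desired state
		}
		return fmt.Errorf("container status %s: %w", id, err)
	}
	// Container still exists; this is where the actual RemoveContainer
	// call would go.
	return nil
}

The kubelet itself simply logs the error and moves on, which is why the entries are informational noise rather than a cleanup failure.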
Need to start a new one" pod="openshift-marketplace/certified-operators-2jx68" Jan 22 08:03:23 crc kubenswrapper[4982]: I0122 08:03:23.535414 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s5wv6\" (UniqueName: \"kubernetes.io/projected/69db0c85-de98-47c2-b179-7e36442ee1ac-kube-api-access-s5wv6\") pod \"69db0c85-de98-47c2-b179-7e36442ee1ac\" (UID: \"69db0c85-de98-47c2-b179-7e36442ee1ac\") " Jan 22 08:03:23 crc kubenswrapper[4982]: I0122 08:03:23.535557 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69db0c85-de98-47c2-b179-7e36442ee1ac-catalog-content\") pod \"69db0c85-de98-47c2-b179-7e36442ee1ac\" (UID: \"69db0c85-de98-47c2-b179-7e36442ee1ac\") " Jan 22 08:03:23 crc kubenswrapper[4982]: I0122 08:03:23.535698 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69db0c85-de98-47c2-b179-7e36442ee1ac-utilities\") pod \"69db0c85-de98-47c2-b179-7e36442ee1ac\" (UID: \"69db0c85-de98-47c2-b179-7e36442ee1ac\") " Jan 22 08:03:23 crc kubenswrapper[4982]: I0122 08:03:23.536550 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69db0c85-de98-47c2-b179-7e36442ee1ac-utilities" (OuterVolumeSpecName: "utilities") pod "69db0c85-de98-47c2-b179-7e36442ee1ac" (UID: "69db0c85-de98-47c2-b179-7e36442ee1ac"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 08:03:23 crc kubenswrapper[4982]: I0122 08:03:23.543956 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69db0c85-de98-47c2-b179-7e36442ee1ac-kube-api-access-s5wv6" (OuterVolumeSpecName: "kube-api-access-s5wv6") pod "69db0c85-de98-47c2-b179-7e36442ee1ac" (UID: "69db0c85-de98-47c2-b179-7e36442ee1ac"). InnerVolumeSpecName "kube-api-access-s5wv6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 08:03:23 crc kubenswrapper[4982]: I0122 08:03:23.577375 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69db0c85-de98-47c2-b179-7e36442ee1ac-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "69db0c85-de98-47c2-b179-7e36442ee1ac" (UID: "69db0c85-de98-47c2-b179-7e36442ee1ac"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 08:03:23 crc kubenswrapper[4982]: I0122 08:03:23.639357 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69db0c85-de98-47c2-b179-7e36442ee1ac-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 08:03:23 crc kubenswrapper[4982]: I0122 08:03:23.639416 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69db0c85-de98-47c2-b179-7e36442ee1ac-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 08:03:23 crc kubenswrapper[4982]: I0122 08:03:23.639431 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s5wv6\" (UniqueName: \"kubernetes.io/projected/69db0c85-de98-47c2-b179-7e36442ee1ac-kube-api-access-s5wv6\") on node \"crc\" DevicePath \"\"" Jan 22 08:03:23 crc kubenswrapper[4982]: I0122 08:03:23.831795 4982 generic.go:334] "Generic (PLEG): container finished" podID="69db0c85-de98-47c2-b179-7e36442ee1ac" containerID="a0e6a5707295bb6cf939dc603e69c3406479b46b18fea46a3a1baa93c3532f44" exitCode=0 Jan 22 08:03:23 crc kubenswrapper[4982]: I0122 08:03:23.831838 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2jx68" event={"ID":"69db0c85-de98-47c2-b179-7e36442ee1ac","Type":"ContainerDied","Data":"a0e6a5707295bb6cf939dc603e69c3406479b46b18fea46a3a1baa93c3532f44"} Jan 22 08:03:23 crc kubenswrapper[4982]: I0122 08:03:23.831916 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2jx68" Jan 22 08:03:23 crc kubenswrapper[4982]: I0122 08:03:23.831978 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2jx68" event={"ID":"69db0c85-de98-47c2-b179-7e36442ee1ac","Type":"ContainerDied","Data":"a093156b507b95cafcf21ada18a458f574ba024375b430d8d20ae17db004ae5e"} Jan 22 08:03:23 crc kubenswrapper[4982]: I0122 08:03:23.832016 4982 scope.go:117] "RemoveContainer" containerID="a0e6a5707295bb6cf939dc603e69c3406479b46b18fea46a3a1baa93c3532f44" Jan 22 08:03:23 crc kubenswrapper[4982]: I0122 08:03:23.862117 4982 scope.go:117] "RemoveContainer" containerID="759bc5298ec6ac769cd4e1293c8313245e12e9b1e4d3bf5521b03f2312bbc993" Jan 22 08:03:23 crc kubenswrapper[4982]: I0122 08:03:23.865702 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2jx68"] Jan 22 08:03:23 crc kubenswrapper[4982]: I0122 08:03:23.875357 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2jx68"] Jan 22 08:03:23 crc kubenswrapper[4982]: I0122 08:03:23.888539 4982 scope.go:117] "RemoveContainer" containerID="fb5204a0f5a6b6ecf4dac71d04d6617956ed585017e8f4126237134e7e7ab6b0" Jan 22 08:03:23 crc kubenswrapper[4982]: I0122 08:03:23.966888 4982 scope.go:117] "RemoveContainer" containerID="a0e6a5707295bb6cf939dc603e69c3406479b46b18fea46a3a1baa93c3532f44" Jan 22 08:03:23 crc kubenswrapper[4982]: E0122 08:03:23.967606 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0e6a5707295bb6cf939dc603e69c3406479b46b18fea46a3a1baa93c3532f44\": container with ID starting with a0e6a5707295bb6cf939dc603e69c3406479b46b18fea46a3a1baa93c3532f44 not found: ID does not exist" containerID="a0e6a5707295bb6cf939dc603e69c3406479b46b18fea46a3a1baa93c3532f44" Jan 22 08:03:23 crc kubenswrapper[4982]: I0122 08:03:23.967647 
4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0e6a5707295bb6cf939dc603e69c3406479b46b18fea46a3a1baa93c3532f44"} err="failed to get container status \"a0e6a5707295bb6cf939dc603e69c3406479b46b18fea46a3a1baa93c3532f44\": rpc error: code = NotFound desc = could not find container \"a0e6a5707295bb6cf939dc603e69c3406479b46b18fea46a3a1baa93c3532f44\": container with ID starting with a0e6a5707295bb6cf939dc603e69c3406479b46b18fea46a3a1baa93c3532f44 not found: ID does not exist" Jan 22 08:03:23 crc kubenswrapper[4982]: I0122 08:03:23.967680 4982 scope.go:117] "RemoveContainer" containerID="759bc5298ec6ac769cd4e1293c8313245e12e9b1e4d3bf5521b03f2312bbc993" Jan 22 08:03:23 crc kubenswrapper[4982]: E0122 08:03:23.968331 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"759bc5298ec6ac769cd4e1293c8313245e12e9b1e4d3bf5521b03f2312bbc993\": container with ID starting with 759bc5298ec6ac769cd4e1293c8313245e12e9b1e4d3bf5521b03f2312bbc993 not found: ID does not exist" containerID="759bc5298ec6ac769cd4e1293c8313245e12e9b1e4d3bf5521b03f2312bbc993" Jan 22 08:03:23 crc kubenswrapper[4982]: I0122 08:03:23.968384 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"759bc5298ec6ac769cd4e1293c8313245e12e9b1e4d3bf5521b03f2312bbc993"} err="failed to get container status \"759bc5298ec6ac769cd4e1293c8313245e12e9b1e4d3bf5521b03f2312bbc993\": rpc error: code = NotFound desc = could not find container \"759bc5298ec6ac769cd4e1293c8313245e12e9b1e4d3bf5521b03f2312bbc993\": container with ID starting with 759bc5298ec6ac769cd4e1293c8313245e12e9b1e4d3bf5521b03f2312bbc993 not found: ID does not exist" Jan 22 08:03:23 crc kubenswrapper[4982]: I0122 08:03:23.968411 4982 scope.go:117] "RemoveContainer" containerID="fb5204a0f5a6b6ecf4dac71d04d6617956ed585017e8f4126237134e7e7ab6b0" Jan 22 08:03:23 crc kubenswrapper[4982]: E0122 08:03:23.968741 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb5204a0f5a6b6ecf4dac71d04d6617956ed585017e8f4126237134e7e7ab6b0\": container with ID starting with fb5204a0f5a6b6ecf4dac71d04d6617956ed585017e8f4126237134e7e7ab6b0 not found: ID does not exist" containerID="fb5204a0f5a6b6ecf4dac71d04d6617956ed585017e8f4126237134e7e7ab6b0" Jan 22 08:03:23 crc kubenswrapper[4982]: I0122 08:03:23.968777 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb5204a0f5a6b6ecf4dac71d04d6617956ed585017e8f4126237134e7e7ab6b0"} err="failed to get container status \"fb5204a0f5a6b6ecf4dac71d04d6617956ed585017e8f4126237134e7e7ab6b0\": rpc error: code = NotFound desc = could not find container \"fb5204a0f5a6b6ecf4dac71d04d6617956ed585017e8f4126237134e7e7ab6b0\": container with ID starting with fb5204a0f5a6b6ecf4dac71d04d6617956ed585017e8f4126237134e7e7ab6b0 not found: ID does not exist" Jan 22 08:03:25 crc kubenswrapper[4982]: I0122 08:03:25.741354 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69db0c85-de98-47c2-b179-7e36442ee1ac" path="/var/lib/kubelet/pods/69db0c85-de98-47c2-b179-7e36442ee1ac/volumes" Jan 22 08:03:33 crc kubenswrapper[4982]: I0122 08:03:33.720943 4982 scope.go:117] "RemoveContainer" containerID="ad35a75a6ffc855bc5d4d1f61c3fce87b1d0fa2160bc99b21b1ee14203e9b724" Jan 22 08:03:33 crc kubenswrapper[4982]: E0122 08:03:33.721732 4982 pod_workers.go:1301] "Error syncing pod, skipping" 
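The machine-config-daemon entries that follow repeat every ten to fifteen seconds because each pod-worker re-sync is rejected while the crash-loop back-off window is open. "back-off 5m0s" means the restart delay has already reached the kubelet's ceiling: by default the kubelet doubles the delay after each crash (10s, 20s, 40s, ...) and caps it at five minutes, resetting only after the container has run cleanly for a while. A hypothetical reimplementation of that delay schedule, assuming those default constants:

package backoff

import "time"

// Assumed kubelet defaults: 10s initial restart delay, doubled per
// restart, capped at 5m. This is a sketch, not the kubelet's own code.
const (
	initialDelay = 10 * time.Second
	maxDelay     = 5 * time.Minute
)

// RestartDelay returns the back-off applied before restart number n
// (n = 0 for the first restart). Once the doubled delay reaches the cap
// it stays there, which is why the log keeps printing "back-off 5m0s".
func RestartDelay(n int) time.Duration {
	d := initialDelay
	for i := 0; i < n; i++ {
		d *= 2
		if d >= maxDelay {
			return maxDelay
		}
	}
	return d
}

Under those assumptions the delay hits the 5m ceiling after five doublings, so a container that has been crashing for a few minutes, like this daemon, sits at the cap for every subsequent sync.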
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:03:48 crc kubenswrapper[4982]: I0122 08:03:48.719105 4982 scope.go:117] "RemoveContainer" containerID="ad35a75a6ffc855bc5d4d1f61c3fce87b1d0fa2160bc99b21b1ee14203e9b724" Jan 22 08:03:48 crc kubenswrapper[4982]: E0122 08:03:48.720005 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:04:00 crc kubenswrapper[4982]: I0122 08:04:00.720648 4982 scope.go:117] "RemoveContainer" containerID="ad35a75a6ffc855bc5d4d1f61c3fce87b1d0fa2160bc99b21b1ee14203e9b724" Jan 22 08:04:00 crc kubenswrapper[4982]: E0122 08:04:00.721409 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:04:10 crc kubenswrapper[4982]: I0122 08:04:10.215237 4982 scope.go:117] "RemoveContainer" containerID="06bbadccfbfa508b88145f8e0b737df9e9d4f7f6d076a61eab483cf27c7c8ea4" Jan 22 08:04:12 crc kubenswrapper[4982]: I0122 08:04:12.718976 4982 scope.go:117] "RemoveContainer" containerID="ad35a75a6ffc855bc5d4d1f61c3fce87b1d0fa2160bc99b21b1ee14203e9b724" Jan 22 08:04:12 crc kubenswrapper[4982]: E0122 08:04:12.719846 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:04:23 crc kubenswrapper[4982]: I0122 08:04:23.719796 4982 scope.go:117] "RemoveContainer" containerID="ad35a75a6ffc855bc5d4d1f61c3fce87b1d0fa2160bc99b21b1ee14203e9b724" Jan 22 08:04:23 crc kubenswrapper[4982]: E0122 08:04:23.722004 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:04:35 crc kubenswrapper[4982]: I0122 08:04:35.719796 4982 scope.go:117] "RemoveContainer" containerID="ad35a75a6ffc855bc5d4d1f61c3fce87b1d0fa2160bc99b21b1ee14203e9b724" Jan 22 08:04:35 crc kubenswrapper[4982]: E0122 08:04:35.721281 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:04:47 crc kubenswrapper[4982]: I0122 08:04:47.720170 4982 scope.go:117] "RemoveContainer" containerID="ad35a75a6ffc855bc5d4d1f61c3fce87b1d0fa2160bc99b21b1ee14203e9b724" Jan 22 08:04:47 crc kubenswrapper[4982]: E0122 08:04:47.721059 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:05:01 crc kubenswrapper[4982]: I0122 08:05:01.723414 4982 scope.go:117] "RemoveContainer" containerID="ad35a75a6ffc855bc5d4d1f61c3fce87b1d0fa2160bc99b21b1ee14203e9b724" Jan 22 08:05:01 crc kubenswrapper[4982]: E0122 08:05:01.724467 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:05:10 crc kubenswrapper[4982]: I0122 08:05:10.325832 4982 scope.go:117] "RemoveContainer" containerID="63a2f9c61b22229f6957d5d64f8bc802f7c7003b5033c74b8ca2ad5fd0466703" Jan 22 08:05:13 crc kubenswrapper[4982]: I0122 08:05:13.719548 4982 scope.go:117] "RemoveContainer" containerID="ad35a75a6ffc855bc5d4d1f61c3fce87b1d0fa2160bc99b21b1ee14203e9b724" Jan 22 08:05:13 crc kubenswrapper[4982]: E0122 08:05:13.720457 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:05:24 crc kubenswrapper[4982]: I0122 08:05:24.721660 4982 scope.go:117] "RemoveContainer" containerID="ad35a75a6ffc855bc5d4d1f61c3fce87b1d0fa2160bc99b21b1ee14203e9b724" Jan 22 08:05:24 crc kubenswrapper[4982]: E0122 08:05:24.722605 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:05:35 crc kubenswrapper[4982]: I0122 08:05:35.720558 4982 scope.go:117] "RemoveContainer" containerID="ad35a75a6ffc855bc5d4d1f61c3fce87b1d0fa2160bc99b21b1ee14203e9b724" Jan 22 08:05:35 crc kubenswrapper[4982]: E0122 08:05:35.721515 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:05:50 crc kubenswrapper[4982]: I0122 08:05:50.722395 4982 scope.go:117] "RemoveContainer" containerID="ad35a75a6ffc855bc5d4d1f61c3fce87b1d0fa2160bc99b21b1ee14203e9b724" Jan 22 08:05:50 crc kubenswrapper[4982]: E0122 08:05:50.723765 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:06:02 crc kubenswrapper[4982]: I0122 08:06:02.719568 4982 scope.go:117] "RemoveContainer" containerID="ad35a75a6ffc855bc5d4d1f61c3fce87b1d0fa2160bc99b21b1ee14203e9b724" Jan 22 08:06:02 crc kubenswrapper[4982]: E0122 08:06:02.720582 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:06:16 crc kubenswrapper[4982]: I0122 08:06:16.719053 4982 scope.go:117] "RemoveContainer" containerID="ad35a75a6ffc855bc5d4d1f61c3fce87b1d0fa2160bc99b21b1ee14203e9b724" Jan 22 08:06:16 crc kubenswrapper[4982]: E0122 08:06:16.719977 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:06:30 crc kubenswrapper[4982]: I0122 08:06:30.719704 4982 scope.go:117] "RemoveContainer" containerID="ad35a75a6ffc855bc5d4d1f61c3fce87b1d0fa2160bc99b21b1ee14203e9b724" Jan 22 08:06:30 crc kubenswrapper[4982]: E0122 08:06:30.720393 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:06:42 crc kubenswrapper[4982]: I0122 08:06:42.720028 4982 scope.go:117] "RemoveContainer" containerID="ad35a75a6ffc855bc5d4d1f61c3fce87b1d0fa2160bc99b21b1ee14203e9b724" Jan 22 08:06:42 crc kubenswrapper[4982]: E0122 08:06:42.720722 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:06:56 crc kubenswrapper[4982]: I0122 08:06:56.720127 4982 scope.go:117] "RemoveContainer" containerID="ad35a75a6ffc855bc5d4d1f61c3fce87b1d0fa2160bc99b21b1ee14203e9b724" Jan 22 08:06:57 crc kubenswrapper[4982]: I0122 08:06:57.055979 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"bad011a51d0b8a4c9fe84ee893d41254ba44fcb39dbd998cc8ad2672318fc863"} Jan 22 08:09:18 crc kubenswrapper[4982]: I0122 08:09:18.973979 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 08:09:18 crc kubenswrapper[4982]: I0122 08:09:18.974760 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 08:09:48 crc kubenswrapper[4982]: I0122 08:09:48.973964 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 08:09:48 crc kubenswrapper[4982]: I0122 08:09:48.974362 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 08:10:18 crc kubenswrapper[4982]: I0122 08:10:18.974095 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 08:10:18 crc kubenswrapper[4982]: I0122 08:10:18.974599 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 08:10:18 crc kubenswrapper[4982]: I0122 08:10:18.974642 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 08:10:18 crc kubenswrapper[4982]: I0122 08:10:18.975422 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bad011a51d0b8a4c9fe84ee893d41254ba44fcb39dbd998cc8ad2672318fc863"} pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" 
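The liveness failures above are plain HTTP GETs against the daemon's health endpoint; a transport error like "connection refused" counts exactly like a non-2xx status, and once the configured failure threshold is reached the kubelet kills the container with its termination grace period (600s here, as the next entry shows). A minimal sketch of an equivalent check follows; the endpoint URL comes from the log, while the timeout value and helper name are assumptions for illustration.

package probe

import (
	"fmt"
	"net/http"
	"time"
)

// CheckHTTP performs the same kind of probe the kubelet ran against the
// machine-config-daemon: GET the health endpoint and treat anything
// outside the 200-399 range as a failure. Transport errors such as
// "connect: connection refused" fail the probe just like a bad status.
func CheckHTTP(url string, timeout time.Duration) error {
	client := &http.Client{Timeout: timeout}
	resp, err := client.Get(url)
	if err != nil {
		return fmt.Errorf("probe failed: %w", err) // e.g. connection refused
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("probe failed: unexpected status %d", resp.StatusCode)
	}
	return nil
}

For the probe in this log the call would be CheckHTTP("http://127.0.0.1:8798/health", time.Second), and every attempt here fails at the dial step because nothing is listening on the port.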
containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 08:10:18 crc kubenswrapper[4982]: I0122 08:10:18.975468 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" containerID="cri-o://bad011a51d0b8a4c9fe84ee893d41254ba44fcb39dbd998cc8ad2672318fc863" gracePeriod=600 Jan 22 08:10:19 crc kubenswrapper[4982]: I0122 08:10:19.276935 4982 generic.go:334] "Generic (PLEG): container finished" podID="2829369e-72ba-4637-853b-88f5cf242a0e" containerID="bad011a51d0b8a4c9fe84ee893d41254ba44fcb39dbd998cc8ad2672318fc863" exitCode=0 Jan 22 08:10:19 crc kubenswrapper[4982]: I0122 08:10:19.276988 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerDied","Data":"bad011a51d0b8a4c9fe84ee893d41254ba44fcb39dbd998cc8ad2672318fc863"} Jan 22 08:10:19 crc kubenswrapper[4982]: I0122 08:10:19.277238 4982 scope.go:117] "RemoveContainer" containerID="ad35a75a6ffc855bc5d4d1f61c3fce87b1d0fa2160bc99b21b1ee14203e9b724" Jan 22 08:10:20 crc kubenswrapper[4982]: I0122 08:10:20.289262 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"41a2e8f10c29d84ed0900a2d6e5aaabf6168aaf17baae9c7f573edf1e26d7aa4"} Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.031412 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-xm4pb"] Jan 22 08:10:29 crc kubenswrapper[4982]: E0122 08:10:29.032321 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a" containerName="registry-server" Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.032333 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a" containerName="registry-server" Jan 22 08:10:29 crc kubenswrapper[4982]: E0122 08:10:29.032353 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a" containerName="extract-utilities" Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.032360 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a" containerName="extract-utilities" Jan 22 08:10:29 crc kubenswrapper[4982]: E0122 08:10:29.032378 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a" containerName="extract-content" Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.032385 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a" containerName="extract-content" Jan 22 08:10:29 crc kubenswrapper[4982]: E0122 08:10:29.032408 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69db0c85-de98-47c2-b179-7e36442ee1ac" containerName="extract-content" Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.032413 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="69db0c85-de98-47c2-b179-7e36442ee1ac" containerName="extract-content" Jan 22 08:10:29 crc kubenswrapper[4982]: E0122 08:10:29.032425 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69db0c85-de98-47c2-b179-7e36442ee1ac" 
containerName="extract-utilities" Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.032431 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="69db0c85-de98-47c2-b179-7e36442ee1ac" containerName="extract-utilities" Jan 22 08:10:29 crc kubenswrapper[4982]: E0122 08:10:29.032438 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69db0c85-de98-47c2-b179-7e36442ee1ac" containerName="registry-server" Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.032443 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="69db0c85-de98-47c2-b179-7e36442ee1ac" containerName="registry-server" Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.032684 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d2bf0eb-fdfc-48b1-9d4a-66b360d7ce6a" containerName="registry-server" Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.032714 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="69db0c85-de98-47c2-b179-7e36442ee1ac" containerName="registry-server" Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.034248 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xm4pb" Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.045482 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xm4pb"] Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.159003 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/847d5513-0443-440e-ac24-c8f30e0288b2-utilities\") pod \"redhat-operators-xm4pb\" (UID: \"847d5513-0443-440e-ac24-c8f30e0288b2\") " pod="openshift-marketplace/redhat-operators-xm4pb" Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.159074 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzfr7\" (UniqueName: \"kubernetes.io/projected/847d5513-0443-440e-ac24-c8f30e0288b2-kube-api-access-tzfr7\") pod \"redhat-operators-xm4pb\" (UID: \"847d5513-0443-440e-ac24-c8f30e0288b2\") " pod="openshift-marketplace/redhat-operators-xm4pb" Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.159279 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/847d5513-0443-440e-ac24-c8f30e0288b2-catalog-content\") pod \"redhat-operators-xm4pb\" (UID: \"847d5513-0443-440e-ac24-c8f30e0288b2\") " pod="openshift-marketplace/redhat-operators-xm4pb" Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.231768 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6mw6q"] Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.233982 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6mw6q" Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.243290 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6mw6q"] Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.261636 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/847d5513-0443-440e-ac24-c8f30e0288b2-catalog-content\") pod \"redhat-operators-xm4pb\" (UID: \"847d5513-0443-440e-ac24-c8f30e0288b2\") " pod="openshift-marketplace/redhat-operators-xm4pb" Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.261712 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/847d5513-0443-440e-ac24-c8f30e0288b2-utilities\") pod \"redhat-operators-xm4pb\" (UID: \"847d5513-0443-440e-ac24-c8f30e0288b2\") " pod="openshift-marketplace/redhat-operators-xm4pb" Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.261754 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzfr7\" (UniqueName: \"kubernetes.io/projected/847d5513-0443-440e-ac24-c8f30e0288b2-kube-api-access-tzfr7\") pod \"redhat-operators-xm4pb\" (UID: \"847d5513-0443-440e-ac24-c8f30e0288b2\") " pod="openshift-marketplace/redhat-operators-xm4pb" Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.262232 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/847d5513-0443-440e-ac24-c8f30e0288b2-catalog-content\") pod \"redhat-operators-xm4pb\" (UID: \"847d5513-0443-440e-ac24-c8f30e0288b2\") " pod="openshift-marketplace/redhat-operators-xm4pb" Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.262266 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/847d5513-0443-440e-ac24-c8f30e0288b2-utilities\") pod \"redhat-operators-xm4pb\" (UID: \"847d5513-0443-440e-ac24-c8f30e0288b2\") " pod="openshift-marketplace/redhat-operators-xm4pb" Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.291633 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzfr7\" (UniqueName: \"kubernetes.io/projected/847d5513-0443-440e-ac24-c8f30e0288b2-kube-api-access-tzfr7\") pod \"redhat-operators-xm4pb\" (UID: \"847d5513-0443-440e-ac24-c8f30e0288b2\") " pod="openshift-marketplace/redhat-operators-xm4pb" Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.359789 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xm4pb" Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.363638 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7-utilities\") pod \"community-operators-6mw6q\" (UID: \"8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7\") " pod="openshift-marketplace/community-operators-6mw6q" Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.363873 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5dqn\" (UniqueName: \"kubernetes.io/projected/8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7-kube-api-access-t5dqn\") pod \"community-operators-6mw6q\" (UID: \"8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7\") " pod="openshift-marketplace/community-operators-6mw6q" Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.363959 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7-catalog-content\") pod \"community-operators-6mw6q\" (UID: \"8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7\") " pod="openshift-marketplace/community-operators-6mw6q" Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.465476 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7-utilities\") pod \"community-operators-6mw6q\" (UID: \"8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7\") " pod="openshift-marketplace/community-operators-6mw6q" Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.465713 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5dqn\" (UniqueName: \"kubernetes.io/projected/8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7-kube-api-access-t5dqn\") pod \"community-operators-6mw6q\" (UID: \"8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7\") " pod="openshift-marketplace/community-operators-6mw6q" Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.465781 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7-catalog-content\") pod \"community-operators-6mw6q\" (UID: \"8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7\") " pod="openshift-marketplace/community-operators-6mw6q" Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.466101 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7-utilities\") pod \"community-operators-6mw6q\" (UID: \"8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7\") " pod="openshift-marketplace/community-operators-6mw6q" Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.466368 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7-catalog-content\") pod \"community-operators-6mw6q\" (UID: \"8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7\") " pod="openshift-marketplace/community-operators-6mw6q" Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.484943 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5dqn\" (UniqueName: \"kubernetes.io/projected/8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7-kube-api-access-t5dqn\") pod 
\"community-operators-6mw6q\" (UID: \"8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7\") " pod="openshift-marketplace/community-operators-6mw6q" Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.549731 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6mw6q" Jan 22 08:10:29 crc kubenswrapper[4982]: I0122 08:10:29.835421 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xm4pb"] Jan 22 08:10:30 crc kubenswrapper[4982]: I0122 08:10:30.011962 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6mw6q"] Jan 22 08:10:30 crc kubenswrapper[4982]: I0122 08:10:30.384371 4982 generic.go:334] "Generic (PLEG): container finished" podID="847d5513-0443-440e-ac24-c8f30e0288b2" containerID="ffa5cdcdcc11b265f0f494fe13c78e93f88f4273587bf11c8f5813e96c03b4ec" exitCode=0 Jan 22 08:10:30 crc kubenswrapper[4982]: I0122 08:10:30.384457 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xm4pb" event={"ID":"847d5513-0443-440e-ac24-c8f30e0288b2","Type":"ContainerDied","Data":"ffa5cdcdcc11b265f0f494fe13c78e93f88f4273587bf11c8f5813e96c03b4ec"} Jan 22 08:10:30 crc kubenswrapper[4982]: I0122 08:10:30.384487 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xm4pb" event={"ID":"847d5513-0443-440e-ac24-c8f30e0288b2","Type":"ContainerStarted","Data":"fd95196b719e66e99a2ceb408fe5793a83247c80debd0b808b528799f5ed5d33"} Jan 22 08:10:30 crc kubenswrapper[4982]: I0122 08:10:30.386507 4982 generic.go:334] "Generic (PLEG): container finished" podID="8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7" containerID="8c24d3736f05f5c4cbee0de352bd40485224b0f61a23542729b0114242de625b" exitCode=0 Jan 22 08:10:30 crc kubenswrapper[4982]: I0122 08:10:30.386536 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6mw6q" event={"ID":"8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7","Type":"ContainerDied","Data":"8c24d3736f05f5c4cbee0de352bd40485224b0f61a23542729b0114242de625b"} Jan 22 08:10:30 crc kubenswrapper[4982]: I0122 08:10:30.386560 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6mw6q" event={"ID":"8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7","Type":"ContainerStarted","Data":"8f2f97cedfcd9e15ef5033af13f1f1c99a28bd9f6df79dd83740326e4d31b6fb"} Jan 22 08:10:30 crc kubenswrapper[4982]: I0122 08:10:30.387219 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 08:10:32 crc kubenswrapper[4982]: I0122 08:10:32.407111 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xm4pb" event={"ID":"847d5513-0443-440e-ac24-c8f30e0288b2","Type":"ContainerStarted","Data":"7921b0620355e37155089a55495993fbff946ecbad11fc6a5540fff9af909c60"} Jan 22 08:10:32 crc kubenswrapper[4982]: I0122 08:10:32.409207 4982 generic.go:334] "Generic (PLEG): container finished" podID="8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7" containerID="d27b2cd18f46207a8af6b2015a8ceebcbba179556590873df7a799398fb6bb58" exitCode=0 Jan 22 08:10:32 crc kubenswrapper[4982]: I0122 08:10:32.409268 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6mw6q" event={"ID":"8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7","Type":"ContainerDied","Data":"d27b2cd18f46207a8af6b2015a8ceebcbba179556590873df7a799398fb6bb58"} Jan 22 08:10:33 crc 
Jan 22 08:10:33 crc kubenswrapper[4982]: I0122 08:10:33.426802 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6mw6q" event={"ID":"8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7","Type":"ContainerStarted","Data":"72fe19d6eb6face36956ed6e615cc8b335095dd555c7da8f7d76a85324245645"}
Jan 22 08:10:33 crc kubenswrapper[4982]: I0122 08:10:33.460163 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6mw6q" podStartSLOduration=1.897686097 podStartE2EDuration="4.460144042s" podCreationTimestamp="2026-01-22 08:10:29 +0000 UTC" firstStartedPulling="2026-01-22 08:10:30.389124628 +0000 UTC m=+8691.227762631" lastFinishedPulling="2026-01-22 08:10:32.951582573 +0000 UTC m=+8693.790220576" observedRunningTime="2026-01-22 08:10:33.448974108 +0000 UTC m=+8694.287612121" watchObservedRunningTime="2026-01-22 08:10:33.460144042 +0000 UTC m=+8694.298782035"
Jan 22 08:10:34 crc kubenswrapper[4982]: I0122 08:10:34.444976 4982 generic.go:334] "Generic (PLEG): container finished" podID="847d5513-0443-440e-ac24-c8f30e0288b2" containerID="7921b0620355e37155089a55495993fbff946ecbad11fc6a5540fff9af909c60" exitCode=0
Jan 22 08:10:34 crc kubenswrapper[4982]: I0122 08:10:34.445062 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xm4pb" event={"ID":"847d5513-0443-440e-ac24-c8f30e0288b2","Type":"ContainerDied","Data":"7921b0620355e37155089a55495993fbff946ecbad11fc6a5540fff9af909c60"}
Jan 22 08:10:35 crc kubenswrapper[4982]: I0122 08:10:35.457107 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xm4pb" event={"ID":"847d5513-0443-440e-ac24-c8f30e0288b2","Type":"ContainerStarted","Data":"40d5b7987d81a0a4ea7f6e515381ee9051e6e138446d39f090afdabc71f2e950"}
Jan 22 08:10:35 crc kubenswrapper[4982]: I0122 08:10:35.479158 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-xm4pb" podStartSLOduration=2.048729512 podStartE2EDuration="6.479137868s" podCreationTimestamp="2026-01-22 08:10:29 +0000 UTC" firstStartedPulling="2026-01-22 08:10:30.386955489 +0000 UTC m=+8691.225593492" lastFinishedPulling="2026-01-22 08:10:34.817363845 +0000 UTC m=+8695.656001848" observedRunningTime="2026-01-22 08:10:35.473371881 +0000 UTC m=+8696.312009884" watchObservedRunningTime="2026-01-22 08:10:35.479137868 +0000 UTC m=+8696.317775861"
Jan 22 08:10:39 crc kubenswrapper[4982]: I0122 08:10:39.360041 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-xm4pb"
Jan 22 08:10:39 crc kubenswrapper[4982]: I0122 08:10:39.361440 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-xm4pb"
Jan 22 08:10:39 crc kubenswrapper[4982]: I0122 08:10:39.550164 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6mw6q"
Jan 22 08:10:39 crc kubenswrapper[4982]: I0122 08:10:39.550221 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6mw6q"
Jan 22 08:10:39 crc kubenswrapper[4982]: I0122 08:10:39.612540 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6mw6q"
Jan 22 08:10:40 crc kubenswrapper[4982]: I0122 08:10:40.432659 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-xm4pb" podUID="847d5513-0443-440e-ac24-c8f30e0288b2" containerName="registry-server" probeResult="failure" output=<
Jan 22 08:10:40 crc kubenswrapper[4982]: 	timeout: failed to connect service ":50051" within 1s
Jan 22 08:10:40 crc kubenswrapper[4982]: >
Jan 22 08:10:40 crc kubenswrapper[4982]: I0122 08:10:40.563091 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6mw6q"
Jan 22 08:10:40 crc kubenswrapper[4982]: I0122 08:10:40.616826 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6mw6q"]
Jan 22 08:10:42 crc kubenswrapper[4982]: I0122 08:10:42.532649 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-6mw6q" podUID="8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7" containerName="registry-server" containerID="cri-o://72fe19d6eb6face36956ed6e615cc8b335095dd555c7da8f7d76a85324245645" gracePeriod=2
Jan 22 08:10:43 crc kubenswrapper[4982]: I0122 08:10:43.547936 4982 generic.go:334] "Generic (PLEG): container finished" podID="8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7" containerID="72fe19d6eb6face36956ed6e615cc8b335095dd555c7da8f7d76a85324245645" exitCode=0
Jan 22 08:10:43 crc kubenswrapper[4982]: I0122 08:10:43.548410 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6mw6q" event={"ID":"8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7","Type":"ContainerDied","Data":"72fe19d6eb6face36956ed6e615cc8b335095dd555c7da8f7d76a85324245645"}
Jan 22 08:10:43 crc kubenswrapper[4982]: I0122 08:10:43.548437 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6mw6q" event={"ID":"8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7","Type":"ContainerDied","Data":"8f2f97cedfcd9e15ef5033af13f1f1c99a28bd9f6df79dd83740326e4d31b6fb"}
Jan 22 08:10:43 crc kubenswrapper[4982]: I0122 08:10:43.548449 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f2f97cedfcd9e15ef5033af13f1f1c99a28bd9f6df79dd83740326e4d31b6fb"
Jan 22 08:10:43 crc kubenswrapper[4982]: I0122 08:10:43.582329 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6mw6q"
Jan 22 08:10:43 crc kubenswrapper[4982]: I0122 08:10:43.710331 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t5dqn\" (UniqueName: \"kubernetes.io/projected/8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7-kube-api-access-t5dqn\") pod \"8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7\" (UID: \"8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7\") "
Jan 22 08:10:43 crc kubenswrapper[4982]: I0122 08:10:43.710587 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7-utilities\") pod \"8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7\" (UID: \"8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7\") "
Jan 22 08:10:43 crc kubenswrapper[4982]: I0122 08:10:43.710646 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7-catalog-content\") pod \"8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7\" (UID: \"8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7\") "
Jan 22 08:10:43 crc kubenswrapper[4982]: I0122 08:10:43.711815 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7-utilities" (OuterVolumeSpecName: "utilities") pod "8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7" (UID: "8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 08:10:43 crc kubenswrapper[4982]: I0122 08:10:43.717112 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7-kube-api-access-t5dqn" (OuterVolumeSpecName: "kube-api-access-t5dqn") pod "8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7" (UID: "8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7"). InnerVolumeSpecName "kube-api-access-t5dqn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 08:10:43 crc kubenswrapper[4982]: I0122 08:10:43.762971 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7" (UID: "8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 08:10:43 crc kubenswrapper[4982]: I0122 08:10:43.814344 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7-utilities\") on node \"crc\" DevicePath \"\""
Jan 22 08:10:43 crc kubenswrapper[4982]: I0122 08:10:43.814373 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 22 08:10:43 crc kubenswrapper[4982]: I0122 08:10:43.814383 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t5dqn\" (UniqueName: \"kubernetes.io/projected/8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7-kube-api-access-t5dqn\") on node \"crc\" DevicePath \"\""
Jan 22 08:10:44 crc kubenswrapper[4982]: I0122 08:10:44.561833 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6mw6q"
Need to start a new one" pod="openshift-marketplace/community-operators-6mw6q" Jan 22 08:10:44 crc kubenswrapper[4982]: I0122 08:10:44.617716 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6mw6q"] Jan 22 08:10:44 crc kubenswrapper[4982]: I0122 08:10:44.629539 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-6mw6q"] Jan 22 08:10:45 crc kubenswrapper[4982]: I0122 08:10:45.732868 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7" path="/var/lib/kubelet/pods/8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7/volumes" Jan 22 08:10:49 crc kubenswrapper[4982]: I0122 08:10:49.403712 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-xm4pb" Jan 22 08:10:49 crc kubenswrapper[4982]: I0122 08:10:49.460279 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-xm4pb" Jan 22 08:10:49 crc kubenswrapper[4982]: I0122 08:10:49.647899 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xm4pb"] Jan 22 08:10:50 crc kubenswrapper[4982]: I0122 08:10:50.629971 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-xm4pb" podUID="847d5513-0443-440e-ac24-c8f30e0288b2" containerName="registry-server" containerID="cri-o://40d5b7987d81a0a4ea7f6e515381ee9051e6e138446d39f090afdabc71f2e950" gracePeriod=2 Jan 22 08:10:51 crc kubenswrapper[4982]: I0122 08:10:51.115099 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xm4pb" Jan 22 08:10:51 crc kubenswrapper[4982]: I0122 08:10:51.131582 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/847d5513-0443-440e-ac24-c8f30e0288b2-utilities\") pod \"847d5513-0443-440e-ac24-c8f30e0288b2\" (UID: \"847d5513-0443-440e-ac24-c8f30e0288b2\") " Jan 22 08:10:51 crc kubenswrapper[4982]: I0122 08:10:51.131692 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tzfr7\" (UniqueName: \"kubernetes.io/projected/847d5513-0443-440e-ac24-c8f30e0288b2-kube-api-access-tzfr7\") pod \"847d5513-0443-440e-ac24-c8f30e0288b2\" (UID: \"847d5513-0443-440e-ac24-c8f30e0288b2\") " Jan 22 08:10:51 crc kubenswrapper[4982]: I0122 08:10:51.131743 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/847d5513-0443-440e-ac24-c8f30e0288b2-catalog-content\") pod \"847d5513-0443-440e-ac24-c8f30e0288b2\" (UID: \"847d5513-0443-440e-ac24-c8f30e0288b2\") " Jan 22 08:10:51 crc kubenswrapper[4982]: I0122 08:10:51.134442 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/847d5513-0443-440e-ac24-c8f30e0288b2-utilities" (OuterVolumeSpecName: "utilities") pod "847d5513-0443-440e-ac24-c8f30e0288b2" (UID: "847d5513-0443-440e-ac24-c8f30e0288b2"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 08:10:51 crc kubenswrapper[4982]: I0122 08:10:51.142882 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/847d5513-0443-440e-ac24-c8f30e0288b2-kube-api-access-tzfr7" (OuterVolumeSpecName: "kube-api-access-tzfr7") pod "847d5513-0443-440e-ac24-c8f30e0288b2" (UID: "847d5513-0443-440e-ac24-c8f30e0288b2"). InnerVolumeSpecName "kube-api-access-tzfr7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 08:10:51 crc kubenswrapper[4982]: I0122 08:10:51.233420 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/847d5513-0443-440e-ac24-c8f30e0288b2-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 08:10:51 crc kubenswrapper[4982]: I0122 08:10:51.233452 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tzfr7\" (UniqueName: \"kubernetes.io/projected/847d5513-0443-440e-ac24-c8f30e0288b2-kube-api-access-tzfr7\") on node \"crc\" DevicePath \"\"" Jan 22 08:10:51 crc kubenswrapper[4982]: I0122 08:10:51.256599 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/847d5513-0443-440e-ac24-c8f30e0288b2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "847d5513-0443-440e-ac24-c8f30e0288b2" (UID: "847d5513-0443-440e-ac24-c8f30e0288b2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 08:10:51 crc kubenswrapper[4982]: I0122 08:10:51.334426 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/847d5513-0443-440e-ac24-c8f30e0288b2-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 08:10:51 crc kubenswrapper[4982]: I0122 08:10:51.641519 4982 generic.go:334] "Generic (PLEG): container finished" podID="847d5513-0443-440e-ac24-c8f30e0288b2" containerID="40d5b7987d81a0a4ea7f6e515381ee9051e6e138446d39f090afdabc71f2e950" exitCode=0 Jan 22 08:10:51 crc kubenswrapper[4982]: I0122 08:10:51.641566 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xm4pb" event={"ID":"847d5513-0443-440e-ac24-c8f30e0288b2","Type":"ContainerDied","Data":"40d5b7987d81a0a4ea7f6e515381ee9051e6e138446d39f090afdabc71f2e950"} Jan 22 08:10:51 crc kubenswrapper[4982]: I0122 08:10:51.641597 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xm4pb" event={"ID":"847d5513-0443-440e-ac24-c8f30e0288b2","Type":"ContainerDied","Data":"fd95196b719e66e99a2ceb408fe5793a83247c80debd0b808b528799f5ed5d33"} Jan 22 08:10:51 crc kubenswrapper[4982]: I0122 08:10:51.641618 4982 scope.go:117] "RemoveContainer" containerID="40d5b7987d81a0a4ea7f6e515381ee9051e6e138446d39f090afdabc71f2e950" Jan 22 08:10:51 crc kubenswrapper[4982]: I0122 08:10:51.641777 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xm4pb" Jan 22 08:10:51 crc kubenswrapper[4982]: I0122 08:10:51.667508 4982 scope.go:117] "RemoveContainer" containerID="7921b0620355e37155089a55495993fbff946ecbad11fc6a5540fff9af909c60" Jan 22 08:10:51 crc kubenswrapper[4982]: I0122 08:10:51.694786 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xm4pb"] Jan 22 08:10:51 crc kubenswrapper[4982]: I0122 08:10:51.699621 4982 scope.go:117] "RemoveContainer" containerID="ffa5cdcdcc11b265f0f494fe13c78e93f88f4273587bf11c8f5813e96c03b4ec" Jan 22 08:10:51 crc kubenswrapper[4982]: I0122 08:10:51.707303 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-xm4pb"] Jan 22 08:10:51 crc kubenswrapper[4982]: I0122 08:10:51.749070 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="847d5513-0443-440e-ac24-c8f30e0288b2" path="/var/lib/kubelet/pods/847d5513-0443-440e-ac24-c8f30e0288b2/volumes" Jan 22 08:10:51 crc kubenswrapper[4982]: I0122 08:10:51.770816 4982 scope.go:117] "RemoveContainer" containerID="40d5b7987d81a0a4ea7f6e515381ee9051e6e138446d39f090afdabc71f2e950" Jan 22 08:10:51 crc kubenswrapper[4982]: E0122 08:10:51.771399 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"40d5b7987d81a0a4ea7f6e515381ee9051e6e138446d39f090afdabc71f2e950\": container with ID starting with 40d5b7987d81a0a4ea7f6e515381ee9051e6e138446d39f090afdabc71f2e950 not found: ID does not exist" containerID="40d5b7987d81a0a4ea7f6e515381ee9051e6e138446d39f090afdabc71f2e950" Jan 22 08:10:51 crc kubenswrapper[4982]: I0122 08:10:51.771438 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40d5b7987d81a0a4ea7f6e515381ee9051e6e138446d39f090afdabc71f2e950"} err="failed to get container status \"40d5b7987d81a0a4ea7f6e515381ee9051e6e138446d39f090afdabc71f2e950\": rpc error: code = NotFound desc = could not find container \"40d5b7987d81a0a4ea7f6e515381ee9051e6e138446d39f090afdabc71f2e950\": container with ID starting with 40d5b7987d81a0a4ea7f6e515381ee9051e6e138446d39f090afdabc71f2e950 not found: ID does not exist" Jan 22 08:10:51 crc kubenswrapper[4982]: I0122 08:10:51.771459 4982 scope.go:117] "RemoveContainer" containerID="7921b0620355e37155089a55495993fbff946ecbad11fc6a5540fff9af909c60" Jan 22 08:10:51 crc kubenswrapper[4982]: E0122 08:10:51.772046 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7921b0620355e37155089a55495993fbff946ecbad11fc6a5540fff9af909c60\": container with ID starting with 7921b0620355e37155089a55495993fbff946ecbad11fc6a5540fff9af909c60 not found: ID does not exist" containerID="7921b0620355e37155089a55495993fbff946ecbad11fc6a5540fff9af909c60" Jan 22 08:10:51 crc kubenswrapper[4982]: I0122 08:10:51.772075 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7921b0620355e37155089a55495993fbff946ecbad11fc6a5540fff9af909c60"} err="failed to get container status \"7921b0620355e37155089a55495993fbff946ecbad11fc6a5540fff9af909c60\": rpc error: code = NotFound desc = could not find container \"7921b0620355e37155089a55495993fbff946ecbad11fc6a5540fff9af909c60\": container with ID starting with 7921b0620355e37155089a55495993fbff946ecbad11fc6a5540fff9af909c60 not found: ID does not exist" Jan 22 08:10:51 crc kubenswrapper[4982]: I0122 
08:10:51.772089 4982 scope.go:117] "RemoveContainer" containerID="ffa5cdcdcc11b265f0f494fe13c78e93f88f4273587bf11c8f5813e96c03b4ec" Jan 22 08:10:51 crc kubenswrapper[4982]: E0122 08:10:51.772420 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ffa5cdcdcc11b265f0f494fe13c78e93f88f4273587bf11c8f5813e96c03b4ec\": container with ID starting with ffa5cdcdcc11b265f0f494fe13c78e93f88f4273587bf11c8f5813e96c03b4ec not found: ID does not exist" containerID="ffa5cdcdcc11b265f0f494fe13c78e93f88f4273587bf11c8f5813e96c03b4ec" Jan 22 08:10:51 crc kubenswrapper[4982]: I0122 08:10:51.772438 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffa5cdcdcc11b265f0f494fe13c78e93f88f4273587bf11c8f5813e96c03b4ec"} err="failed to get container status \"ffa5cdcdcc11b265f0f494fe13c78e93f88f4273587bf11c8f5813e96c03b4ec\": rpc error: code = NotFound desc = could not find container \"ffa5cdcdcc11b265f0f494fe13c78e93f88f4273587bf11c8f5813e96c03b4ec\": container with ID starting with ffa5cdcdcc11b265f0f494fe13c78e93f88f4273587bf11c8f5813e96c03b4ec not found: ID does not exist" Jan 22 08:12:48 crc kubenswrapper[4982]: I0122 08:12:48.973982 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 08:12:48 crc kubenswrapper[4982]: I0122 08:12:48.974565 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 08:13:11 crc kubenswrapper[4982]: I0122 08:13:11.368372 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-r44cj"] Jan 22 08:13:11 crc kubenswrapper[4982]: E0122 08:13:11.370573 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7" containerName="extract-content" Jan 22 08:13:11 crc kubenswrapper[4982]: I0122 08:13:11.370694 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7" containerName="extract-content" Jan 22 08:13:11 crc kubenswrapper[4982]: E0122 08:13:11.370789 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7" containerName="registry-server" Jan 22 08:13:11 crc kubenswrapper[4982]: I0122 08:13:11.370905 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7" containerName="registry-server" Jan 22 08:13:11 crc kubenswrapper[4982]: E0122 08:13:11.371035 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="847d5513-0443-440e-ac24-c8f30e0288b2" containerName="extract-utilities" Jan 22 08:13:11 crc kubenswrapper[4982]: I0122 08:13:11.371122 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="847d5513-0443-440e-ac24-c8f30e0288b2" containerName="extract-utilities" Jan 22 08:13:11 crc kubenswrapper[4982]: E0122 08:13:11.371216 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="847d5513-0443-440e-ac24-c8f30e0288b2" containerName="extract-content" Jan 22 08:13:11 crc kubenswrapper[4982]: I0122 
08:13:11.371299 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="847d5513-0443-440e-ac24-c8f30e0288b2" containerName="extract-content" Jan 22 08:13:11 crc kubenswrapper[4982]: E0122 08:13:11.371395 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="847d5513-0443-440e-ac24-c8f30e0288b2" containerName="registry-server" Jan 22 08:13:11 crc kubenswrapper[4982]: I0122 08:13:11.371473 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="847d5513-0443-440e-ac24-c8f30e0288b2" containerName="registry-server" Jan 22 08:13:11 crc kubenswrapper[4982]: E0122 08:13:11.371564 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7" containerName="extract-utilities" Jan 22 08:13:11 crc kubenswrapper[4982]: I0122 08:13:11.371641 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7" containerName="extract-utilities" Jan 22 08:13:11 crc kubenswrapper[4982]: I0122 08:13:11.372033 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="847d5513-0443-440e-ac24-c8f30e0288b2" containerName="registry-server" Jan 22 08:13:11 crc kubenswrapper[4982]: I0122 08:13:11.372160 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bc474a7-63f0-45f8-bc1a-fa23dabbb3c7" containerName="registry-server" Jan 22 08:13:11 crc kubenswrapper[4982]: I0122 08:13:11.374282 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r44cj" Jan 22 08:13:11 crc kubenswrapper[4982]: I0122 08:13:11.406684 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-r44cj"] Jan 22 08:13:11 crc kubenswrapper[4982]: I0122 08:13:11.500997 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/656acf98-6651-45bc-9245-ced961cb9d8f-utilities\") pod \"redhat-marketplace-r44cj\" (UID: \"656acf98-6651-45bc-9245-ced961cb9d8f\") " pod="openshift-marketplace/redhat-marketplace-r44cj" Jan 22 08:13:11 crc kubenswrapper[4982]: I0122 08:13:11.501132 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/656acf98-6651-45bc-9245-ced961cb9d8f-catalog-content\") pod \"redhat-marketplace-r44cj\" (UID: \"656acf98-6651-45bc-9245-ced961cb9d8f\") " pod="openshift-marketplace/redhat-marketplace-r44cj" Jan 22 08:13:11 crc kubenswrapper[4982]: I0122 08:13:11.501167 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wc65c\" (UniqueName: \"kubernetes.io/projected/656acf98-6651-45bc-9245-ced961cb9d8f-kube-api-access-wc65c\") pod \"redhat-marketplace-r44cj\" (UID: \"656acf98-6651-45bc-9245-ced961cb9d8f\") " pod="openshift-marketplace/redhat-marketplace-r44cj" Jan 22 08:13:11 crc kubenswrapper[4982]: I0122 08:13:11.603198 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/656acf98-6651-45bc-9245-ced961cb9d8f-utilities\") pod \"redhat-marketplace-r44cj\" (UID: \"656acf98-6651-45bc-9245-ced961cb9d8f\") " pod="openshift-marketplace/redhat-marketplace-r44cj" Jan 22 08:13:11 crc kubenswrapper[4982]: I0122 08:13:11.603741 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/656acf98-6651-45bc-9245-ced961cb9d8f-utilities\") pod \"redhat-marketplace-r44cj\" (UID: \"656acf98-6651-45bc-9245-ced961cb9d8f\") " pod="openshift-marketplace/redhat-marketplace-r44cj" Jan 22 08:13:11 crc kubenswrapper[4982]: I0122 08:13:11.603765 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/656acf98-6651-45bc-9245-ced961cb9d8f-catalog-content\") pod \"redhat-marketplace-r44cj\" (UID: \"656acf98-6651-45bc-9245-ced961cb9d8f\") " pod="openshift-marketplace/redhat-marketplace-r44cj" Jan 22 08:13:11 crc kubenswrapper[4982]: I0122 08:13:11.604070 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wc65c\" (UniqueName: \"kubernetes.io/projected/656acf98-6651-45bc-9245-ced961cb9d8f-kube-api-access-wc65c\") pod \"redhat-marketplace-r44cj\" (UID: \"656acf98-6651-45bc-9245-ced961cb9d8f\") " pod="openshift-marketplace/redhat-marketplace-r44cj" Jan 22 08:13:11 crc kubenswrapper[4982]: I0122 08:13:11.604285 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/656acf98-6651-45bc-9245-ced961cb9d8f-catalog-content\") pod \"redhat-marketplace-r44cj\" (UID: \"656acf98-6651-45bc-9245-ced961cb9d8f\") " pod="openshift-marketplace/redhat-marketplace-r44cj" Jan 22 08:13:11 crc kubenswrapper[4982]: I0122 08:13:11.624930 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wc65c\" (UniqueName: \"kubernetes.io/projected/656acf98-6651-45bc-9245-ced961cb9d8f-kube-api-access-wc65c\") pod \"redhat-marketplace-r44cj\" (UID: \"656acf98-6651-45bc-9245-ced961cb9d8f\") " pod="openshift-marketplace/redhat-marketplace-r44cj" Jan 22 08:13:11 crc kubenswrapper[4982]: I0122 08:13:11.702296 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r44cj" Jan 22 08:13:12 crc kubenswrapper[4982]: I0122 08:13:12.196746 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-r44cj"] Jan 22 08:13:12 crc kubenswrapper[4982]: W0122 08:13:12.201704 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod656acf98_6651_45bc_9245_ced961cb9d8f.slice/crio-a3b5f862ec98a2d83f9bad66d694b6053e741c37a438b23786b44558dc3011e6 WatchSource:0}: Error finding container a3b5f862ec98a2d83f9bad66d694b6053e741c37a438b23786b44558dc3011e6: Status 404 returned error can't find the container with id a3b5f862ec98a2d83f9bad66d694b6053e741c37a438b23786b44558dc3011e6 Jan 22 08:13:13 crc kubenswrapper[4982]: I0122 08:13:13.183506 4982 generic.go:334] "Generic (PLEG): container finished" podID="656acf98-6651-45bc-9245-ced961cb9d8f" containerID="e0f77629d0bb9abd28a7b83bf3819b45ac69e12973939b338d15991178878157" exitCode=0 Jan 22 08:13:13 crc kubenswrapper[4982]: I0122 08:13:13.184000 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r44cj" event={"ID":"656acf98-6651-45bc-9245-ced961cb9d8f","Type":"ContainerDied","Data":"e0f77629d0bb9abd28a7b83bf3819b45ac69e12973939b338d15991178878157"} Jan 22 08:13:13 crc kubenswrapper[4982]: I0122 08:13:13.184025 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r44cj" event={"ID":"656acf98-6651-45bc-9245-ced961cb9d8f","Type":"ContainerStarted","Data":"a3b5f862ec98a2d83f9bad66d694b6053e741c37a438b23786b44558dc3011e6"} Jan 22 08:13:15 crc kubenswrapper[4982]: I0122 08:13:15.208922 4982 generic.go:334] "Generic (PLEG): container finished" podID="656acf98-6651-45bc-9245-ced961cb9d8f" containerID="2eafc9c94f523772dd959830720e24c7be029af5a8777ce5f7101ded668ac30d" exitCode=0 Jan 22 08:13:15 crc kubenswrapper[4982]: I0122 08:13:15.209040 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r44cj" event={"ID":"656acf98-6651-45bc-9245-ced961cb9d8f","Type":"ContainerDied","Data":"2eafc9c94f523772dd959830720e24c7be029af5a8777ce5f7101ded668ac30d"} Jan 22 08:13:16 crc kubenswrapper[4982]: I0122 08:13:16.223505 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r44cj" event={"ID":"656acf98-6651-45bc-9245-ced961cb9d8f","Type":"ContainerStarted","Data":"ea2867d88e18c7be3124f623b54cff0fa48b45129f85563d6204ac6588de0c9f"} Jan 22 08:13:16 crc kubenswrapper[4982]: I0122 08:13:16.246630 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-r44cj" podStartSLOduration=2.8291928889999998 podStartE2EDuration="5.246602552s" podCreationTimestamp="2026-01-22 08:13:11 +0000 UTC" firstStartedPulling="2026-01-22 08:13:13.186166237 +0000 UTC m=+8854.024804250" lastFinishedPulling="2026-01-22 08:13:15.60357588 +0000 UTC m=+8856.442213913" observedRunningTime="2026-01-22 08:13:16.23919939 +0000 UTC m=+8857.077837393" watchObservedRunningTime="2026-01-22 08:13:16.246602552 +0000 UTC m=+8857.085240545" Jan 22 08:13:16 crc kubenswrapper[4982]: I0122 08:13:16.527408 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-cmwqf"] Jan 22 08:13:16 crc kubenswrapper[4982]: I0122 08:13:16.533436 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cmwqf" Jan 22 08:13:16 crc kubenswrapper[4982]: I0122 08:13:16.538543 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cmwqf"] Jan 22 08:13:16 crc kubenswrapper[4982]: I0122 08:13:16.648993 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aef931e2-0a2a-4b03-a178-1dd95bcc83b7-utilities\") pod \"certified-operators-cmwqf\" (UID: \"aef931e2-0a2a-4b03-a178-1dd95bcc83b7\") " pod="openshift-marketplace/certified-operators-cmwqf" Jan 22 08:13:16 crc kubenswrapper[4982]: I0122 08:13:16.649312 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aef931e2-0a2a-4b03-a178-1dd95bcc83b7-catalog-content\") pod \"certified-operators-cmwqf\" (UID: \"aef931e2-0a2a-4b03-a178-1dd95bcc83b7\") " pod="openshift-marketplace/certified-operators-cmwqf" Jan 22 08:13:16 crc kubenswrapper[4982]: I0122 08:13:16.649480 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvsdv\" (UniqueName: \"kubernetes.io/projected/aef931e2-0a2a-4b03-a178-1dd95bcc83b7-kube-api-access-cvsdv\") pod \"certified-operators-cmwqf\" (UID: \"aef931e2-0a2a-4b03-a178-1dd95bcc83b7\") " pod="openshift-marketplace/certified-operators-cmwqf" Jan 22 08:13:16 crc kubenswrapper[4982]: I0122 08:13:16.751553 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aef931e2-0a2a-4b03-a178-1dd95bcc83b7-utilities\") pod \"certified-operators-cmwqf\" (UID: \"aef931e2-0a2a-4b03-a178-1dd95bcc83b7\") " pod="openshift-marketplace/certified-operators-cmwqf" Jan 22 08:13:16 crc kubenswrapper[4982]: I0122 08:13:16.751831 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aef931e2-0a2a-4b03-a178-1dd95bcc83b7-catalog-content\") pod \"certified-operators-cmwqf\" (UID: \"aef931e2-0a2a-4b03-a178-1dd95bcc83b7\") " pod="openshift-marketplace/certified-operators-cmwqf" Jan 22 08:13:16 crc kubenswrapper[4982]: I0122 08:13:16.752023 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvsdv\" (UniqueName: \"kubernetes.io/projected/aef931e2-0a2a-4b03-a178-1dd95bcc83b7-kube-api-access-cvsdv\") pod \"certified-operators-cmwqf\" (UID: \"aef931e2-0a2a-4b03-a178-1dd95bcc83b7\") " pod="openshift-marketplace/certified-operators-cmwqf" Jan 22 08:13:16 crc kubenswrapper[4982]: I0122 08:13:16.752175 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aef931e2-0a2a-4b03-a178-1dd95bcc83b7-utilities\") pod \"certified-operators-cmwqf\" (UID: \"aef931e2-0a2a-4b03-a178-1dd95bcc83b7\") " pod="openshift-marketplace/certified-operators-cmwqf" Jan 22 08:13:16 crc kubenswrapper[4982]: I0122 08:13:16.752316 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aef931e2-0a2a-4b03-a178-1dd95bcc83b7-catalog-content\") pod \"certified-operators-cmwqf\" (UID: \"aef931e2-0a2a-4b03-a178-1dd95bcc83b7\") " pod="openshift-marketplace/certified-operators-cmwqf" Jan 22 08:13:16 crc kubenswrapper[4982]: I0122 08:13:16.772792 4982 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-cvsdv\" (UniqueName: \"kubernetes.io/projected/aef931e2-0a2a-4b03-a178-1dd95bcc83b7-kube-api-access-cvsdv\") pod \"certified-operators-cmwqf\" (UID: \"aef931e2-0a2a-4b03-a178-1dd95bcc83b7\") " pod="openshift-marketplace/certified-operators-cmwqf" Jan 22 08:13:16 crc kubenswrapper[4982]: I0122 08:13:16.866251 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cmwqf" Jan 22 08:13:17 crc kubenswrapper[4982]: I0122 08:13:17.434323 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cmwqf"] Jan 22 08:13:17 crc kubenswrapper[4982]: W0122 08:13:17.434473 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaef931e2_0a2a_4b03_a178_1dd95bcc83b7.slice/crio-92ce4123c9b1e3eda114a0a7772fa6ee15b844b067f2fa898ce1978d3662882a WatchSource:0}: Error finding container 92ce4123c9b1e3eda114a0a7772fa6ee15b844b067f2fa898ce1978d3662882a: Status 404 returned error can't find the container with id 92ce4123c9b1e3eda114a0a7772fa6ee15b844b067f2fa898ce1978d3662882a Jan 22 08:13:18 crc kubenswrapper[4982]: I0122 08:13:18.249207 4982 generic.go:334] "Generic (PLEG): container finished" podID="aef931e2-0a2a-4b03-a178-1dd95bcc83b7" containerID="9395cbe1bca680a36dc897a1a4e8f15892dc8ae20d56f19909c55cd3501a2f33" exitCode=0 Jan 22 08:13:18 crc kubenswrapper[4982]: I0122 08:13:18.249651 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cmwqf" event={"ID":"aef931e2-0a2a-4b03-a178-1dd95bcc83b7","Type":"ContainerDied","Data":"9395cbe1bca680a36dc897a1a4e8f15892dc8ae20d56f19909c55cd3501a2f33"} Jan 22 08:13:18 crc kubenswrapper[4982]: I0122 08:13:18.249693 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cmwqf" event={"ID":"aef931e2-0a2a-4b03-a178-1dd95bcc83b7","Type":"ContainerStarted","Data":"92ce4123c9b1e3eda114a0a7772fa6ee15b844b067f2fa898ce1978d3662882a"} Jan 22 08:13:18 crc kubenswrapper[4982]: I0122 08:13:18.973821 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 08:13:18 crc kubenswrapper[4982]: I0122 08:13:18.975256 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 08:13:19 crc kubenswrapper[4982]: I0122 08:13:19.262488 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cmwqf" event={"ID":"aef931e2-0a2a-4b03-a178-1dd95bcc83b7","Type":"ContainerStarted","Data":"206b2c6962eae4b064ae6ce92adecccf5e174d1b04c83c6182c1e42229aba35b"} Jan 22 08:13:20 crc kubenswrapper[4982]: I0122 08:13:20.279689 4982 generic.go:334] "Generic (PLEG): container finished" podID="aef931e2-0a2a-4b03-a178-1dd95bcc83b7" containerID="206b2c6962eae4b064ae6ce92adecccf5e174d1b04c83c6182c1e42229aba35b" exitCode=0 Jan 22 08:13:20 crc kubenswrapper[4982]: I0122 08:13:20.279748 4982 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-marketplace/certified-operators-cmwqf" event={"ID":"aef931e2-0a2a-4b03-a178-1dd95bcc83b7","Type":"ContainerDied","Data":"206b2c6962eae4b064ae6ce92adecccf5e174d1b04c83c6182c1e42229aba35b"} Jan 22 08:13:21 crc kubenswrapper[4982]: I0122 08:13:21.289993 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cmwqf" event={"ID":"aef931e2-0a2a-4b03-a178-1dd95bcc83b7","Type":"ContainerStarted","Data":"894494c8f1abfdaeacbb033a75d14709b45e017eadffa1befb3fea9a92355669"} Jan 22 08:13:21 crc kubenswrapper[4982]: I0122 08:13:21.328508 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-cmwqf" podStartSLOduration=2.732226745 podStartE2EDuration="5.328474172s" podCreationTimestamp="2026-01-22 08:13:16 +0000 UTC" firstStartedPulling="2026-01-22 08:13:18.257294153 +0000 UTC m=+8859.095932156" lastFinishedPulling="2026-01-22 08:13:20.85354159 +0000 UTC m=+8861.692179583" observedRunningTime="2026-01-22 08:13:21.313994177 +0000 UTC m=+8862.152632260" watchObservedRunningTime="2026-01-22 08:13:21.328474172 +0000 UTC m=+8862.167112215" Jan 22 08:13:21 crc kubenswrapper[4982]: I0122 08:13:21.703187 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-r44cj" Jan 22 08:13:21 crc kubenswrapper[4982]: I0122 08:13:21.703234 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-r44cj" Jan 22 08:13:21 crc kubenswrapper[4982]: I0122 08:13:21.760060 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-r44cj" Jan 22 08:13:22 crc kubenswrapper[4982]: I0122 08:13:22.354847 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-r44cj" Jan 22 08:13:23 crc kubenswrapper[4982]: I0122 08:13:23.744295 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-r44cj"] Jan 22 08:13:24 crc kubenswrapper[4982]: I0122 08:13:24.323504 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-r44cj" podUID="656acf98-6651-45bc-9245-ced961cb9d8f" containerName="registry-server" containerID="cri-o://ea2867d88e18c7be3124f623b54cff0fa48b45129f85563d6204ac6588de0c9f" gracePeriod=2 Jan 22 08:13:26 crc kubenswrapper[4982]: I0122 08:13:26.355761 4982 generic.go:334] "Generic (PLEG): container finished" podID="656acf98-6651-45bc-9245-ced961cb9d8f" containerID="ea2867d88e18c7be3124f623b54cff0fa48b45129f85563d6204ac6588de0c9f" exitCode=0 Jan 22 08:13:26 crc kubenswrapper[4982]: I0122 08:13:26.356000 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r44cj" event={"ID":"656acf98-6651-45bc-9245-ced961cb9d8f","Type":"ContainerDied","Data":"ea2867d88e18c7be3124f623b54cff0fa48b45129f85563d6204ac6588de0c9f"} Jan 22 08:13:26 crc kubenswrapper[4982]: I0122 08:13:26.780222 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r44cj" Jan 22 08:13:26 crc kubenswrapper[4982]: I0122 08:13:26.866823 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-cmwqf" Jan 22 08:13:26 crc kubenswrapper[4982]: I0122 08:13:26.866970 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-cmwqf" Jan 22 08:13:26 crc kubenswrapper[4982]: I0122 08:13:26.911897 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wc65c\" (UniqueName: \"kubernetes.io/projected/656acf98-6651-45bc-9245-ced961cb9d8f-kube-api-access-wc65c\") pod \"656acf98-6651-45bc-9245-ced961cb9d8f\" (UID: \"656acf98-6651-45bc-9245-ced961cb9d8f\") " Jan 22 08:13:26 crc kubenswrapper[4982]: I0122 08:13:26.912060 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/656acf98-6651-45bc-9245-ced961cb9d8f-utilities\") pod \"656acf98-6651-45bc-9245-ced961cb9d8f\" (UID: \"656acf98-6651-45bc-9245-ced961cb9d8f\") " Jan 22 08:13:26 crc kubenswrapper[4982]: I0122 08:13:26.912150 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/656acf98-6651-45bc-9245-ced961cb9d8f-catalog-content\") pod \"656acf98-6651-45bc-9245-ced961cb9d8f\" (UID: \"656acf98-6651-45bc-9245-ced961cb9d8f\") " Jan 22 08:13:26 crc kubenswrapper[4982]: I0122 08:13:26.914225 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/656acf98-6651-45bc-9245-ced961cb9d8f-utilities" (OuterVolumeSpecName: "utilities") pod "656acf98-6651-45bc-9245-ced961cb9d8f" (UID: "656acf98-6651-45bc-9245-ced961cb9d8f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 08:13:26 crc kubenswrapper[4982]: I0122 08:13:26.932087 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-cmwqf" Jan 22 08:13:26 crc kubenswrapper[4982]: I0122 08:13:26.939409 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/656acf98-6651-45bc-9245-ced961cb9d8f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "656acf98-6651-45bc-9245-ced961cb9d8f" (UID: "656acf98-6651-45bc-9245-ced961cb9d8f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 08:13:26 crc kubenswrapper[4982]: I0122 08:13:26.945184 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/656acf98-6651-45bc-9245-ced961cb9d8f-kube-api-access-wc65c" (OuterVolumeSpecName: "kube-api-access-wc65c") pod "656acf98-6651-45bc-9245-ced961cb9d8f" (UID: "656acf98-6651-45bc-9245-ced961cb9d8f"). InnerVolumeSpecName "kube-api-access-wc65c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 08:13:27 crc kubenswrapper[4982]: I0122 08:13:27.015039 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/656acf98-6651-45bc-9245-ced961cb9d8f-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 08:13:27 crc kubenswrapper[4982]: I0122 08:13:27.015072 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/656acf98-6651-45bc-9245-ced961cb9d8f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 08:13:27 crc kubenswrapper[4982]: I0122 08:13:27.015084 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wc65c\" (UniqueName: \"kubernetes.io/projected/656acf98-6651-45bc-9245-ced961cb9d8f-kube-api-access-wc65c\") on node \"crc\" DevicePath \"\"" Jan 22 08:13:27 crc kubenswrapper[4982]: I0122 08:13:27.387497 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r44cj" event={"ID":"656acf98-6651-45bc-9245-ced961cb9d8f","Type":"ContainerDied","Data":"a3b5f862ec98a2d83f9bad66d694b6053e741c37a438b23786b44558dc3011e6"} Jan 22 08:13:27 crc kubenswrapper[4982]: I0122 08:13:27.387589 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r44cj" Jan 22 08:13:27 crc kubenswrapper[4982]: I0122 08:13:27.388497 4982 scope.go:117] "RemoveContainer" containerID="ea2867d88e18c7be3124f623b54cff0fa48b45129f85563d6204ac6588de0c9f" Jan 22 08:13:27 crc kubenswrapper[4982]: I0122 08:13:27.427186 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-r44cj"] Jan 22 08:13:27 crc kubenswrapper[4982]: I0122 08:13:27.433830 4982 scope.go:117] "RemoveContainer" containerID="2eafc9c94f523772dd959830720e24c7be029af5a8777ce5f7101ded668ac30d" Jan 22 08:13:27 crc kubenswrapper[4982]: I0122 08:13:27.435914 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-r44cj"] Jan 22 08:13:27 crc kubenswrapper[4982]: I0122 08:13:27.457914 4982 scope.go:117] "RemoveContainer" containerID="e0f77629d0bb9abd28a7b83bf3819b45ac69e12973939b338d15991178878157" Jan 22 08:13:27 crc kubenswrapper[4982]: I0122 08:13:27.465560 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-cmwqf" Jan 22 08:13:27 crc kubenswrapper[4982]: I0122 08:13:27.734008 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="656acf98-6651-45bc-9245-ced961cb9d8f" path="/var/lib/kubelet/pods/656acf98-6651-45bc-9245-ced961cb9d8f/volumes" Jan 22 08:13:29 crc kubenswrapper[4982]: I0122 08:13:29.733734 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cmwqf"] Jan 22 08:13:29 crc kubenswrapper[4982]: I0122 08:13:29.734331 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-cmwqf" podUID="aef931e2-0a2a-4b03-a178-1dd95bcc83b7" containerName="registry-server" containerID="cri-o://894494c8f1abfdaeacbb033a75d14709b45e017eadffa1befb3fea9a92355669" gracePeriod=2 Jan 22 08:13:30 crc kubenswrapper[4982]: I0122 08:13:30.421254 4982 generic.go:334] "Generic (PLEG): container finished" podID="aef931e2-0a2a-4b03-a178-1dd95bcc83b7" containerID="894494c8f1abfdaeacbb033a75d14709b45e017eadffa1befb3fea9a92355669" exitCode=0 Jan 22 08:13:30 crc kubenswrapper[4982]: I0122 
08:13:30.421463 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cmwqf" event={"ID":"aef931e2-0a2a-4b03-a178-1dd95bcc83b7","Type":"ContainerDied","Data":"894494c8f1abfdaeacbb033a75d14709b45e017eadffa1befb3fea9a92355669"} Jan 22 08:13:30 crc kubenswrapper[4982]: I0122 08:13:30.714309 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cmwqf" Jan 22 08:13:30 crc kubenswrapper[4982]: I0122 08:13:30.802607 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aef931e2-0a2a-4b03-a178-1dd95bcc83b7-catalog-content\") pod \"aef931e2-0a2a-4b03-a178-1dd95bcc83b7\" (UID: \"aef931e2-0a2a-4b03-a178-1dd95bcc83b7\") " Jan 22 08:13:30 crc kubenswrapper[4982]: I0122 08:13:30.802674 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cvsdv\" (UniqueName: \"kubernetes.io/projected/aef931e2-0a2a-4b03-a178-1dd95bcc83b7-kube-api-access-cvsdv\") pod \"aef931e2-0a2a-4b03-a178-1dd95bcc83b7\" (UID: \"aef931e2-0a2a-4b03-a178-1dd95bcc83b7\") " Jan 22 08:13:30 crc kubenswrapper[4982]: I0122 08:13:30.802706 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aef931e2-0a2a-4b03-a178-1dd95bcc83b7-utilities\") pod \"aef931e2-0a2a-4b03-a178-1dd95bcc83b7\" (UID: \"aef931e2-0a2a-4b03-a178-1dd95bcc83b7\") " Jan 22 08:13:30 crc kubenswrapper[4982]: I0122 08:13:30.803814 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aef931e2-0a2a-4b03-a178-1dd95bcc83b7-utilities" (OuterVolumeSpecName: "utilities") pod "aef931e2-0a2a-4b03-a178-1dd95bcc83b7" (UID: "aef931e2-0a2a-4b03-a178-1dd95bcc83b7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 08:13:30 crc kubenswrapper[4982]: I0122 08:13:30.810212 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aef931e2-0a2a-4b03-a178-1dd95bcc83b7-kube-api-access-cvsdv" (OuterVolumeSpecName: "kube-api-access-cvsdv") pod "aef931e2-0a2a-4b03-a178-1dd95bcc83b7" (UID: "aef931e2-0a2a-4b03-a178-1dd95bcc83b7"). InnerVolumeSpecName "kube-api-access-cvsdv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 08:13:30 crc kubenswrapper[4982]: I0122 08:13:30.853237 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aef931e2-0a2a-4b03-a178-1dd95bcc83b7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "aef931e2-0a2a-4b03-a178-1dd95bcc83b7" (UID: "aef931e2-0a2a-4b03-a178-1dd95bcc83b7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 08:13:30 crc kubenswrapper[4982]: I0122 08:13:30.905508 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aef931e2-0a2a-4b03-a178-1dd95bcc83b7-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 08:13:30 crc kubenswrapper[4982]: I0122 08:13:30.905564 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cvsdv\" (UniqueName: \"kubernetes.io/projected/aef931e2-0a2a-4b03-a178-1dd95bcc83b7-kube-api-access-cvsdv\") on node \"crc\" DevicePath \"\"" Jan 22 08:13:30 crc kubenswrapper[4982]: I0122 08:13:30.905577 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aef931e2-0a2a-4b03-a178-1dd95bcc83b7-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 08:13:31 crc kubenswrapper[4982]: I0122 08:13:31.433091 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cmwqf" event={"ID":"aef931e2-0a2a-4b03-a178-1dd95bcc83b7","Type":"ContainerDied","Data":"92ce4123c9b1e3eda114a0a7772fa6ee15b844b067f2fa898ce1978d3662882a"} Jan 22 08:13:31 crc kubenswrapper[4982]: I0122 08:13:31.433146 4982 scope.go:117] "RemoveContainer" containerID="894494c8f1abfdaeacbb033a75d14709b45e017eadffa1befb3fea9a92355669" Jan 22 08:13:31 crc kubenswrapper[4982]: I0122 08:13:31.433251 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cmwqf" Jan 22 08:13:31 crc kubenswrapper[4982]: I0122 08:13:31.471255 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cmwqf"] Jan 22 08:13:31 crc kubenswrapper[4982]: I0122 08:13:31.475799 4982 scope.go:117] "RemoveContainer" containerID="206b2c6962eae4b064ae6ce92adecccf5e174d1b04c83c6182c1e42229aba35b" Jan 22 08:13:31 crc kubenswrapper[4982]: I0122 08:13:31.482204 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-cmwqf"] Jan 22 08:13:31 crc kubenswrapper[4982]: I0122 08:13:31.495545 4982 scope.go:117] "RemoveContainer" containerID="9395cbe1bca680a36dc897a1a4e8f15892dc8ae20d56f19909c55cd3501a2f33" Jan 22 08:13:31 crc kubenswrapper[4982]: I0122 08:13:31.732419 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aef931e2-0a2a-4b03-a178-1dd95bcc83b7" path="/var/lib/kubelet/pods/aef931e2-0a2a-4b03-a178-1dd95bcc83b7/volumes" Jan 22 08:13:48 crc kubenswrapper[4982]: I0122 08:13:48.973524 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 08:13:48 crc kubenswrapper[4982]: I0122 08:13:48.974136 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 08:13:48 crc kubenswrapper[4982]: I0122 08:13:48.974185 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 08:13:48 crc kubenswrapper[4982]: I0122 08:13:48.975041 4982 
kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"41a2e8f10c29d84ed0900a2d6e5aaabf6168aaf17baae9c7f573edf1e26d7aa4"} pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 08:13:48 crc kubenswrapper[4982]: I0122 08:13:48.975094 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" containerID="cri-o://41a2e8f10c29d84ed0900a2d6e5aaabf6168aaf17baae9c7f573edf1e26d7aa4" gracePeriod=600 Jan 22 08:13:49 crc kubenswrapper[4982]: E0122 08:13:49.102294 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:13:49 crc kubenswrapper[4982]: I0122 08:13:49.654331 4982 generic.go:334] "Generic (PLEG): container finished" podID="2829369e-72ba-4637-853b-88f5cf242a0e" containerID="41a2e8f10c29d84ed0900a2d6e5aaabf6168aaf17baae9c7f573edf1e26d7aa4" exitCode=0 Jan 22 08:13:49 crc kubenswrapper[4982]: I0122 08:13:49.654439 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerDied","Data":"41a2e8f10c29d84ed0900a2d6e5aaabf6168aaf17baae9c7f573edf1e26d7aa4"} Jan 22 08:13:49 crc kubenswrapper[4982]: I0122 08:13:49.654622 4982 scope.go:117] "RemoveContainer" containerID="bad011a51d0b8a4c9fe84ee893d41254ba44fcb39dbd998cc8ad2672318fc863" Jan 22 08:13:49 crc kubenswrapper[4982]: I0122 08:13:49.655961 4982 scope.go:117] "RemoveContainer" containerID="41a2e8f10c29d84ed0900a2d6e5aaabf6168aaf17baae9c7f573edf1e26d7aa4" Jan 22 08:13:49 crc kubenswrapper[4982]: E0122 08:13:49.656316 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:14:00 crc kubenswrapper[4982]: I0122 08:14:00.720372 4982 scope.go:117] "RemoveContainer" containerID="41a2e8f10c29d84ed0900a2d6e5aaabf6168aaf17baae9c7f573edf1e26d7aa4" Jan 22 08:14:00 crc kubenswrapper[4982]: E0122 08:14:00.721672 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:14:13 crc kubenswrapper[4982]: I0122 08:14:13.719732 4982 scope.go:117] "RemoveContainer" containerID="41a2e8f10c29d84ed0900a2d6e5aaabf6168aaf17baae9c7f573edf1e26d7aa4" Jan 22 
08:14:13 crc kubenswrapper[4982]: E0122 08:14:13.720632 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:14:25 crc kubenswrapper[4982]: I0122 08:14:25.719644 4982 scope.go:117] "RemoveContainer" containerID="41a2e8f10c29d84ed0900a2d6e5aaabf6168aaf17baae9c7f573edf1e26d7aa4" Jan 22 08:14:25 crc kubenswrapper[4982]: E0122 08:14:25.720492 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:14:38 crc kubenswrapper[4982]: I0122 08:14:38.719862 4982 scope.go:117] "RemoveContainer" containerID="41a2e8f10c29d84ed0900a2d6e5aaabf6168aaf17baae9c7f573edf1e26d7aa4" Jan 22 08:14:38 crc kubenswrapper[4982]: E0122 08:14:38.720619 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:14:51 crc kubenswrapper[4982]: I0122 08:14:51.719882 4982 scope.go:117] "RemoveContainer" containerID="41a2e8f10c29d84ed0900a2d6e5aaabf6168aaf17baae9c7f573edf1e26d7aa4" Jan 22 08:14:51 crc kubenswrapper[4982]: E0122 08:14:51.720825 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:15:00 crc kubenswrapper[4982]: I0122 08:15:00.170426 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484495-zwqn4"] Jan 22 08:15:00 crc kubenswrapper[4982]: E0122 08:15:00.171283 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="656acf98-6651-45bc-9245-ced961cb9d8f" containerName="extract-utilities" Jan 22 08:15:00 crc kubenswrapper[4982]: I0122 08:15:00.171295 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="656acf98-6651-45bc-9245-ced961cb9d8f" containerName="extract-utilities" Jan 22 08:15:00 crc kubenswrapper[4982]: E0122 08:15:00.171306 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aef931e2-0a2a-4b03-a178-1dd95bcc83b7" containerName="extract-utilities" Jan 22 08:15:00 crc kubenswrapper[4982]: I0122 08:15:00.171312 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="aef931e2-0a2a-4b03-a178-1dd95bcc83b7" containerName="extract-utilities" Jan 22 08:15:00 crc kubenswrapper[4982]: E0122 
08:15:00.171330 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="656acf98-6651-45bc-9245-ced961cb9d8f" containerName="extract-content" Jan 22 08:15:00 crc kubenswrapper[4982]: I0122 08:15:00.171336 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="656acf98-6651-45bc-9245-ced961cb9d8f" containerName="extract-content" Jan 22 08:15:00 crc kubenswrapper[4982]: E0122 08:15:00.171361 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aef931e2-0a2a-4b03-a178-1dd95bcc83b7" containerName="extract-content" Jan 22 08:15:00 crc kubenswrapper[4982]: I0122 08:15:00.171366 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="aef931e2-0a2a-4b03-a178-1dd95bcc83b7" containerName="extract-content" Jan 22 08:15:00 crc kubenswrapper[4982]: E0122 08:15:00.171381 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="656acf98-6651-45bc-9245-ced961cb9d8f" containerName="registry-server" Jan 22 08:15:00 crc kubenswrapper[4982]: I0122 08:15:00.171387 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="656acf98-6651-45bc-9245-ced961cb9d8f" containerName="registry-server" Jan 22 08:15:00 crc kubenswrapper[4982]: E0122 08:15:00.171406 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aef931e2-0a2a-4b03-a178-1dd95bcc83b7" containerName="registry-server" Jan 22 08:15:00 crc kubenswrapper[4982]: I0122 08:15:00.171414 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="aef931e2-0a2a-4b03-a178-1dd95bcc83b7" containerName="registry-server" Jan 22 08:15:00 crc kubenswrapper[4982]: I0122 08:15:00.171600 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="656acf98-6651-45bc-9245-ced961cb9d8f" containerName="registry-server" Jan 22 08:15:00 crc kubenswrapper[4982]: I0122 08:15:00.171616 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="aef931e2-0a2a-4b03-a178-1dd95bcc83b7" containerName="registry-server" Jan 22 08:15:00 crc kubenswrapper[4982]: I0122 08:15:00.172373 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484495-zwqn4" Jan 22 08:15:00 crc kubenswrapper[4982]: I0122 08:15:00.175652 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 22 08:15:00 crc kubenswrapper[4982]: I0122 08:15:00.175991 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 22 08:15:00 crc kubenswrapper[4982]: I0122 08:15:00.186932 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484495-zwqn4"] Jan 22 08:15:00 crc kubenswrapper[4982]: I0122 08:15:00.315814 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6fe13bcc-4f43-4bf3-bd1d-09349c24d267-config-volume\") pod \"collect-profiles-29484495-zwqn4\" (UID: \"6fe13bcc-4f43-4bf3-bd1d-09349c24d267\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484495-zwqn4" Jan 22 08:15:00 crc kubenswrapper[4982]: I0122 08:15:00.316185 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjcvg\" (UniqueName: \"kubernetes.io/projected/6fe13bcc-4f43-4bf3-bd1d-09349c24d267-kube-api-access-zjcvg\") pod \"collect-profiles-29484495-zwqn4\" (UID: \"6fe13bcc-4f43-4bf3-bd1d-09349c24d267\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484495-zwqn4" Jan 22 08:15:00 crc kubenswrapper[4982]: I0122 08:15:00.316268 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6fe13bcc-4f43-4bf3-bd1d-09349c24d267-secret-volume\") pod \"collect-profiles-29484495-zwqn4\" (UID: \"6fe13bcc-4f43-4bf3-bd1d-09349c24d267\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484495-zwqn4" Jan 22 08:15:00 crc kubenswrapper[4982]: I0122 08:15:00.419093 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6fe13bcc-4f43-4bf3-bd1d-09349c24d267-config-volume\") pod \"collect-profiles-29484495-zwqn4\" (UID: \"6fe13bcc-4f43-4bf3-bd1d-09349c24d267\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484495-zwqn4" Jan 22 08:15:00 crc kubenswrapper[4982]: I0122 08:15:00.419215 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjcvg\" (UniqueName: \"kubernetes.io/projected/6fe13bcc-4f43-4bf3-bd1d-09349c24d267-kube-api-access-zjcvg\") pod \"collect-profiles-29484495-zwqn4\" (UID: \"6fe13bcc-4f43-4bf3-bd1d-09349c24d267\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484495-zwqn4" Jan 22 08:15:00 crc kubenswrapper[4982]: I0122 08:15:00.419298 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6fe13bcc-4f43-4bf3-bd1d-09349c24d267-secret-volume\") pod \"collect-profiles-29484495-zwqn4\" (UID: \"6fe13bcc-4f43-4bf3-bd1d-09349c24d267\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484495-zwqn4" Jan 22 08:15:00 crc kubenswrapper[4982]: I0122 08:15:00.422403 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6fe13bcc-4f43-4bf3-bd1d-09349c24d267-config-volume\") pod 
\"collect-profiles-29484495-zwqn4\" (UID: \"6fe13bcc-4f43-4bf3-bd1d-09349c24d267\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484495-zwqn4" Jan 22 08:15:00 crc kubenswrapper[4982]: I0122 08:15:00.428343 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6fe13bcc-4f43-4bf3-bd1d-09349c24d267-secret-volume\") pod \"collect-profiles-29484495-zwqn4\" (UID: \"6fe13bcc-4f43-4bf3-bd1d-09349c24d267\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484495-zwqn4" Jan 22 08:15:00 crc kubenswrapper[4982]: I0122 08:15:00.455972 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjcvg\" (UniqueName: \"kubernetes.io/projected/6fe13bcc-4f43-4bf3-bd1d-09349c24d267-kube-api-access-zjcvg\") pod \"collect-profiles-29484495-zwqn4\" (UID: \"6fe13bcc-4f43-4bf3-bd1d-09349c24d267\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484495-zwqn4" Jan 22 08:15:00 crc kubenswrapper[4982]: I0122 08:15:00.502277 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484495-zwqn4" Jan 22 08:15:00 crc kubenswrapper[4982]: I0122 08:15:00.981330 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484495-zwqn4"] Jan 22 08:15:00 crc kubenswrapper[4982]: W0122 08:15:00.982911 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6fe13bcc_4f43_4bf3_bd1d_09349c24d267.slice/crio-2ddaa79a4d20659d96f3081dac3d9d30102fc1cfa0b3bdc3e1c34d8dae4fe738 WatchSource:0}: Error finding container 2ddaa79a4d20659d96f3081dac3d9d30102fc1cfa0b3bdc3e1c34d8dae4fe738: Status 404 returned error can't find the container with id 2ddaa79a4d20659d96f3081dac3d9d30102fc1cfa0b3bdc3e1c34d8dae4fe738 Jan 22 08:15:01 crc kubenswrapper[4982]: I0122 08:15:01.451262 4982 generic.go:334] "Generic (PLEG): container finished" podID="6fe13bcc-4f43-4bf3-bd1d-09349c24d267" containerID="dc06c0e1b716a0883802f5fcc0c7d6f181c63ac6b168e33f80e18fa6ce70e68b" exitCode=0 Jan 22 08:15:01 crc kubenswrapper[4982]: I0122 08:15:01.451319 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484495-zwqn4" event={"ID":"6fe13bcc-4f43-4bf3-bd1d-09349c24d267","Type":"ContainerDied","Data":"dc06c0e1b716a0883802f5fcc0c7d6f181c63ac6b168e33f80e18fa6ce70e68b"} Jan 22 08:15:01 crc kubenswrapper[4982]: I0122 08:15:01.451498 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484495-zwqn4" event={"ID":"6fe13bcc-4f43-4bf3-bd1d-09349c24d267","Type":"ContainerStarted","Data":"2ddaa79a4d20659d96f3081dac3d9d30102fc1cfa0b3bdc3e1c34d8dae4fe738"} Jan 22 08:15:02 crc kubenswrapper[4982]: I0122 08:15:02.805418 4982 scope.go:117] "RemoveContainer" containerID="41a2e8f10c29d84ed0900a2d6e5aaabf6168aaf17baae9c7f573edf1e26d7aa4" Jan 22 08:15:02 crc kubenswrapper[4982]: E0122 08:15:02.806224 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" 
podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:15:03 crc kubenswrapper[4982]: I0122 08:15:03.128053 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484495-zwqn4" Jan 22 08:15:03 crc kubenswrapper[4982]: I0122 08:15:03.219188 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6fe13bcc-4f43-4bf3-bd1d-09349c24d267-secret-volume\") pod \"6fe13bcc-4f43-4bf3-bd1d-09349c24d267\" (UID: \"6fe13bcc-4f43-4bf3-bd1d-09349c24d267\") " Jan 22 08:15:03 crc kubenswrapper[4982]: I0122 08:15:03.219265 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zjcvg\" (UniqueName: \"kubernetes.io/projected/6fe13bcc-4f43-4bf3-bd1d-09349c24d267-kube-api-access-zjcvg\") pod \"6fe13bcc-4f43-4bf3-bd1d-09349c24d267\" (UID: \"6fe13bcc-4f43-4bf3-bd1d-09349c24d267\") " Jan 22 08:15:03 crc kubenswrapper[4982]: I0122 08:15:03.219524 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6fe13bcc-4f43-4bf3-bd1d-09349c24d267-config-volume\") pod \"6fe13bcc-4f43-4bf3-bd1d-09349c24d267\" (UID: \"6fe13bcc-4f43-4bf3-bd1d-09349c24d267\") " Jan 22 08:15:03 crc kubenswrapper[4982]: I0122 08:15:03.220204 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6fe13bcc-4f43-4bf3-bd1d-09349c24d267-config-volume" (OuterVolumeSpecName: "config-volume") pod "6fe13bcc-4f43-4bf3-bd1d-09349c24d267" (UID: "6fe13bcc-4f43-4bf3-bd1d-09349c24d267"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 08:15:03 crc kubenswrapper[4982]: I0122 08:15:03.226346 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6fe13bcc-4f43-4bf3-bd1d-09349c24d267-kube-api-access-zjcvg" (OuterVolumeSpecName: "kube-api-access-zjcvg") pod "6fe13bcc-4f43-4bf3-bd1d-09349c24d267" (UID: "6fe13bcc-4f43-4bf3-bd1d-09349c24d267"). InnerVolumeSpecName "kube-api-access-zjcvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 08:15:03 crc kubenswrapper[4982]: I0122 08:15:03.226881 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fe13bcc-4f43-4bf3-bd1d-09349c24d267-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "6fe13bcc-4f43-4bf3-bd1d-09349c24d267" (UID: "6fe13bcc-4f43-4bf3-bd1d-09349c24d267"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 08:15:03 crc kubenswrapper[4982]: I0122 08:15:03.321928 4982 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6fe13bcc-4f43-4bf3-bd1d-09349c24d267-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 08:15:03 crc kubenswrapper[4982]: I0122 08:15:03.321961 4982 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6fe13bcc-4f43-4bf3-bd1d-09349c24d267-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 22 08:15:03 crc kubenswrapper[4982]: I0122 08:15:03.321971 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zjcvg\" (UniqueName: \"kubernetes.io/projected/6fe13bcc-4f43-4bf3-bd1d-09349c24d267-kube-api-access-zjcvg\") on node \"crc\" DevicePath \"\"" Jan 22 08:15:03 crc kubenswrapper[4982]: I0122 08:15:03.471578 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484495-zwqn4" event={"ID":"6fe13bcc-4f43-4bf3-bd1d-09349c24d267","Type":"ContainerDied","Data":"2ddaa79a4d20659d96f3081dac3d9d30102fc1cfa0b3bdc3e1c34d8dae4fe738"} Jan 22 08:15:03 crc kubenswrapper[4982]: I0122 08:15:03.471632 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484495-zwqn4" Jan 22 08:15:03 crc kubenswrapper[4982]: I0122 08:15:03.471638 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2ddaa79a4d20659d96f3081dac3d9d30102fc1cfa0b3bdc3e1c34d8dae4fe738" Jan 22 08:15:04 crc kubenswrapper[4982]: I0122 08:15:04.208462 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484450-7s22h"] Jan 22 08:15:04 crc kubenswrapper[4982]: I0122 08:15:04.222230 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484450-7s22h"] Jan 22 08:15:05 crc kubenswrapper[4982]: I0122 08:15:05.731702 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="615a38ca-24cd-4f8c-82cf-764c8f0f68bb" path="/var/lib/kubelet/pods/615a38ca-24cd-4f8c-82cf-764c8f0f68bb/volumes" Jan 22 08:15:10 crc kubenswrapper[4982]: I0122 08:15:10.682160 4982 scope.go:117] "RemoveContainer" containerID="3d2d6931a346bf696f7542598db763bb327800d989641511b62eb406f5456755" Jan 22 08:15:17 crc kubenswrapper[4982]: I0122 08:15:17.719710 4982 scope.go:117] "RemoveContainer" containerID="41a2e8f10c29d84ed0900a2d6e5aaabf6168aaf17baae9c7f573edf1e26d7aa4" Jan 22 08:15:17 crc kubenswrapper[4982]: E0122 08:15:17.720451 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:15:30 crc kubenswrapper[4982]: I0122 08:15:30.718774 4982 scope.go:117] "RemoveContainer" containerID="41a2e8f10c29d84ed0900a2d6e5aaabf6168aaf17baae9c7f573edf1e26d7aa4" Jan 22 08:15:30 crc kubenswrapper[4982]: E0122 08:15:30.720681 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
Jan 22 08:15:45 crc kubenswrapper[4982]: I0122 08:15:45.718997 4982 scope.go:117] "RemoveContainer" containerID="41a2e8f10c29d84ed0900a2d6e5aaabf6168aaf17baae9c7f573edf1e26d7aa4"
Jan 22 08:15:45 crc kubenswrapper[4982]: E0122 08:15:45.719958 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 08:15:59 crc kubenswrapper[4982]: I0122 08:15:59.728742 4982 scope.go:117] "RemoveContainer" containerID="41a2e8f10c29d84ed0900a2d6e5aaabf6168aaf17baae9c7f573edf1e26d7aa4"
Jan 22 08:15:59 crc kubenswrapper[4982]: E0122 08:15:59.729691 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 08:16:14 crc kubenswrapper[4982]: I0122 08:16:14.719167 4982 scope.go:117] "RemoveContainer" containerID="41a2e8f10c29d84ed0900a2d6e5aaabf6168aaf17baae9c7f573edf1e26d7aa4"
Jan 22 08:16:14 crc kubenswrapper[4982]: E0122 08:16:14.720193 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 08:16:27 crc kubenswrapper[4982]: I0122 08:16:27.724775 4982 scope.go:117] "RemoveContainer" containerID="41a2e8f10c29d84ed0900a2d6e5aaabf6168aaf17baae9c7f573edf1e26d7aa4"
Jan 22 08:16:27 crc kubenswrapper[4982]: E0122 08:16:27.725620 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 08:16:41 crc kubenswrapper[4982]: I0122 08:16:41.720048 4982 scope.go:117] "RemoveContainer" containerID="41a2e8f10c29d84ed0900a2d6e5aaabf6168aaf17baae9c7f573edf1e26d7aa4"
Jan 22 08:16:41 crc kubenswrapper[4982]: E0122 08:16:41.720723 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 08:16:54 crc kubenswrapper[4982]: I0122 08:16:54.720308 4982 scope.go:117] "RemoveContainer" containerID="41a2e8f10c29d84ed0900a2d6e5aaabf6168aaf17baae9c7f573edf1e26d7aa4"
Jan 22 08:16:54 crc kubenswrapper[4982]: E0122 08:16:54.724073 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 08:17:06 crc kubenswrapper[4982]: I0122 08:17:06.719804 4982 scope.go:117] "RemoveContainer" containerID="41a2e8f10c29d84ed0900a2d6e5aaabf6168aaf17baae9c7f573edf1e26d7aa4"
Jan 22 08:17:06 crc kubenswrapper[4982]: E0122 08:17:06.721210 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 08:17:10 crc kubenswrapper[4982]: I0122 08:17:10.812046 4982 scope.go:117] "RemoveContainer" containerID="d27b2cd18f46207a8af6b2015a8ceebcbba179556590873df7a799398fb6bb58"
Jan 22 08:17:10 crc kubenswrapper[4982]: I0122 08:17:10.863994 4982 scope.go:117] "RemoveContainer" containerID="8c24d3736f05f5c4cbee0de352bd40485224b0f61a23542729b0114242de625b"
Jan 22 08:17:10 crc kubenswrapper[4982]: I0122 08:17:10.902539 4982 scope.go:117] "RemoveContainer" containerID="72fe19d6eb6face36956ed6e615cc8b335095dd555c7da8f7d76a85324245645"
Jan 22 08:17:18 crc kubenswrapper[4982]: I0122 08:17:18.719892 4982 scope.go:117] "RemoveContainer" containerID="41a2e8f10c29d84ed0900a2d6e5aaabf6168aaf17baae9c7f573edf1e26d7aa4"
Jan 22 08:17:18 crc kubenswrapper[4982]: E0122 08:17:18.720843 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 08:17:31 crc kubenswrapper[4982]: I0122 08:17:31.720537 4982 scope.go:117] "RemoveContainer" containerID="41a2e8f10c29d84ed0900a2d6e5aaabf6168aaf17baae9c7f573edf1e26d7aa4"
Jan 22 08:17:31 crc kubenswrapper[4982]: E0122 08:17:31.721515 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 08:17:44 crc kubenswrapper[4982]: I0122 08:17:44.720415 4982 scope.go:117] "RemoveContainer" containerID="41a2e8f10c29d84ed0900a2d6e5aaabf6168aaf17baae9c7f573edf1e26d7aa4"
Jan 22 08:17:44 crc kubenswrapper[4982]: E0122 08:17:44.721405 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 08:17:58 crc kubenswrapper[4982]: I0122 08:17:58.720060 4982 scope.go:117] "RemoveContainer" containerID="41a2e8f10c29d84ed0900a2d6e5aaabf6168aaf17baae9c7f573edf1e26d7aa4"
Jan 22 08:17:58 crc kubenswrapper[4982]: E0122 08:17:58.720812 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 08:18:11 crc kubenswrapper[4982]: I0122 08:18:11.719648 4982 scope.go:117] "RemoveContainer" containerID="41a2e8f10c29d84ed0900a2d6e5aaabf6168aaf17baae9c7f573edf1e26d7aa4"
Jan 22 08:18:11 crc kubenswrapper[4982]: E0122 08:18:11.720828 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 08:18:26 crc kubenswrapper[4982]: I0122 08:18:26.720323 4982 scope.go:117] "RemoveContainer" containerID="41a2e8f10c29d84ed0900a2d6e5aaabf6168aaf17baae9c7f573edf1e26d7aa4"
Jan 22 08:18:26 crc kubenswrapper[4982]: E0122 08:18:26.721571 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 08:18:38 crc kubenswrapper[4982]: I0122 08:18:38.719477 4982 scope.go:117] "RemoveContainer" containerID="41a2e8f10c29d84ed0900a2d6e5aaabf6168aaf17baae9c7f573edf1e26d7aa4"
Jan 22 08:18:38 crc kubenswrapper[4982]: E0122 08:18:38.720366 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 08:18:50 crc kubenswrapper[4982]: I0122 08:18:50.720562 4982 scope.go:117] "RemoveContainer" containerID="41a2e8f10c29d84ed0900a2d6e5aaabf6168aaf17baae9c7f573edf1e26d7aa4"
Jan 22 08:18:52 crc kubenswrapper[4982]: I0122 08:18:52.233244 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"200707dc0938630f0897e0c4992e9deb8308b5b713cde5d988303b2702ee4a31"}
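Each "Error syncing pod" above is a sync-loop retry hitting the same restart back-off; the container only starts again at 08:18:50, once the "back-off 5m0s" window has elapsed. A sketch of the exponential back-off implied by those errors -- the 10s base and 5m cap are kubelet's documented defaults, assumed here rather than read from this cluster's configuration:

// backoff.go -- the restart back-off schedule behind "back-off 5m0s restarting failed".
package main

import (
	"fmt"
	"time"
)

func main() {
	base, maxDelay := 10*time.Second, 5*time.Minute // assumed kubelet defaults
	d := base
	for crash := 1; ; crash++ {
		fmt.Printf("crash %d: wait %v before next restart attempt\n", crash, d)
		if d >= maxDelay {
			fmt.Println("capped at 5m0s, matching the repeated errors in the log")
			break
		}
		d *= 2 // doubles on every crash until the cap
	}
}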
event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"200707dc0938630f0897e0c4992e9deb8308b5b713cde5d988303b2702ee4a31"} Jan 22 08:20:32 crc kubenswrapper[4982]: I0122 08:20:32.638460 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hvbsx"] Jan 22 08:20:32 crc kubenswrapper[4982]: E0122 08:20:32.639688 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fe13bcc-4f43-4bf3-bd1d-09349c24d267" containerName="collect-profiles" Jan 22 08:20:32 crc kubenswrapper[4982]: I0122 08:20:32.639705 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fe13bcc-4f43-4bf3-bd1d-09349c24d267" containerName="collect-profiles" Jan 22 08:20:32 crc kubenswrapper[4982]: I0122 08:20:32.640022 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fe13bcc-4f43-4bf3-bd1d-09349c24d267" containerName="collect-profiles" Jan 22 08:20:32 crc kubenswrapper[4982]: I0122 08:20:32.641971 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hvbsx" Jan 22 08:20:32 crc kubenswrapper[4982]: I0122 08:20:32.670152 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hvbsx"] Jan 22 08:20:32 crc kubenswrapper[4982]: I0122 08:20:32.821805 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e4d9541-597c-48bc-b2e7-34b658b89d28-catalog-content\") pod \"community-operators-hvbsx\" (UID: \"8e4d9541-597c-48bc-b2e7-34b658b89d28\") " pod="openshift-marketplace/community-operators-hvbsx" Jan 22 08:20:32 crc kubenswrapper[4982]: I0122 08:20:32.821924 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmr5b\" (UniqueName: \"kubernetes.io/projected/8e4d9541-597c-48bc-b2e7-34b658b89d28-kube-api-access-nmr5b\") pod \"community-operators-hvbsx\" (UID: \"8e4d9541-597c-48bc-b2e7-34b658b89d28\") " pod="openshift-marketplace/community-operators-hvbsx" Jan 22 08:20:32 crc kubenswrapper[4982]: I0122 08:20:32.822019 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e4d9541-597c-48bc-b2e7-34b658b89d28-utilities\") pod \"community-operators-hvbsx\" (UID: \"8e4d9541-597c-48bc-b2e7-34b658b89d28\") " pod="openshift-marketplace/community-operators-hvbsx" Jan 22 08:20:32 crc kubenswrapper[4982]: I0122 08:20:32.923519 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e4d9541-597c-48bc-b2e7-34b658b89d28-utilities\") pod \"community-operators-hvbsx\" (UID: \"8e4d9541-597c-48bc-b2e7-34b658b89d28\") " pod="openshift-marketplace/community-operators-hvbsx" Jan 22 08:20:32 crc kubenswrapper[4982]: I0122 08:20:32.924035 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e4d9541-597c-48bc-b2e7-34b658b89d28-catalog-content\") pod \"community-operators-hvbsx\" (UID: \"8e4d9541-597c-48bc-b2e7-34b658b89d28\") " pod="openshift-marketplace/community-operators-hvbsx" Jan 22 08:20:32 crc kubenswrapper[4982]: I0122 08:20:32.924054 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/8e4d9541-597c-48bc-b2e7-34b658b89d28-utilities\") pod \"community-operators-hvbsx\" (UID: \"8e4d9541-597c-48bc-b2e7-34b658b89d28\") " pod="openshift-marketplace/community-operators-hvbsx" Jan 22 08:20:32 crc kubenswrapper[4982]: I0122 08:20:32.924146 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmr5b\" (UniqueName: \"kubernetes.io/projected/8e4d9541-597c-48bc-b2e7-34b658b89d28-kube-api-access-nmr5b\") pod \"community-operators-hvbsx\" (UID: \"8e4d9541-597c-48bc-b2e7-34b658b89d28\") " pod="openshift-marketplace/community-operators-hvbsx" Jan 22 08:20:32 crc kubenswrapper[4982]: I0122 08:20:32.924441 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e4d9541-597c-48bc-b2e7-34b658b89d28-catalog-content\") pod \"community-operators-hvbsx\" (UID: \"8e4d9541-597c-48bc-b2e7-34b658b89d28\") " pod="openshift-marketplace/community-operators-hvbsx" Jan 22 08:20:32 crc kubenswrapper[4982]: I0122 08:20:32.943944 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmr5b\" (UniqueName: \"kubernetes.io/projected/8e4d9541-597c-48bc-b2e7-34b658b89d28-kube-api-access-nmr5b\") pod \"community-operators-hvbsx\" (UID: \"8e4d9541-597c-48bc-b2e7-34b658b89d28\") " pod="openshift-marketplace/community-operators-hvbsx" Jan 22 08:20:32 crc kubenswrapper[4982]: I0122 08:20:32.964172 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hvbsx" Jan 22 08:20:33 crc kubenswrapper[4982]: I0122 08:20:33.519751 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hvbsx"] Jan 22 08:20:33 crc kubenswrapper[4982]: W0122 08:20:33.527586 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8e4d9541_597c_48bc_b2e7_34b658b89d28.slice/crio-f099997eb2daab0ede184243166a24f11f7ce0d437f8c3037d52d0d0dff60b77 WatchSource:0}: Error finding container f099997eb2daab0ede184243166a24f11f7ce0d437f8c3037d52d0d0dff60b77: Status 404 returned error can't find the container with id f099997eb2daab0ede184243166a24f11f7ce0d437f8c3037d52d0d0dff60b77 Jan 22 08:20:34 crc kubenswrapper[4982]: I0122 08:20:34.442447 4982 generic.go:334] "Generic (PLEG): container finished" podID="8e4d9541-597c-48bc-b2e7-34b658b89d28" containerID="8ed98e4809f016387c6f3ede7e00b70e2644b9f283688b410f4ee226e75f4667" exitCode=0 Jan 22 08:20:34 crc kubenswrapper[4982]: I0122 08:20:34.442530 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hvbsx" event={"ID":"8e4d9541-597c-48bc-b2e7-34b658b89d28","Type":"ContainerDied","Data":"8ed98e4809f016387c6f3ede7e00b70e2644b9f283688b410f4ee226e75f4667"} Jan 22 08:20:34 crc kubenswrapper[4982]: I0122 08:20:34.443030 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hvbsx" event={"ID":"8e4d9541-597c-48bc-b2e7-34b658b89d28","Type":"ContainerStarted","Data":"f099997eb2daab0ede184243166a24f11f7ce0d437f8c3037d52d0d0dff60b77"} Jan 22 08:20:34 crc kubenswrapper[4982]: I0122 08:20:34.446186 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 08:20:35 crc kubenswrapper[4982]: I0122 08:20:35.456922 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hvbsx" 
event={"ID":"8e4d9541-597c-48bc-b2e7-34b658b89d28","Type":"ContainerStarted","Data":"ab21fa8fc3928e19a2c853973645247f3773ce6985c6b795f7d41aff108a8b2c"} Jan 22 08:20:35 crc kubenswrapper[4982]: E0122 08:20:35.729264 4982 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8e4d9541_597c_48bc_b2e7_34b658b89d28.slice/crio-conmon-ab21fa8fc3928e19a2c853973645247f3773ce6985c6b795f7d41aff108a8b2c.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8e4d9541_597c_48bc_b2e7_34b658b89d28.slice/crio-ab21fa8fc3928e19a2c853973645247f3773ce6985c6b795f7d41aff108a8b2c.scope\": RecentStats: unable to find data in memory cache]" Jan 22 08:20:36 crc kubenswrapper[4982]: I0122 08:20:36.491289 4982 generic.go:334] "Generic (PLEG): container finished" podID="8e4d9541-597c-48bc-b2e7-34b658b89d28" containerID="ab21fa8fc3928e19a2c853973645247f3773ce6985c6b795f7d41aff108a8b2c" exitCode=0 Jan 22 08:20:36 crc kubenswrapper[4982]: I0122 08:20:36.491569 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hvbsx" event={"ID":"8e4d9541-597c-48bc-b2e7-34b658b89d28","Type":"ContainerDied","Data":"ab21fa8fc3928e19a2c853973645247f3773ce6985c6b795f7d41aff108a8b2c"} Jan 22 08:20:37 crc kubenswrapper[4982]: I0122 08:20:37.509185 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hvbsx" event={"ID":"8e4d9541-597c-48bc-b2e7-34b658b89d28","Type":"ContainerStarted","Data":"9ca92011d8fc6f1e428f9f95f2f83939ca454db070d0c1e082c9b2cbfbb4ead4"} Jan 22 08:20:37 crc kubenswrapper[4982]: I0122 08:20:37.534606 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hvbsx" podStartSLOduration=3.06497232 podStartE2EDuration="5.534588767s" podCreationTimestamp="2026-01-22 08:20:32 +0000 UTC" firstStartedPulling="2026-01-22 08:20:34.445783357 +0000 UTC m=+9295.284421400" lastFinishedPulling="2026-01-22 08:20:36.915399834 +0000 UTC m=+9297.754037847" observedRunningTime="2026-01-22 08:20:37.533356343 +0000 UTC m=+9298.371994346" watchObservedRunningTime="2026-01-22 08:20:37.534588767 +0000 UTC m=+9298.373226770" Jan 22 08:20:42 crc kubenswrapper[4982]: I0122 08:20:42.965404 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hvbsx" Jan 22 08:20:42 crc kubenswrapper[4982]: I0122 08:20:42.965927 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hvbsx" Jan 22 08:20:43 crc kubenswrapper[4982]: I0122 08:20:43.013532 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hvbsx" Jan 22 08:20:43 crc kubenswrapper[4982]: I0122 08:20:43.633707 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hvbsx" Jan 22 08:20:44 crc kubenswrapper[4982]: I0122 08:20:44.250056 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hvbsx"] Jan 22 08:20:45 crc kubenswrapper[4982]: I0122 08:20:45.615071 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hvbsx" podUID="8e4d9541-597c-48bc-b2e7-34b658b89d28" containerName="registry-server" 
containerID="cri-o://9ca92011d8fc6f1e428f9f95f2f83939ca454db070d0c1e082c9b2cbfbb4ead4" gracePeriod=2 Jan 22 08:20:46 crc kubenswrapper[4982]: I0122 08:20:46.630721 4982 generic.go:334] "Generic (PLEG): container finished" podID="8e4d9541-597c-48bc-b2e7-34b658b89d28" containerID="9ca92011d8fc6f1e428f9f95f2f83939ca454db070d0c1e082c9b2cbfbb4ead4" exitCode=0 Jan 22 08:20:46 crc kubenswrapper[4982]: I0122 08:20:46.631527 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hvbsx" event={"ID":"8e4d9541-597c-48bc-b2e7-34b658b89d28","Type":"ContainerDied","Data":"9ca92011d8fc6f1e428f9f95f2f83939ca454db070d0c1e082c9b2cbfbb4ead4"} Jan 22 08:20:46 crc kubenswrapper[4982]: I0122 08:20:46.631557 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hvbsx" event={"ID":"8e4d9541-597c-48bc-b2e7-34b658b89d28","Type":"ContainerDied","Data":"f099997eb2daab0ede184243166a24f11f7ce0d437f8c3037d52d0d0dff60b77"} Jan 22 08:20:46 crc kubenswrapper[4982]: I0122 08:20:46.631572 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f099997eb2daab0ede184243166a24f11f7ce0d437f8c3037d52d0d0dff60b77" Jan 22 08:20:46 crc kubenswrapper[4982]: I0122 08:20:46.673292 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hvbsx" Jan 22 08:20:46 crc kubenswrapper[4982]: I0122 08:20:46.764383 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e4d9541-597c-48bc-b2e7-34b658b89d28-utilities\") pod \"8e4d9541-597c-48bc-b2e7-34b658b89d28\" (UID: \"8e4d9541-597c-48bc-b2e7-34b658b89d28\") " Jan 22 08:20:46 crc kubenswrapper[4982]: I0122 08:20:46.764696 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e4d9541-597c-48bc-b2e7-34b658b89d28-catalog-content\") pod \"8e4d9541-597c-48bc-b2e7-34b658b89d28\" (UID: \"8e4d9541-597c-48bc-b2e7-34b658b89d28\") " Jan 22 08:20:46 crc kubenswrapper[4982]: I0122 08:20:46.764895 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmr5b\" (UniqueName: \"kubernetes.io/projected/8e4d9541-597c-48bc-b2e7-34b658b89d28-kube-api-access-nmr5b\") pod \"8e4d9541-597c-48bc-b2e7-34b658b89d28\" (UID: \"8e4d9541-597c-48bc-b2e7-34b658b89d28\") " Jan 22 08:20:46 crc kubenswrapper[4982]: I0122 08:20:46.765473 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e4d9541-597c-48bc-b2e7-34b658b89d28-utilities" (OuterVolumeSpecName: "utilities") pod "8e4d9541-597c-48bc-b2e7-34b658b89d28" (UID: "8e4d9541-597c-48bc-b2e7-34b658b89d28"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 08:20:46 crc kubenswrapper[4982]: I0122 08:20:46.765758 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e4d9541-597c-48bc-b2e7-34b658b89d28-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 08:20:46 crc kubenswrapper[4982]: I0122 08:20:46.783189 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e4d9541-597c-48bc-b2e7-34b658b89d28-kube-api-access-nmr5b" (OuterVolumeSpecName: "kube-api-access-nmr5b") pod "8e4d9541-597c-48bc-b2e7-34b658b89d28" (UID: "8e4d9541-597c-48bc-b2e7-34b658b89d28"). 
InnerVolumeSpecName "kube-api-access-nmr5b". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 08:20:46 crc kubenswrapper[4982]: I0122 08:20:46.816014 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e4d9541-597c-48bc-b2e7-34b658b89d28-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8e4d9541-597c-48bc-b2e7-34b658b89d28" (UID: "8e4d9541-597c-48bc-b2e7-34b658b89d28"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 08:20:46 crc kubenswrapper[4982]: I0122 08:20:46.868323 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e4d9541-597c-48bc-b2e7-34b658b89d28-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 08:20:46 crc kubenswrapper[4982]: I0122 08:20:46.868668 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmr5b\" (UniqueName: \"kubernetes.io/projected/8e4d9541-597c-48bc-b2e7-34b658b89d28-kube-api-access-nmr5b\") on node \"crc\" DevicePath \"\"" Jan 22 08:20:47 crc kubenswrapper[4982]: I0122 08:20:47.642884 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hvbsx" Jan 22 08:20:47 crc kubenswrapper[4982]: I0122 08:20:47.682001 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hvbsx"] Jan 22 08:20:47 crc kubenswrapper[4982]: I0122 08:20:47.702259 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hvbsx"] Jan 22 08:20:47 crc kubenswrapper[4982]: I0122 08:20:47.739218 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e4d9541-597c-48bc-b2e7-34b658b89d28" path="/var/lib/kubelet/pods/8e4d9541-597c-48bc-b2e7-34b658b89d28/volumes" Jan 22 08:20:51 crc kubenswrapper[4982]: I0122 08:20:51.136587 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cb5bf"] Jan 22 08:20:51 crc kubenswrapper[4982]: E0122 08:20:51.138372 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e4d9541-597c-48bc-b2e7-34b658b89d28" containerName="extract-content" Jan 22 08:20:51 crc kubenswrapper[4982]: I0122 08:20:51.138394 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e4d9541-597c-48bc-b2e7-34b658b89d28" containerName="extract-content" Jan 22 08:20:51 crc kubenswrapper[4982]: E0122 08:20:51.138424 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e4d9541-597c-48bc-b2e7-34b658b89d28" containerName="extract-utilities" Jan 22 08:20:51 crc kubenswrapper[4982]: I0122 08:20:51.138435 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e4d9541-597c-48bc-b2e7-34b658b89d28" containerName="extract-utilities" Jan 22 08:20:51 crc kubenswrapper[4982]: E0122 08:20:51.138458 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e4d9541-597c-48bc-b2e7-34b658b89d28" containerName="registry-server" Jan 22 08:20:51 crc kubenswrapper[4982]: I0122 08:20:51.138469 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e4d9541-597c-48bc-b2e7-34b658b89d28" containerName="registry-server" Jan 22 08:20:51 crc kubenswrapper[4982]: I0122 08:20:51.138801 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e4d9541-597c-48bc-b2e7-34b658b89d28" containerName="registry-server" Jan 22 08:20:51 crc kubenswrapper[4982]: I0122 08:20:51.142588 4982 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cb5bf" Jan 22 08:20:51 crc kubenswrapper[4982]: I0122 08:20:51.161485 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cb5bf"] Jan 22 08:20:51 crc kubenswrapper[4982]: I0122 08:20:51.267040 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2ccae83-1c74-4cdb-b099-5dd5d783b561-catalog-content\") pod \"redhat-operators-cb5bf\" (UID: \"a2ccae83-1c74-4cdb-b099-5dd5d783b561\") " pod="openshift-marketplace/redhat-operators-cb5bf" Jan 22 08:20:51 crc kubenswrapper[4982]: I0122 08:20:51.267326 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ldk6\" (UniqueName: \"kubernetes.io/projected/a2ccae83-1c74-4cdb-b099-5dd5d783b561-kube-api-access-7ldk6\") pod \"redhat-operators-cb5bf\" (UID: \"a2ccae83-1c74-4cdb-b099-5dd5d783b561\") " pod="openshift-marketplace/redhat-operators-cb5bf" Jan 22 08:20:51 crc kubenswrapper[4982]: I0122 08:20:51.267694 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2ccae83-1c74-4cdb-b099-5dd5d783b561-utilities\") pod \"redhat-operators-cb5bf\" (UID: \"a2ccae83-1c74-4cdb-b099-5dd5d783b561\") " pod="openshift-marketplace/redhat-operators-cb5bf" Jan 22 08:20:51 crc kubenswrapper[4982]: I0122 08:20:51.369810 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2ccae83-1c74-4cdb-b099-5dd5d783b561-catalog-content\") pod \"redhat-operators-cb5bf\" (UID: \"a2ccae83-1c74-4cdb-b099-5dd5d783b561\") " pod="openshift-marketplace/redhat-operators-cb5bf" Jan 22 08:20:51 crc kubenswrapper[4982]: I0122 08:20:51.369963 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ldk6\" (UniqueName: \"kubernetes.io/projected/a2ccae83-1c74-4cdb-b099-5dd5d783b561-kube-api-access-7ldk6\") pod \"redhat-operators-cb5bf\" (UID: \"a2ccae83-1c74-4cdb-b099-5dd5d783b561\") " pod="openshift-marketplace/redhat-operators-cb5bf" Jan 22 08:20:51 crc kubenswrapper[4982]: I0122 08:20:51.370052 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2ccae83-1c74-4cdb-b099-5dd5d783b561-utilities\") pod \"redhat-operators-cb5bf\" (UID: \"a2ccae83-1c74-4cdb-b099-5dd5d783b561\") " pod="openshift-marketplace/redhat-operators-cb5bf" Jan 22 08:20:51 crc kubenswrapper[4982]: I0122 08:20:51.370553 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2ccae83-1c74-4cdb-b099-5dd5d783b561-utilities\") pod \"redhat-operators-cb5bf\" (UID: \"a2ccae83-1c74-4cdb-b099-5dd5d783b561\") " pod="openshift-marketplace/redhat-operators-cb5bf" Jan 22 08:20:51 crc kubenswrapper[4982]: I0122 08:20:51.370576 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2ccae83-1c74-4cdb-b099-5dd5d783b561-catalog-content\") pod \"redhat-operators-cb5bf\" (UID: \"a2ccae83-1c74-4cdb-b099-5dd5d783b561\") " pod="openshift-marketplace/redhat-operators-cb5bf" Jan 22 08:20:51 crc kubenswrapper[4982]: I0122 08:20:51.406058 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-7ldk6\" (UniqueName: \"kubernetes.io/projected/a2ccae83-1c74-4cdb-b099-5dd5d783b561-kube-api-access-7ldk6\") pod \"redhat-operators-cb5bf\" (UID: \"a2ccae83-1c74-4cdb-b099-5dd5d783b561\") " pod="openshift-marketplace/redhat-operators-cb5bf" Jan 22 08:20:51 crc kubenswrapper[4982]: I0122 08:20:51.472129 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cb5bf" Jan 22 08:20:51 crc kubenswrapper[4982]: W0122 08:20:51.962625 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda2ccae83_1c74_4cdb_b099_5dd5d783b561.slice/crio-29bd6f3278f2bdbae85e7bef45174e732858822025a70d6c7b8d23d60fff162c WatchSource:0}: Error finding container 29bd6f3278f2bdbae85e7bef45174e732858822025a70d6c7b8d23d60fff162c: Status 404 returned error can't find the container with id 29bd6f3278f2bdbae85e7bef45174e732858822025a70d6c7b8d23d60fff162c Jan 22 08:20:51 crc kubenswrapper[4982]: I0122 08:20:51.963155 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cb5bf"] Jan 22 08:20:52 crc kubenswrapper[4982]: I0122 08:20:52.713194 4982 generic.go:334] "Generic (PLEG): container finished" podID="a2ccae83-1c74-4cdb-b099-5dd5d783b561" containerID="1fa2f17efb3010e48cdc71c7f5ab5ad45a1aebe14db6ccd27aebed79bef0216a" exitCode=0 Jan 22 08:20:52 crc kubenswrapper[4982]: I0122 08:20:52.713570 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cb5bf" event={"ID":"a2ccae83-1c74-4cdb-b099-5dd5d783b561","Type":"ContainerDied","Data":"1fa2f17efb3010e48cdc71c7f5ab5ad45a1aebe14db6ccd27aebed79bef0216a"} Jan 22 08:20:52 crc kubenswrapper[4982]: I0122 08:20:52.713607 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cb5bf" event={"ID":"a2ccae83-1c74-4cdb-b099-5dd5d783b561","Type":"ContainerStarted","Data":"29bd6f3278f2bdbae85e7bef45174e732858822025a70d6c7b8d23d60fff162c"} Jan 22 08:20:53 crc kubenswrapper[4982]: I0122 08:20:53.738631 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cb5bf" event={"ID":"a2ccae83-1c74-4cdb-b099-5dd5d783b561","Type":"ContainerStarted","Data":"6eed0d4817de634015199e891773d137a8c94ec305478dd077e1165ecebd4b71"} Jan 22 08:20:55 crc kubenswrapper[4982]: I0122 08:20:55.760206 4982 generic.go:334] "Generic (PLEG): container finished" podID="a2ccae83-1c74-4cdb-b099-5dd5d783b561" containerID="6eed0d4817de634015199e891773d137a8c94ec305478dd077e1165ecebd4b71" exitCode=0 Jan 22 08:20:55 crc kubenswrapper[4982]: I0122 08:20:55.760829 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cb5bf" event={"ID":"a2ccae83-1c74-4cdb-b099-5dd5d783b561","Type":"ContainerDied","Data":"6eed0d4817de634015199e891773d137a8c94ec305478dd077e1165ecebd4b71"} Jan 22 08:20:56 crc kubenswrapper[4982]: I0122 08:20:56.773954 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cb5bf" event={"ID":"a2ccae83-1c74-4cdb-b099-5dd5d783b561","Type":"ContainerStarted","Data":"5c0800e43d87fb16ea219cbf77a4b6ffa0e8824c34df6e03fb460791cdfa8d8d"} Jan 22 08:21:01 crc kubenswrapper[4982]: I0122 08:21:01.472380 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cb5bf" Jan 22 08:21:01 crc kubenswrapper[4982]: I0122 08:21:01.475264 4982 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cb5bf" Jan 22 08:21:02 crc kubenswrapper[4982]: I0122 08:21:02.536601 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-cb5bf" podUID="a2ccae83-1c74-4cdb-b099-5dd5d783b561" containerName="registry-server" probeResult="failure" output=< Jan 22 08:21:02 crc kubenswrapper[4982]: timeout: failed to connect service ":50051" within 1s Jan 22 08:21:02 crc kubenswrapper[4982]: > Jan 22 08:21:11 crc kubenswrapper[4982]: I0122 08:21:11.526016 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-cb5bf" Jan 22 08:21:11 crc kubenswrapper[4982]: I0122 08:21:11.553719 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-cb5bf" podStartSLOduration=17.121750298 podStartE2EDuration="20.553701017s" podCreationTimestamp="2026-01-22 08:20:51 +0000 UTC" firstStartedPulling="2026-01-22 08:20:52.717397424 +0000 UTC m=+9313.556035467" lastFinishedPulling="2026-01-22 08:20:56.149348163 +0000 UTC m=+9316.987986186" observedRunningTime="2026-01-22 08:20:56.79217846 +0000 UTC m=+9317.630816473" watchObservedRunningTime="2026-01-22 08:21:11.553701017 +0000 UTC m=+9332.392339020" Jan 22 08:21:11 crc kubenswrapper[4982]: I0122 08:21:11.604623 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cb5bf" Jan 22 08:21:11 crc kubenswrapper[4982]: I0122 08:21:11.763368 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cb5bf"] Jan 22 08:21:12 crc kubenswrapper[4982]: I0122 08:21:12.964225 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-cb5bf" podUID="a2ccae83-1c74-4cdb-b099-5dd5d783b561" containerName="registry-server" containerID="cri-o://5c0800e43d87fb16ea219cbf77a4b6ffa0e8824c34df6e03fb460791cdfa8d8d" gracePeriod=2 Jan 22 08:21:13 crc kubenswrapper[4982]: I0122 08:21:13.477640 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cb5bf" Jan 22 08:21:13 crc kubenswrapper[4982]: I0122 08:21:13.604147 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2ccae83-1c74-4cdb-b099-5dd5d783b561-catalog-content\") pod \"a2ccae83-1c74-4cdb-b099-5dd5d783b561\" (UID: \"a2ccae83-1c74-4cdb-b099-5dd5d783b561\") " Jan 22 08:21:13 crc kubenswrapper[4982]: I0122 08:21:13.604357 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2ccae83-1c74-4cdb-b099-5dd5d783b561-utilities\") pod \"a2ccae83-1c74-4cdb-b099-5dd5d783b561\" (UID: \"a2ccae83-1c74-4cdb-b099-5dd5d783b561\") " Jan 22 08:21:13 crc kubenswrapper[4982]: I0122 08:21:13.604480 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7ldk6\" (UniqueName: \"kubernetes.io/projected/a2ccae83-1c74-4cdb-b099-5dd5d783b561-kube-api-access-7ldk6\") pod \"a2ccae83-1c74-4cdb-b099-5dd5d783b561\" (UID: \"a2ccae83-1c74-4cdb-b099-5dd5d783b561\") " Jan 22 08:21:13 crc kubenswrapper[4982]: I0122 08:21:13.605608 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2ccae83-1c74-4cdb-b099-5dd5d783b561-utilities" (OuterVolumeSpecName: "utilities") pod "a2ccae83-1c74-4cdb-b099-5dd5d783b561" (UID: "a2ccae83-1c74-4cdb-b099-5dd5d783b561"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 08:21:13 crc kubenswrapper[4982]: I0122 08:21:13.625004 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2ccae83-1c74-4cdb-b099-5dd5d783b561-kube-api-access-7ldk6" (OuterVolumeSpecName: "kube-api-access-7ldk6") pod "a2ccae83-1c74-4cdb-b099-5dd5d783b561" (UID: "a2ccae83-1c74-4cdb-b099-5dd5d783b561"). InnerVolumeSpecName "kube-api-access-7ldk6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 08:21:13 crc kubenswrapper[4982]: I0122 08:21:13.707059 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2ccae83-1c74-4cdb-b099-5dd5d783b561-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 08:21:13 crc kubenswrapper[4982]: I0122 08:21:13.707092 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7ldk6\" (UniqueName: \"kubernetes.io/projected/a2ccae83-1c74-4cdb-b099-5dd5d783b561-kube-api-access-7ldk6\") on node \"crc\" DevicePath \"\"" Jan 22 08:21:13 crc kubenswrapper[4982]: I0122 08:21:13.744971 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2ccae83-1c74-4cdb-b099-5dd5d783b561-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a2ccae83-1c74-4cdb-b099-5dd5d783b561" (UID: "a2ccae83-1c74-4cdb-b099-5dd5d783b561"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 08:21:13 crc kubenswrapper[4982]: I0122 08:21:13.809531 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2ccae83-1c74-4cdb-b099-5dd5d783b561-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 08:21:13 crc kubenswrapper[4982]: I0122 08:21:13.976733 4982 generic.go:334] "Generic (PLEG): container finished" podID="a2ccae83-1c74-4cdb-b099-5dd5d783b561" containerID="5c0800e43d87fb16ea219cbf77a4b6ffa0e8824c34df6e03fb460791cdfa8d8d" exitCode=0 Jan 22 08:21:13 crc kubenswrapper[4982]: I0122 08:21:13.976793 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cb5bf" event={"ID":"a2ccae83-1c74-4cdb-b099-5dd5d783b561","Type":"ContainerDied","Data":"5c0800e43d87fb16ea219cbf77a4b6ffa0e8824c34df6e03fb460791cdfa8d8d"} Jan 22 08:21:13 crc kubenswrapper[4982]: I0122 08:21:13.976807 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cb5bf" Jan 22 08:21:13 crc kubenswrapper[4982]: I0122 08:21:13.976829 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cb5bf" event={"ID":"a2ccae83-1c74-4cdb-b099-5dd5d783b561","Type":"ContainerDied","Data":"29bd6f3278f2bdbae85e7bef45174e732858822025a70d6c7b8d23d60fff162c"} Jan 22 08:21:13 crc kubenswrapper[4982]: I0122 08:21:13.976870 4982 scope.go:117] "RemoveContainer" containerID="5c0800e43d87fb16ea219cbf77a4b6ffa0e8824c34df6e03fb460791cdfa8d8d" Jan 22 08:21:14 crc kubenswrapper[4982]: I0122 08:21:14.016617 4982 scope.go:117] "RemoveContainer" containerID="6eed0d4817de634015199e891773d137a8c94ec305478dd077e1165ecebd4b71" Jan 22 08:21:14 crc kubenswrapper[4982]: I0122 08:21:14.042808 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cb5bf"] Jan 22 08:21:14 crc kubenswrapper[4982]: I0122 08:21:14.054228 4982 scope.go:117] "RemoveContainer" containerID="1fa2f17efb3010e48cdc71c7f5ab5ad45a1aebe14db6ccd27aebed79bef0216a" Jan 22 08:21:14 crc kubenswrapper[4982]: I0122 08:21:14.057096 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-cb5bf"] Jan 22 08:21:14 crc kubenswrapper[4982]: I0122 08:21:14.102535 4982 scope.go:117] "RemoveContainer" containerID="5c0800e43d87fb16ea219cbf77a4b6ffa0e8824c34df6e03fb460791cdfa8d8d" Jan 22 08:21:14 crc kubenswrapper[4982]: E0122 08:21:14.102976 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c0800e43d87fb16ea219cbf77a4b6ffa0e8824c34df6e03fb460791cdfa8d8d\": container with ID starting with 5c0800e43d87fb16ea219cbf77a4b6ffa0e8824c34df6e03fb460791cdfa8d8d not found: ID does not exist" containerID="5c0800e43d87fb16ea219cbf77a4b6ffa0e8824c34df6e03fb460791cdfa8d8d" Jan 22 08:21:14 crc kubenswrapper[4982]: I0122 08:21:14.103018 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c0800e43d87fb16ea219cbf77a4b6ffa0e8824c34df6e03fb460791cdfa8d8d"} err="failed to get container status \"5c0800e43d87fb16ea219cbf77a4b6ffa0e8824c34df6e03fb460791cdfa8d8d\": rpc error: code = NotFound desc = could not find container \"5c0800e43d87fb16ea219cbf77a4b6ffa0e8824c34df6e03fb460791cdfa8d8d\": container with ID starting with 5c0800e43d87fb16ea219cbf77a4b6ffa0e8824c34df6e03fb460791cdfa8d8d not found: ID does not exist" Jan 22 08:21:14 crc 
kubenswrapper[4982]: I0122 08:21:14.103053 4982 scope.go:117] "RemoveContainer" containerID="6eed0d4817de634015199e891773d137a8c94ec305478dd077e1165ecebd4b71" Jan 22 08:21:14 crc kubenswrapper[4982]: E0122 08:21:14.103437 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6eed0d4817de634015199e891773d137a8c94ec305478dd077e1165ecebd4b71\": container with ID starting with 6eed0d4817de634015199e891773d137a8c94ec305478dd077e1165ecebd4b71 not found: ID does not exist" containerID="6eed0d4817de634015199e891773d137a8c94ec305478dd077e1165ecebd4b71" Jan 22 08:21:14 crc kubenswrapper[4982]: I0122 08:21:14.103492 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6eed0d4817de634015199e891773d137a8c94ec305478dd077e1165ecebd4b71"} err="failed to get container status \"6eed0d4817de634015199e891773d137a8c94ec305478dd077e1165ecebd4b71\": rpc error: code = NotFound desc = could not find container \"6eed0d4817de634015199e891773d137a8c94ec305478dd077e1165ecebd4b71\": container with ID starting with 6eed0d4817de634015199e891773d137a8c94ec305478dd077e1165ecebd4b71 not found: ID does not exist" Jan 22 08:21:14 crc kubenswrapper[4982]: I0122 08:21:14.103529 4982 scope.go:117] "RemoveContainer" containerID="1fa2f17efb3010e48cdc71c7f5ab5ad45a1aebe14db6ccd27aebed79bef0216a" Jan 22 08:21:14 crc kubenswrapper[4982]: E0122 08:21:14.103932 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1fa2f17efb3010e48cdc71c7f5ab5ad45a1aebe14db6ccd27aebed79bef0216a\": container with ID starting with 1fa2f17efb3010e48cdc71c7f5ab5ad45a1aebe14db6ccd27aebed79bef0216a not found: ID does not exist" containerID="1fa2f17efb3010e48cdc71c7f5ab5ad45a1aebe14db6ccd27aebed79bef0216a" Jan 22 08:21:14 crc kubenswrapper[4982]: I0122 08:21:14.103981 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1fa2f17efb3010e48cdc71c7f5ab5ad45a1aebe14db6ccd27aebed79bef0216a"} err="failed to get container status \"1fa2f17efb3010e48cdc71c7f5ab5ad45a1aebe14db6ccd27aebed79bef0216a\": rpc error: code = NotFound desc = could not find container \"1fa2f17efb3010e48cdc71c7f5ab5ad45a1aebe14db6ccd27aebed79bef0216a\": container with ID starting with 1fa2f17efb3010e48cdc71c7f5ab5ad45a1aebe14db6ccd27aebed79bef0216a not found: ID does not exist" Jan 22 08:21:15 crc kubenswrapper[4982]: I0122 08:21:15.753729 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2ccae83-1c74-4cdb-b099-5dd5d783b561" path="/var/lib/kubelet/pods/a2ccae83-1c74-4cdb-b099-5dd5d783b561/volumes" Jan 22 08:21:18 crc kubenswrapper[4982]: I0122 08:21:18.973591 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 08:21:18 crc kubenswrapper[4982]: I0122 08:21:18.974140 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 08:21:48 crc kubenswrapper[4982]: I0122 08:21:48.974526 4982 patch_prober.go:28] interesting 
pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 08:21:48 crc kubenswrapper[4982]: I0122 08:21:48.975192 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 08:22:18 crc kubenswrapper[4982]: I0122 08:22:18.974294 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 08:22:18 crc kubenswrapper[4982]: I0122 08:22:18.974957 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 08:22:18 crc kubenswrapper[4982]: I0122 08:22:18.975036 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 08:22:18 crc kubenswrapper[4982]: I0122 08:22:18.976289 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"200707dc0938630f0897e0c4992e9deb8308b5b713cde5d988303b2702ee4a31"} pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 08:22:18 crc kubenswrapper[4982]: I0122 08:22:18.976408 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" containerID="cri-o://200707dc0938630f0897e0c4992e9deb8308b5b713cde5d988303b2702ee4a31" gracePeriod=600 Jan 22 08:22:19 crc kubenswrapper[4982]: I0122 08:22:19.786361 4982 generic.go:334] "Generic (PLEG): container finished" podID="2829369e-72ba-4637-853b-88f5cf242a0e" containerID="200707dc0938630f0897e0c4992e9deb8308b5b713cde5d988303b2702ee4a31" exitCode=0 Jan 22 08:22:19 crc kubenswrapper[4982]: I0122 08:22:19.786453 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerDied","Data":"200707dc0938630f0897e0c4992e9deb8308b5b713cde5d988303b2702ee4a31"} Jan 22 08:22:19 crc kubenswrapper[4982]: I0122 08:22:19.787156 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"aca388898cd21a349b29dc1e993546f5c68af05cbd20badd7b2970ef102aacf3"} Jan 22 08:22:19 crc kubenswrapper[4982]: I0122 08:22:19.787266 4982 scope.go:117] "RemoveContainer" containerID="41a2e8f10c29d84ed0900a2d6e5aaabf6168aaf17baae9c7f573edf1e26d7aa4" Jan 22 08:23:30 
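The three liveness failures above land exactly 30s apart (08:21:18, 08:21:48, 08:22:18), and the restart decision follows the third, which implies periodSeconds=30 and failureThreshold=3 for this probe. Those values are inferred from the timestamps here rather than read from the pod spec; the sketch below just makes the timeline arithmetic explicit:

// liveness_timing.go -- time from first liveness failure to the restart decision,
// under the period/threshold inferred from the log above.
package main

import (
	"fmt"
	"time"
)

func main() {
	period := 30 * time.Second // inferred: failures 30s apart
	failureThreshold := 3      // inferred: restart after the third failure
	// Failures at t=0, t=period, t=2*period; the Nth triggers the restart.
	toRestart := time.Duration(failureThreshold-1) * period
	fmt.Printf("first failure + %v = restart decision (08:21:18 + 1m0s = 08:22:18, as logged)\n", toRestart)
}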
Jan 22 08:23:30 crc kubenswrapper[4982]: I0122 08:23:30.526099 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-stptx"]
Jan 22 08:23:30 crc kubenswrapper[4982]: E0122 08:23:30.527366 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2ccae83-1c74-4cdb-b099-5dd5d783b561" containerName="registry-server"
Jan 22 08:23:30 crc kubenswrapper[4982]: I0122 08:23:30.527386 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2ccae83-1c74-4cdb-b099-5dd5d783b561" containerName="registry-server"
Jan 22 08:23:30 crc kubenswrapper[4982]: E0122 08:23:30.527401 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2ccae83-1c74-4cdb-b099-5dd5d783b561" containerName="extract-content"
Jan 22 08:23:30 crc kubenswrapper[4982]: I0122 08:23:30.527410 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2ccae83-1c74-4cdb-b099-5dd5d783b561" containerName="extract-content"
Jan 22 08:23:30 crc kubenswrapper[4982]: E0122 08:23:30.527455 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2ccae83-1c74-4cdb-b099-5dd5d783b561" containerName="extract-utilities"
Jan 22 08:23:30 crc kubenswrapper[4982]: I0122 08:23:30.527463 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2ccae83-1c74-4cdb-b099-5dd5d783b561" containerName="extract-utilities"
Jan 22 08:23:30 crc kubenswrapper[4982]: I0122 08:23:30.527723 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2ccae83-1c74-4cdb-b099-5dd5d783b561" containerName="registry-server"
Jan 22 08:23:30 crc kubenswrapper[4982]: I0122 08:23:30.533279 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-stptx"
Jan 22 08:23:30 crc kubenswrapper[4982]: I0122 08:23:30.545499 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-stptx"]
Jan 22 08:23:30 crc kubenswrapper[4982]: I0122 08:23:30.632954 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/717c1c49-ea15-47e5-af6d-dd1b0834a558-catalog-content\") pod \"certified-operators-stptx\" (UID: \"717c1c49-ea15-47e5-af6d-dd1b0834a558\") " pod="openshift-marketplace/certified-operators-stptx"
Jan 22 08:23:30 crc kubenswrapper[4982]: I0122 08:23:30.633287 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/717c1c49-ea15-47e5-af6d-dd1b0834a558-utilities\") pod \"certified-operators-stptx\" (UID: \"717c1c49-ea15-47e5-af6d-dd1b0834a558\") " pod="openshift-marketplace/certified-operators-stptx"
Jan 22 08:23:30 crc kubenswrapper[4982]: I0122 08:23:30.633358 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5zvp\" (UniqueName: \"kubernetes.io/projected/717c1c49-ea15-47e5-af6d-dd1b0834a558-kube-api-access-b5zvp\") pod \"certified-operators-stptx\" (UID: \"717c1c49-ea15-47e5-af6d-dd1b0834a558\") " pod="openshift-marketplace/certified-operators-stptx"
Jan 22 08:23:30 crc kubenswrapper[4982]: I0122 08:23:30.737468 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/717c1c49-ea15-47e5-af6d-dd1b0834a558-catalog-content\") pod \"certified-operators-stptx\" (UID: \"717c1c49-ea15-47e5-af6d-dd1b0834a558\") " pod="openshift-marketplace/certified-operators-stptx"
Jan 22 08:23:30 crc kubenswrapper[4982]: I0122 08:23:30.738376 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/717c1c49-ea15-47e5-af6d-dd1b0834a558-utilities\") pod \"certified-operators-stptx\" (UID: \"717c1c49-ea15-47e5-af6d-dd1b0834a558\") " pod="openshift-marketplace/certified-operators-stptx"
Jan 22 08:23:30 crc kubenswrapper[4982]: I0122 08:23:30.738522 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5zvp\" (UniqueName: \"kubernetes.io/projected/717c1c49-ea15-47e5-af6d-dd1b0834a558-kube-api-access-b5zvp\") pod \"certified-operators-stptx\" (UID: \"717c1c49-ea15-47e5-af6d-dd1b0834a558\") " pod="openshift-marketplace/certified-operators-stptx"
Jan 22 08:23:30 crc kubenswrapper[4982]: I0122 08:23:30.739054 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/717c1c49-ea15-47e5-af6d-dd1b0834a558-utilities\") pod \"certified-operators-stptx\" (UID: \"717c1c49-ea15-47e5-af6d-dd1b0834a558\") " pod="openshift-marketplace/certified-operators-stptx"
Jan 22 08:23:30 crc kubenswrapper[4982]: I0122 08:23:30.739064 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/717c1c49-ea15-47e5-af6d-dd1b0834a558-catalog-content\") pod \"certified-operators-stptx\" (UID: \"717c1c49-ea15-47e5-af6d-dd1b0834a558\") " pod="openshift-marketplace/certified-operators-stptx"
Jan 22 08:23:31 crc kubenswrapper[4982]: I0122 08:23:31.051218 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5zvp\" (UniqueName: \"kubernetes.io/projected/717c1c49-ea15-47e5-af6d-dd1b0834a558-kube-api-access-b5zvp\") pod \"certified-operators-stptx\" (UID: \"717c1c49-ea15-47e5-af6d-dd1b0834a558\") " pod="openshift-marketplace/certified-operators-stptx"
Jan 22 08:23:31 crc kubenswrapper[4982]: I0122 08:23:31.168052 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-stptx"
Jan 22 08:23:31 crc kubenswrapper[4982]: I0122 08:23:31.687098 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-stptx"]
Jan 22 08:23:31 crc kubenswrapper[4982]: I0122 08:23:31.802212 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-stptx" event={"ID":"717c1c49-ea15-47e5-af6d-dd1b0834a558","Type":"ContainerStarted","Data":"b76254365ad6cf5926ee718209eb4b1f4932313856be665292498b4a4e15e1af"}
Jan 22 08:23:32 crc kubenswrapper[4982]: I0122 08:23:32.820002 4982 generic.go:334] "Generic (PLEG): container finished" podID="717c1c49-ea15-47e5-af6d-dd1b0834a558" containerID="6b11453f948799eec144b014cd45f805212f6d55eed0ab0cdb42373af1c65ebc" exitCode=0
Jan 22 08:23:32 crc kubenswrapper[4982]: I0122 08:23:32.820094 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-stptx" event={"ID":"717c1c49-ea15-47e5-af6d-dd1b0834a558","Type":"ContainerDied","Data":"6b11453f948799eec144b014cd45f805212f6d55eed0ab0cdb42373af1c65ebc"}
Jan 22 08:23:33 crc kubenswrapper[4982]: I0122 08:23:33.833538 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-stptx" event={"ID":"717c1c49-ea15-47e5-af6d-dd1b0834a558","Type":"ContainerStarted","Data":"f9861901b1a644374c051bc57c4111a20f70704294e5a195974dc96b19f6bf38"}
Jan 22 08:23:34 crc kubenswrapper[4982]: I0122 08:23:34.853419 4982 generic.go:334] "Generic (PLEG): container finished" podID="717c1c49-ea15-47e5-af6d-dd1b0834a558" containerID="f9861901b1a644374c051bc57c4111a20f70704294e5a195974dc96b19f6bf38" exitCode=0
Jan 22 08:23:34 crc kubenswrapper[4982]: I0122 08:23:34.853488 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-stptx" event={"ID":"717c1c49-ea15-47e5-af6d-dd1b0834a558","Type":"ContainerDied","Data":"f9861901b1a644374c051bc57c4111a20f70704294e5a195974dc96b19f6bf38"}
Jan 22 08:23:35 crc kubenswrapper[4982]: I0122 08:23:35.867466 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-stptx" event={"ID":"717c1c49-ea15-47e5-af6d-dd1b0834a558","Type":"ContainerStarted","Data":"896d9c020f124f242e8c3b62ab00f7cf0dfc403a642edfbe00ba38f2231333cd"}
Jan 22 08:23:35 crc kubenswrapper[4982]: I0122 08:23:35.898683 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-stptx" podStartSLOduration=3.454390294 podStartE2EDuration="5.898658705s" podCreationTimestamp="2026-01-22 08:23:30 +0000 UTC" firstStartedPulling="2026-01-22 08:23:32.824752855 +0000 UTC m=+9473.663390868" lastFinishedPulling="2026-01-22 08:23:35.269021246 +0000 UTC m=+9476.107659279" observedRunningTime="2026-01-22 08:23:35.893621408 +0000 UTC m=+9476.732259441" watchObservedRunningTime="2026-01-22 08:23:35.898658705 +0000 UTC m=+9476.737296718"
Jan 22 08:23:41 crc kubenswrapper[4982]: I0122 08:23:41.168524 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-stptx"
Jan 22 08:23:41 crc kubenswrapper[4982]: I0122 08:23:41.169057 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-stptx"
pod="openshift-marketplace/certified-operators-stptx" Jan 22 08:23:42 crc kubenswrapper[4982]: I0122 08:23:42.097640 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-stptx" Jan 22 08:23:42 crc kubenswrapper[4982]: I0122 08:23:42.268007 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-stptx"] Jan 22 08:23:43 crc kubenswrapper[4982]: I0122 08:23:43.950618 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-stptx" podUID="717c1c49-ea15-47e5-af6d-dd1b0834a558" containerName="registry-server" containerID="cri-o://896d9c020f124f242e8c3b62ab00f7cf0dfc403a642edfbe00ba38f2231333cd" gracePeriod=2 Jan 22 08:23:44 crc kubenswrapper[4982]: I0122 08:23:44.542754 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-stptx" Jan 22 08:23:44 crc kubenswrapper[4982]: I0122 08:23:44.593281 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/717c1c49-ea15-47e5-af6d-dd1b0834a558-catalog-content\") pod \"717c1c49-ea15-47e5-af6d-dd1b0834a558\" (UID: \"717c1c49-ea15-47e5-af6d-dd1b0834a558\") " Jan 22 08:23:44 crc kubenswrapper[4982]: I0122 08:23:44.593499 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b5zvp\" (UniqueName: \"kubernetes.io/projected/717c1c49-ea15-47e5-af6d-dd1b0834a558-kube-api-access-b5zvp\") pod \"717c1c49-ea15-47e5-af6d-dd1b0834a558\" (UID: \"717c1c49-ea15-47e5-af6d-dd1b0834a558\") " Jan 22 08:23:44 crc kubenswrapper[4982]: I0122 08:23:44.593677 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/717c1c49-ea15-47e5-af6d-dd1b0834a558-utilities\") pod \"717c1c49-ea15-47e5-af6d-dd1b0834a558\" (UID: \"717c1c49-ea15-47e5-af6d-dd1b0834a558\") " Jan 22 08:23:44 crc kubenswrapper[4982]: I0122 08:23:44.594677 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/717c1c49-ea15-47e5-af6d-dd1b0834a558-utilities" (OuterVolumeSpecName: "utilities") pod "717c1c49-ea15-47e5-af6d-dd1b0834a558" (UID: "717c1c49-ea15-47e5-af6d-dd1b0834a558"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 08:23:44 crc kubenswrapper[4982]: I0122 08:23:44.643188 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/717c1c49-ea15-47e5-af6d-dd1b0834a558-kube-api-access-b5zvp" (OuterVolumeSpecName: "kube-api-access-b5zvp") pod "717c1c49-ea15-47e5-af6d-dd1b0834a558" (UID: "717c1c49-ea15-47e5-af6d-dd1b0834a558"). InnerVolumeSpecName "kube-api-access-b5zvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 08:23:44 crc kubenswrapper[4982]: I0122 08:23:44.647750 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/717c1c49-ea15-47e5-af6d-dd1b0834a558-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "717c1c49-ea15-47e5-af6d-dd1b0834a558" (UID: "717c1c49-ea15-47e5-af6d-dd1b0834a558"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 08:23:44 crc kubenswrapper[4982]: I0122 08:23:44.696932 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b5zvp\" (UniqueName: \"kubernetes.io/projected/717c1c49-ea15-47e5-af6d-dd1b0834a558-kube-api-access-b5zvp\") on node \"crc\" DevicePath \"\"" Jan 22 08:23:44 crc kubenswrapper[4982]: I0122 08:23:44.696968 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/717c1c49-ea15-47e5-af6d-dd1b0834a558-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 08:23:44 crc kubenswrapper[4982]: I0122 08:23:44.696981 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/717c1c49-ea15-47e5-af6d-dd1b0834a558-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 08:23:44 crc kubenswrapper[4982]: I0122 08:23:44.962417 4982 generic.go:334] "Generic (PLEG): container finished" podID="717c1c49-ea15-47e5-af6d-dd1b0834a558" containerID="896d9c020f124f242e8c3b62ab00f7cf0dfc403a642edfbe00ba38f2231333cd" exitCode=0 Jan 22 08:23:44 crc kubenswrapper[4982]: I0122 08:23:44.962475 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-stptx" Jan 22 08:23:44 crc kubenswrapper[4982]: I0122 08:23:44.962506 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-stptx" event={"ID":"717c1c49-ea15-47e5-af6d-dd1b0834a558","Type":"ContainerDied","Data":"896d9c020f124f242e8c3b62ab00f7cf0dfc403a642edfbe00ba38f2231333cd"} Jan 22 08:23:44 crc kubenswrapper[4982]: I0122 08:23:44.962914 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-stptx" event={"ID":"717c1c49-ea15-47e5-af6d-dd1b0834a558","Type":"ContainerDied","Data":"b76254365ad6cf5926ee718209eb4b1f4932313856be665292498b4a4e15e1af"} Jan 22 08:23:44 crc kubenswrapper[4982]: I0122 08:23:44.962938 4982 scope.go:117] "RemoveContainer" containerID="896d9c020f124f242e8c3b62ab00f7cf0dfc403a642edfbe00ba38f2231333cd" Jan 22 08:23:44 crc kubenswrapper[4982]: I0122 08:23:44.989221 4982 scope.go:117] "RemoveContainer" containerID="f9861901b1a644374c051bc57c4111a20f70704294e5a195974dc96b19f6bf38" Jan 22 08:23:45 crc kubenswrapper[4982]: I0122 08:23:45.041338 4982 scope.go:117] "RemoveContainer" containerID="6b11453f948799eec144b014cd45f805212f6d55eed0ab0cdb42373af1c65ebc" Jan 22 08:23:45 crc kubenswrapper[4982]: I0122 08:23:45.041648 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-stptx"] Jan 22 08:23:45 crc kubenswrapper[4982]: I0122 08:23:45.085348 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-stptx"] Jan 22 08:23:45 crc kubenswrapper[4982]: I0122 08:23:45.096214 4982 scope.go:117] "RemoveContainer" containerID="896d9c020f124f242e8c3b62ab00f7cf0dfc403a642edfbe00ba38f2231333cd" Jan 22 08:23:45 crc kubenswrapper[4982]: E0122 08:23:45.096647 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"896d9c020f124f242e8c3b62ab00f7cf0dfc403a642edfbe00ba38f2231333cd\": container with ID starting with 896d9c020f124f242e8c3b62ab00f7cf0dfc403a642edfbe00ba38f2231333cd not found: ID does not exist" containerID="896d9c020f124f242e8c3b62ab00f7cf0dfc403a642edfbe00ba38f2231333cd" Jan 22 08:23:45 crc kubenswrapper[4982]: I0122 08:23:45.096762 
4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"896d9c020f124f242e8c3b62ab00f7cf0dfc403a642edfbe00ba38f2231333cd"} err="failed to get container status \"896d9c020f124f242e8c3b62ab00f7cf0dfc403a642edfbe00ba38f2231333cd\": rpc error: code = NotFound desc = could not find container \"896d9c020f124f242e8c3b62ab00f7cf0dfc403a642edfbe00ba38f2231333cd\": container with ID starting with 896d9c020f124f242e8c3b62ab00f7cf0dfc403a642edfbe00ba38f2231333cd not found: ID does not exist" Jan 22 08:23:45 crc kubenswrapper[4982]: I0122 08:23:45.096839 4982 scope.go:117] "RemoveContainer" containerID="f9861901b1a644374c051bc57c4111a20f70704294e5a195974dc96b19f6bf38" Jan 22 08:23:45 crc kubenswrapper[4982]: E0122 08:23:45.097216 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9861901b1a644374c051bc57c4111a20f70704294e5a195974dc96b19f6bf38\": container with ID starting with f9861901b1a644374c051bc57c4111a20f70704294e5a195974dc96b19f6bf38 not found: ID does not exist" containerID="f9861901b1a644374c051bc57c4111a20f70704294e5a195974dc96b19f6bf38" Jan 22 08:23:45 crc kubenswrapper[4982]: I0122 08:23:45.097251 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9861901b1a644374c051bc57c4111a20f70704294e5a195974dc96b19f6bf38"} err="failed to get container status \"f9861901b1a644374c051bc57c4111a20f70704294e5a195974dc96b19f6bf38\": rpc error: code = NotFound desc = could not find container \"f9861901b1a644374c051bc57c4111a20f70704294e5a195974dc96b19f6bf38\": container with ID starting with f9861901b1a644374c051bc57c4111a20f70704294e5a195974dc96b19f6bf38 not found: ID does not exist" Jan 22 08:23:45 crc kubenswrapper[4982]: I0122 08:23:45.097269 4982 scope.go:117] "RemoveContainer" containerID="6b11453f948799eec144b014cd45f805212f6d55eed0ab0cdb42373af1c65ebc" Jan 22 08:23:45 crc kubenswrapper[4982]: E0122 08:23:45.097470 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b11453f948799eec144b014cd45f805212f6d55eed0ab0cdb42373af1c65ebc\": container with ID starting with 6b11453f948799eec144b014cd45f805212f6d55eed0ab0cdb42373af1c65ebc not found: ID does not exist" containerID="6b11453f948799eec144b014cd45f805212f6d55eed0ab0cdb42373af1c65ebc" Jan 22 08:23:45 crc kubenswrapper[4982]: I0122 08:23:45.097494 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b11453f948799eec144b014cd45f805212f6d55eed0ab0cdb42373af1c65ebc"} err="failed to get container status \"6b11453f948799eec144b014cd45f805212f6d55eed0ab0cdb42373af1c65ebc\": rpc error: code = NotFound desc = could not find container \"6b11453f948799eec144b014cd45f805212f6d55eed0ab0cdb42373af1c65ebc\": container with ID starting with 6b11453f948799eec144b014cd45f805212f6d55eed0ab0cdb42373af1c65ebc not found: ID does not exist" Jan 22 08:23:45 crc kubenswrapper[4982]: I0122 08:23:45.737796 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="717c1c49-ea15-47e5-af6d-dd1b0834a558" path="/var/lib/kubelet/pods/717c1c49-ea15-47e5-af6d-dd1b0834a558/volumes" Jan 22 08:24:48 crc kubenswrapper[4982]: I0122 08:24:48.974555 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 08:24:48 crc kubenswrapper[4982]: I0122 08:24:48.975166 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 08:25:18 crc kubenswrapper[4982]: I0122 08:25:18.973543 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 08:25:18 crc kubenswrapper[4982]: I0122 08:25:18.974137 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 08:25:48 crc kubenswrapper[4982]: I0122 08:25:48.973687 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 08:25:48 crc kubenswrapper[4982]: I0122 08:25:48.974117 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 08:25:48 crc kubenswrapper[4982]: I0122 08:25:48.974222 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 08:25:48 crc kubenswrapper[4982]: I0122 08:25:48.974952 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"aca388898cd21a349b29dc1e993546f5c68af05cbd20badd7b2970ef102aacf3"} pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 08:25:48 crc kubenswrapper[4982]: I0122 08:25:48.975003 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" containerID="cri-o://aca388898cd21a349b29dc1e993546f5c68af05cbd20badd7b2970ef102aacf3" gracePeriod=600 Jan 22 08:25:49 crc kubenswrapper[4982]: E0122 08:25:49.096880 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:25:49 crc kubenswrapper[4982]: I0122 08:25:49.557307 4982 
generic.go:334] "Generic (PLEG): container finished" podID="2829369e-72ba-4637-853b-88f5cf242a0e" containerID="aca388898cd21a349b29dc1e993546f5c68af05cbd20badd7b2970ef102aacf3" exitCode=0 Jan 22 08:25:49 crc kubenswrapper[4982]: I0122 08:25:49.557375 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerDied","Data":"aca388898cd21a349b29dc1e993546f5c68af05cbd20badd7b2970ef102aacf3"} Jan 22 08:25:49 crc kubenswrapper[4982]: I0122 08:25:49.557424 4982 scope.go:117] "RemoveContainer" containerID="200707dc0938630f0897e0c4992e9deb8308b5b713cde5d988303b2702ee4a31" Jan 22 08:25:49 crc kubenswrapper[4982]: I0122 08:25:49.558731 4982 scope.go:117] "RemoveContainer" containerID="aca388898cd21a349b29dc1e993546f5c68af05cbd20badd7b2970ef102aacf3" Jan 22 08:25:49 crc kubenswrapper[4982]: E0122 08:25:49.559501 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:26:02 crc kubenswrapper[4982]: I0122 08:26:02.721700 4982 scope.go:117] "RemoveContainer" containerID="aca388898cd21a349b29dc1e993546f5c68af05cbd20badd7b2970ef102aacf3" Jan 22 08:26:02 crc kubenswrapper[4982]: E0122 08:26:02.722846 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:26:17 crc kubenswrapper[4982]: I0122 08:26:17.720521 4982 scope.go:117] "RemoveContainer" containerID="aca388898cd21a349b29dc1e993546f5c68af05cbd20badd7b2970ef102aacf3" Jan 22 08:26:17 crc kubenswrapper[4982]: E0122 08:26:17.722022 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:26:19 crc kubenswrapper[4982]: I0122 08:26:19.260738 4982 trace.go:236] Trace[219180678]: "Calculate volume metrics of ovndbcluster-nb-etc-ovn for pod openstack/ovsdbserver-nb-2" (22-Jan-2026 08:26:18.160) (total time: 1099ms): Jan 22 08:26:19 crc kubenswrapper[4982]: Trace[219180678]: [1.099797271s] [1.099797271s] END Jan 22 08:26:29 crc kubenswrapper[4982]: I0122 08:26:29.735390 4982 scope.go:117] "RemoveContainer" containerID="aca388898cd21a349b29dc1e993546f5c68af05cbd20badd7b2970ef102aacf3" Jan 22 08:26:29 crc kubenswrapper[4982]: E0122 08:26:29.736543 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
Jan 22 08:26:29 crc kubenswrapper[4982]: E0122 08:26:29.736543 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 08:26:43 crc kubenswrapper[4982]: I0122 08:26:43.720162 4982 scope.go:117] "RemoveContainer" containerID="aca388898cd21a349b29dc1e993546f5c68af05cbd20badd7b2970ef102aacf3"
Jan 22 08:26:43 crc kubenswrapper[4982]: E0122 08:26:43.721099 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 08:26:56 crc kubenswrapper[4982]: I0122 08:26:56.723062 4982 scope.go:117] "RemoveContainer" containerID="aca388898cd21a349b29dc1e993546f5c68af05cbd20badd7b2970ef102aacf3"
Jan 22 08:26:56 crc kubenswrapper[4982]: E0122 08:26:56.724203 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 08:27:07 crc kubenswrapper[4982]: I0122 08:27:07.719867 4982 scope.go:117] "RemoveContainer" containerID="aca388898cd21a349b29dc1e993546f5c68af05cbd20badd7b2970ef102aacf3"
Jan 22 08:27:07 crc kubenswrapper[4982]: E0122 08:27:07.720798 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 08:27:10 crc kubenswrapper[4982]: I0122 08:27:10.206318 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-lptdr"]
Jan 22 08:27:10 crc kubenswrapper[4982]: E0122 08:27:10.207047 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="717c1c49-ea15-47e5-af6d-dd1b0834a558" containerName="registry-server"
Jan 22 08:27:10 crc kubenswrapper[4982]: I0122 08:27:10.207059 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="717c1c49-ea15-47e5-af6d-dd1b0834a558" containerName="registry-server"
Jan 22 08:27:10 crc kubenswrapper[4982]: E0122 08:27:10.207077 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="717c1c49-ea15-47e5-af6d-dd1b0834a558" containerName="extract-content"
Jan 22 08:27:10 crc kubenswrapper[4982]: I0122 08:27:10.207083 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="717c1c49-ea15-47e5-af6d-dd1b0834a558" containerName="extract-content"
Jan 22 08:27:10 crc kubenswrapper[4982]: E0122 08:27:10.207100 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="717c1c49-ea15-47e5-af6d-dd1b0834a558" containerName="extract-utilities"
Jan 22 08:27:10 crc kubenswrapper[4982]: I0122 08:27:10.207106 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="717c1c49-ea15-47e5-af6d-dd1b0834a558" containerName="extract-utilities"
Jan 22 08:27:10 crc kubenswrapper[4982]: I0122 08:27:10.207284 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="717c1c49-ea15-47e5-af6d-dd1b0834a558" containerName="registry-server"
Jan 22 08:27:10 crc kubenswrapper[4982]: I0122 08:27:10.208761 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lptdr"
Jan 22 08:27:10 crc kubenswrapper[4982]: I0122 08:27:10.224524 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lptdr"]
Jan 22 08:27:10 crc kubenswrapper[4982]: I0122 08:27:10.395822 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06b2a5d9-ca2f-4800-baf2-dbc362b23d4c-catalog-content\") pod \"redhat-marketplace-lptdr\" (UID: \"06b2a5d9-ca2f-4800-baf2-dbc362b23d4c\") " pod="openshift-marketplace/redhat-marketplace-lptdr"
Jan 22 08:27:10 crc kubenswrapper[4982]: I0122 08:27:10.395926 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06b2a5d9-ca2f-4800-baf2-dbc362b23d4c-utilities\") pod \"redhat-marketplace-lptdr\" (UID: \"06b2a5d9-ca2f-4800-baf2-dbc362b23d4c\") " pod="openshift-marketplace/redhat-marketplace-lptdr"
Jan 22 08:27:10 crc kubenswrapper[4982]: I0122 08:27:10.396009 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v55bj\" (UniqueName: \"kubernetes.io/projected/06b2a5d9-ca2f-4800-baf2-dbc362b23d4c-kube-api-access-v55bj\") pod \"redhat-marketplace-lptdr\" (UID: \"06b2a5d9-ca2f-4800-baf2-dbc362b23d4c\") " pod="openshift-marketplace/redhat-marketplace-lptdr"
Jan 22 08:27:10 crc kubenswrapper[4982]: I0122 08:27:10.498082 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v55bj\" (UniqueName: \"kubernetes.io/projected/06b2a5d9-ca2f-4800-baf2-dbc362b23d4c-kube-api-access-v55bj\") pod \"redhat-marketplace-lptdr\" (UID: \"06b2a5d9-ca2f-4800-baf2-dbc362b23d4c\") " pod="openshift-marketplace/redhat-marketplace-lptdr"
Jan 22 08:27:10 crc kubenswrapper[4982]: I0122 08:27:10.498204 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06b2a5d9-ca2f-4800-baf2-dbc362b23d4c-catalog-content\") pod \"redhat-marketplace-lptdr\" (UID: \"06b2a5d9-ca2f-4800-baf2-dbc362b23d4c\") " pod="openshift-marketplace/redhat-marketplace-lptdr"
Jan 22 08:27:10 crc kubenswrapper[4982]: I0122 08:27:10.498269 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06b2a5d9-ca2f-4800-baf2-dbc362b23d4c-utilities\") pod \"redhat-marketplace-lptdr\" (UID: \"06b2a5d9-ca2f-4800-baf2-dbc362b23d4c\") " pod="openshift-marketplace/redhat-marketplace-lptdr"
Jan 22 08:27:10 crc kubenswrapper[4982]: I0122 08:27:10.498754 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06b2a5d9-ca2f-4800-baf2-dbc362b23d4c-utilities\") pod \"redhat-marketplace-lptdr\" (UID: \"06b2a5d9-ca2f-4800-baf2-dbc362b23d4c\") " pod="openshift-marketplace/redhat-marketplace-lptdr"
Jan 22 08:27:10 crc kubenswrapper[4982]: I0122 08:27:10.498929 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06b2a5d9-ca2f-4800-baf2-dbc362b23d4c-catalog-content\") pod \"redhat-marketplace-lptdr\" (UID: \"06b2a5d9-ca2f-4800-baf2-dbc362b23d4c\") " pod="openshift-marketplace/redhat-marketplace-lptdr"
Jan 22 08:27:10 crc kubenswrapper[4982]: I0122 08:27:10.541185 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v55bj\" (UniqueName: \"kubernetes.io/projected/06b2a5d9-ca2f-4800-baf2-dbc362b23d4c-kube-api-access-v55bj\") pod \"redhat-marketplace-lptdr\" (UID: \"06b2a5d9-ca2f-4800-baf2-dbc362b23d4c\") " pod="openshift-marketplace/redhat-marketplace-lptdr"
Jan 22 08:27:10 crc kubenswrapper[4982]: I0122 08:27:10.541696 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lptdr"
Jan 22 08:27:11 crc kubenswrapper[4982]: I0122 08:27:11.054996 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lptdr"]
Jan 22 08:27:11 crc kubenswrapper[4982]: I0122 08:27:11.407922 4982 scope.go:117] "RemoveContainer" containerID="9ca92011d8fc6f1e428f9f95f2f83939ca454db070d0c1e082c9b2cbfbb4ead4"
Jan 22 08:27:11 crc kubenswrapper[4982]: I0122 08:27:11.426112 4982 scope.go:117] "RemoveContainer" containerID="ab21fa8fc3928e19a2c853973645247f3773ce6985c6b795f7d41aff108a8b2c"
Jan 22 08:27:11 crc kubenswrapper[4982]: I0122 08:27:11.448318 4982 scope.go:117] "RemoveContainer" containerID="8ed98e4809f016387c6f3ede7e00b70e2644b9f283688b410f4ee226e75f4667"
Jan 22 08:27:11 crc kubenswrapper[4982]: I0122 08:27:11.705216 4982 generic.go:334] "Generic (PLEG): container finished" podID="06b2a5d9-ca2f-4800-baf2-dbc362b23d4c" containerID="0734dd8f636bd0378772998b2b13e04478a908a6dc007df4e0abe1d3fdf6cece" exitCode=0
Jan 22 08:27:11 crc kubenswrapper[4982]: I0122 08:27:11.705262 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lptdr" event={"ID":"06b2a5d9-ca2f-4800-baf2-dbc362b23d4c","Type":"ContainerDied","Data":"0734dd8f636bd0378772998b2b13e04478a908a6dc007df4e0abe1d3fdf6cece"}
Jan 22 08:27:11 crc kubenswrapper[4982]: I0122 08:27:11.705286 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lptdr" event={"ID":"06b2a5d9-ca2f-4800-baf2-dbc362b23d4c","Type":"ContainerStarted","Data":"044dd65bd9c6e4c47a96f6bfe738691a1eda6d527f121690258ce123388ef2cc"}
Jan 22 08:27:11 crc kubenswrapper[4982]: I0122 08:27:11.708499 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 22 08:27:12 crc kubenswrapper[4982]: I0122 08:27:12.723359 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lptdr" event={"ID":"06b2a5d9-ca2f-4800-baf2-dbc362b23d4c","Type":"ContainerStarted","Data":"2e9b8c5670ba2bb1b59b6f0f086c659006d0c9934519faf91ae9e82cb1a1cffa"}
Jan 22 08:27:13 crc kubenswrapper[4982]: I0122 08:27:13.750191 4982 generic.go:334] "Generic (PLEG): container finished" podID="06b2a5d9-ca2f-4800-baf2-dbc362b23d4c" containerID="2e9b8c5670ba2bb1b59b6f0f086c659006d0c9934519faf91ae9e82cb1a1cffa" exitCode=0
Jan 22 08:27:13 crc kubenswrapper[4982]: I0122 08:27:13.750274 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lptdr" event={"ID":"06b2a5d9-ca2f-4800-baf2-dbc362b23d4c","Type":"ContainerDied","Data":"2e9b8c5670ba2bb1b59b6f0f086c659006d0c9934519faf91ae9e82cb1a1cffa"}
Jan 22 08:27:14 crc kubenswrapper[4982]: I0122 08:27:14.761327 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lptdr" event={"ID":"06b2a5d9-ca2f-4800-baf2-dbc362b23d4c","Type":"ContainerStarted","Data":"18a103c7b4cb44fa8f52565e7cd4e8fd2a180cacf4895892a4f78c352eec6718"}
Jan 22 08:27:14 crc kubenswrapper[4982]: I0122 08:27:14.784833 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-lptdr" podStartSLOduration=2.31648947 podStartE2EDuration="4.784812543s" podCreationTimestamp="2026-01-22 08:27:10 +0000 UTC" firstStartedPulling="2026-01-22 08:27:11.708292833 +0000 UTC m=+9692.546930826" lastFinishedPulling="2026-01-22 08:27:14.176615886 +0000 UTC m=+9695.015253899" observedRunningTime="2026-01-22 08:27:14.777563306 +0000 UTC m=+9695.616201329" watchObservedRunningTime="2026-01-22 08:27:14.784812543 +0000 UTC m=+9695.623450556"
Jan 22 08:27:20 crc kubenswrapper[4982]: I0122 08:27:20.542663 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-lptdr"
Jan 22 08:27:20 crc kubenswrapper[4982]: I0122 08:27:20.543334 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-lptdr"
Jan 22 08:27:20 crc kubenswrapper[4982]: I0122 08:27:20.629085 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-lptdr"
Jan 22 08:27:20 crc kubenswrapper[4982]: I0122 08:27:20.879752 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-lptdr"
Jan 22 08:27:20 crc kubenswrapper[4982]: I0122 08:27:20.927034 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lptdr"]
Jan 22 08:27:22 crc kubenswrapper[4982]: I0122 08:27:22.719741 4982 scope.go:117] "RemoveContainer" containerID="aca388898cd21a349b29dc1e993546f5c68af05cbd20badd7b2970ef102aacf3"
Jan 22 08:27:22 crc kubenswrapper[4982]: E0122 08:27:22.721038 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 08:27:22 crc kubenswrapper[4982]: I0122 08:27:22.863701 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-lptdr" podUID="06b2a5d9-ca2f-4800-baf2-dbc362b23d4c" containerName="registry-server" containerID="cri-o://18a103c7b4cb44fa8f52565e7cd4e8fd2a180cacf4895892a4f78c352eec6718" gracePeriod=2
Jan 22 08:27:23 crc kubenswrapper[4982]: I0122 08:27:23.875314 4982 generic.go:334] "Generic (PLEG): container finished" podID="06b2a5d9-ca2f-4800-baf2-dbc362b23d4c" containerID="18a103c7b4cb44fa8f52565e7cd4e8fd2a180cacf4895892a4f78c352eec6718" exitCode=0
Jan 22 08:27:23 crc kubenswrapper[4982]: I0122 08:27:23.875468 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lptdr" event={"ID":"06b2a5d9-ca2f-4800-baf2-dbc362b23d4c","Type":"ContainerDied","Data":"18a103c7b4cb44fa8f52565e7cd4e8fd2a180cacf4895892a4f78c352eec6718"}
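[editor's note] The pod_startup_latency_tracker entry for redhat-marketplace-lptdr is internally consistent: podStartE2EDuration (4.784812543s) equals watchObservedRunningTime minus podCreationTimestamp (08:27:14.784812543 − 08:27:10), and podStartSLOduration (2.31648947s) appears to be that end-to-end figure minus the image-pull window, lastFinishedPulling − firstStartedPulling = 14.176615886 − 11.708292833 ≈ 2.4683s, i.e. 4.7848 − 2.4683 ≈ 2.3165s. Reading the SLO duration as "startup time excluding image pulls" is an interpretation of the field names, not something the log states.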
Jan 22 08:27:23 crc kubenswrapper[4982]: I0122 08:27:23.875685 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lptdr" event={"ID":"06b2a5d9-ca2f-4800-baf2-dbc362b23d4c","Type":"ContainerDied","Data":"044dd65bd9c6e4c47a96f6bfe738691a1eda6d527f121690258ce123388ef2cc"}
Jan 22 08:27:23 crc kubenswrapper[4982]: I0122 08:27:23.875703 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="044dd65bd9c6e4c47a96f6bfe738691a1eda6d527f121690258ce123388ef2cc"
Jan 22 08:27:23 crc kubenswrapper[4982]: I0122 08:27:23.895041 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lptdr"
Jan 22 08:27:24 crc kubenswrapper[4982]: I0122 08:27:24.069440 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06b2a5d9-ca2f-4800-baf2-dbc362b23d4c-catalog-content\") pod \"06b2a5d9-ca2f-4800-baf2-dbc362b23d4c\" (UID: \"06b2a5d9-ca2f-4800-baf2-dbc362b23d4c\") "
Jan 22 08:27:24 crc kubenswrapper[4982]: I0122 08:27:24.069720 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v55bj\" (UniqueName: \"kubernetes.io/projected/06b2a5d9-ca2f-4800-baf2-dbc362b23d4c-kube-api-access-v55bj\") pod \"06b2a5d9-ca2f-4800-baf2-dbc362b23d4c\" (UID: \"06b2a5d9-ca2f-4800-baf2-dbc362b23d4c\") "
Jan 22 08:27:24 crc kubenswrapper[4982]: I0122 08:27:24.069905 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06b2a5d9-ca2f-4800-baf2-dbc362b23d4c-utilities\") pod \"06b2a5d9-ca2f-4800-baf2-dbc362b23d4c\" (UID: \"06b2a5d9-ca2f-4800-baf2-dbc362b23d4c\") "
Jan 22 08:27:24 crc kubenswrapper[4982]: I0122 08:27:24.071399 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06b2a5d9-ca2f-4800-baf2-dbc362b23d4c-utilities" (OuterVolumeSpecName: "utilities") pod "06b2a5d9-ca2f-4800-baf2-dbc362b23d4c" (UID: "06b2a5d9-ca2f-4800-baf2-dbc362b23d4c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 08:27:24 crc kubenswrapper[4982]: I0122 08:27:24.082781 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06b2a5d9-ca2f-4800-baf2-dbc362b23d4c-kube-api-access-v55bj" (OuterVolumeSpecName: "kube-api-access-v55bj") pod "06b2a5d9-ca2f-4800-baf2-dbc362b23d4c" (UID: "06b2a5d9-ca2f-4800-baf2-dbc362b23d4c"). InnerVolumeSpecName "kube-api-access-v55bj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 08:27:24 crc kubenswrapper[4982]: I0122 08:27:24.112962 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06b2a5d9-ca2f-4800-baf2-dbc362b23d4c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "06b2a5d9-ca2f-4800-baf2-dbc362b23d4c" (UID: "06b2a5d9-ca2f-4800-baf2-dbc362b23d4c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 08:27:24 crc kubenswrapper[4982]: I0122 08:27:24.174220 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v55bj\" (UniqueName: \"kubernetes.io/projected/06b2a5d9-ca2f-4800-baf2-dbc362b23d4c-kube-api-access-v55bj\") on node \"crc\" DevicePath \"\""
Jan 22 08:27:24 crc kubenswrapper[4982]: I0122 08:27:24.174255 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06b2a5d9-ca2f-4800-baf2-dbc362b23d4c-utilities\") on node \"crc\" DevicePath \"\""
Jan 22 08:27:24 crc kubenswrapper[4982]: I0122 08:27:24.174267 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06b2a5d9-ca2f-4800-baf2-dbc362b23d4c-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 22 08:27:24 crc kubenswrapper[4982]: I0122 08:27:24.888608 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lptdr"
Jan 22 08:27:24 crc kubenswrapper[4982]: I0122 08:27:24.947516 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lptdr"]
Jan 22 08:27:24 crc kubenswrapper[4982]: I0122 08:27:24.960407 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-lptdr"]
Jan 22 08:27:25 crc kubenswrapper[4982]: I0122 08:27:25.736978 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06b2a5d9-ca2f-4800-baf2-dbc362b23d4c" path="/var/lib/kubelet/pods/06b2a5d9-ca2f-4800-baf2-dbc362b23d4c/volumes"
Jan 22 08:27:35 crc kubenswrapper[4982]: I0122 08:27:35.719097 4982 scope.go:117] "RemoveContainer" containerID="aca388898cd21a349b29dc1e993546f5c68af05cbd20badd7b2970ef102aacf3"
Jan 22 08:27:35 crc kubenswrapper[4982]: E0122 08:27:35.719909 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 08:27:46 crc kubenswrapper[4982]: I0122 08:27:46.720401 4982 scope.go:117] "RemoveContainer" containerID="aca388898cd21a349b29dc1e993546f5c68af05cbd20badd7b2970ef102aacf3"
Jan 22 08:27:46 crc kubenswrapper[4982]: E0122 08:27:46.721925 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 08:28:01 crc kubenswrapper[4982]: I0122 08:28:01.720380 4982 scope.go:117] "RemoveContainer" containerID="aca388898cd21a349b29dc1e993546f5c68af05cbd20badd7b2970ef102aacf3"
Jan 22 08:28:01 crc kubenswrapper[4982]: E0122 08:28:01.721374 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 08:28:12 crc kubenswrapper[4982]: I0122 08:28:12.719731 4982 scope.go:117] "RemoveContainer" containerID="aca388898cd21a349b29dc1e993546f5c68af05cbd20badd7b2970ef102aacf3"
Jan 22 08:28:12 crc kubenswrapper[4982]: E0122 08:28:12.720641 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 08:28:25 crc kubenswrapper[4982]: I0122 08:28:25.724212 4982 scope.go:117] "RemoveContainer" containerID="aca388898cd21a349b29dc1e993546f5c68af05cbd20badd7b2970ef102aacf3"
Jan 22 08:28:25 crc kubenswrapper[4982]: E0122 08:28:25.725272 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 08:28:40 crc kubenswrapper[4982]: I0122 08:28:40.719326 4982 scope.go:117] "RemoveContainer" containerID="aca388898cd21a349b29dc1e993546f5c68af05cbd20badd7b2970ef102aacf3"
Jan 22 08:28:40 crc kubenswrapper[4982]: E0122 08:28:40.720311 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 08:28:54 crc kubenswrapper[4982]: I0122 08:28:54.719557 4982 scope.go:117] "RemoveContainer" containerID="aca388898cd21a349b29dc1e993546f5c68af05cbd20badd7b2970ef102aacf3"
Jan 22 08:28:54 crc kubenswrapper[4982]: E0122 08:28:54.720946 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 08:29:09 crc kubenswrapper[4982]: I0122 08:29:09.736979 4982 scope.go:117] "RemoveContainer" containerID="aca388898cd21a349b29dc1e993546f5c68af05cbd20badd7b2970ef102aacf3"
Jan 22 08:29:09 crc kubenswrapper[4982]: E0122 08:29:09.738508 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 08:29:20 crc kubenswrapper[4982]: I0122 08:29:20.719443 4982 scope.go:117] "RemoveContainer" containerID="aca388898cd21a349b29dc1e993546f5c68af05cbd20badd7b2970ef102aacf3"
Jan 22 08:29:20 crc kubenswrapper[4982]: E0122 08:29:20.720720 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 08:29:32 crc kubenswrapper[4982]: I0122 08:29:32.720209 4982 scope.go:117] "RemoveContainer" containerID="aca388898cd21a349b29dc1e993546f5c68af05cbd20badd7b2970ef102aacf3"
Jan 22 08:29:32 crc kubenswrapper[4982]: E0122 08:29:32.721719 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 08:29:46 crc kubenswrapper[4982]: I0122 08:29:46.720736 4982 scope.go:117] "RemoveContainer" containerID="aca388898cd21a349b29dc1e993546f5c68af05cbd20badd7b2970ef102aacf3"
Jan 22 08:29:46 crc kubenswrapper[4982]: E0122 08:29:46.721579 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 08:29:57 crc kubenswrapper[4982]: I0122 08:29:57.719436 4982 scope.go:117] "RemoveContainer" containerID="aca388898cd21a349b29dc1e993546f5c68af05cbd20badd7b2970ef102aacf3"
Jan 22 08:29:57 crc kubenswrapper[4982]: E0122 08:29:57.720328 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e"
Jan 22 08:30:00 crc kubenswrapper[4982]: I0122 08:30:00.161519 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484510-6sgzw"]
Jan 22 08:30:00 crc kubenswrapper[4982]: E0122 08:30:00.162440 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06b2a5d9-ca2f-4800-baf2-dbc362b23d4c" containerName="registry-server"
Jan 22 08:30:00 crc kubenswrapper[4982]: I0122 08:30:00.162453 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="06b2a5d9-ca2f-4800-baf2-dbc362b23d4c" containerName="registry-server"
Jan 22 08:30:00 crc kubenswrapper[4982]: E0122 08:30:00.162473 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06b2a5d9-ca2f-4800-baf2-dbc362b23d4c" containerName="extract-utilities"
Jan 22 08:30:00 crc kubenswrapper[4982]: I0122 08:30:00.162479 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="06b2a5d9-ca2f-4800-baf2-dbc362b23d4c" containerName="extract-utilities"
Jan 22 08:30:00 crc kubenswrapper[4982]: E0122 08:30:00.162497 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06b2a5d9-ca2f-4800-baf2-dbc362b23d4c" containerName="extract-content"
Jan 22 08:30:00 crc kubenswrapper[4982]: I0122 08:30:00.162505 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="06b2a5d9-ca2f-4800-baf2-dbc362b23d4c" containerName="extract-content"
Jan 22 08:30:00 crc kubenswrapper[4982]: I0122 08:30:00.162694 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="06b2a5d9-ca2f-4800-baf2-dbc362b23d4c" containerName="registry-server"
Jan 22 08:30:00 crc kubenswrapper[4982]: I0122 08:30:00.163403 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484510-6sgzw"
Jan 22 08:30:00 crc kubenswrapper[4982]: I0122 08:30:00.165635 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Jan 22 08:30:00 crc kubenswrapper[4982]: I0122 08:30:00.165815 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Jan 22 08:30:00 crc kubenswrapper[4982]: I0122 08:30:00.182595 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484510-6sgzw"]
Jan 22 08:30:00 crc kubenswrapper[4982]: I0122 08:30:00.225641 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/47f17038-b04a-486c-bf65-c73c01b2e5cb-secret-volume\") pod \"collect-profiles-29484510-6sgzw\" (UID: \"47f17038-b04a-486c-bf65-c73c01b2e5cb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484510-6sgzw"
Jan 22 08:30:00 crc kubenswrapper[4982]: I0122 08:30:00.225736 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/47f17038-b04a-486c-bf65-c73c01b2e5cb-config-volume\") pod \"collect-profiles-29484510-6sgzw\" (UID: \"47f17038-b04a-486c-bf65-c73c01b2e5cb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484510-6sgzw"
Jan 22 08:30:00 crc kubenswrapper[4982]: I0122 08:30:00.225928 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcxrr\" (UniqueName: \"kubernetes.io/projected/47f17038-b04a-486c-bf65-c73c01b2e5cb-kube-api-access-tcxrr\") pod \"collect-profiles-29484510-6sgzw\" (UID: \"47f17038-b04a-486c-bf65-c73c01b2e5cb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484510-6sgzw"
Jan 22 08:30:00 crc kubenswrapper[4982]: I0122 08:30:00.331528 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcxrr\" (UniqueName: \"kubernetes.io/projected/47f17038-b04a-486c-bf65-c73c01b2e5cb-kube-api-access-tcxrr\") pod \"collect-profiles-29484510-6sgzw\" (UID: \"47f17038-b04a-486c-bf65-c73c01b2e5cb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484510-6sgzw"
Jan 22 08:30:00 crc kubenswrapper[4982]: I0122 08:30:00.331667 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/47f17038-b04a-486c-bf65-c73c01b2e5cb-secret-volume\") pod \"collect-profiles-29484510-6sgzw\" (UID: \"47f17038-b04a-486c-bf65-c73c01b2e5cb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484510-6sgzw"
Jan 22 08:30:00 crc kubenswrapper[4982]: I0122 08:30:00.331757 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/47f17038-b04a-486c-bf65-c73c01b2e5cb-config-volume\") pod \"collect-profiles-29484510-6sgzw\" (UID: \"47f17038-b04a-486c-bf65-c73c01b2e5cb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484510-6sgzw"
Jan 22 08:30:00 crc kubenswrapper[4982]: I0122 08:30:00.333454 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/47f17038-b04a-486c-bf65-c73c01b2e5cb-config-volume\") pod \"collect-profiles-29484510-6sgzw\" (UID: \"47f17038-b04a-486c-bf65-c73c01b2e5cb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484510-6sgzw"
Jan 22 08:30:00 crc kubenswrapper[4982]: I0122 08:30:00.351567 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/47f17038-b04a-486c-bf65-c73c01b2e5cb-secret-volume\") pod \"collect-profiles-29484510-6sgzw\" (UID: \"47f17038-b04a-486c-bf65-c73c01b2e5cb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484510-6sgzw"
Jan 22 08:30:00 crc kubenswrapper[4982]: I0122 08:30:00.357025 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcxrr\" (UniqueName: \"kubernetes.io/projected/47f17038-b04a-486c-bf65-c73c01b2e5cb-kube-api-access-tcxrr\") pod \"collect-profiles-29484510-6sgzw\" (UID: \"47f17038-b04a-486c-bf65-c73c01b2e5cb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484510-6sgzw"
Jan 22 08:30:00 crc kubenswrapper[4982]: I0122 08:30:00.484611 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484510-6sgzw"
Jan 22 08:30:01 crc kubenswrapper[4982]: I0122 08:30:01.003031 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484510-6sgzw"]
Jan 22 08:30:01 crc kubenswrapper[4982]: I0122 08:30:01.892555 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484510-6sgzw" event={"ID":"47f17038-b04a-486c-bf65-c73c01b2e5cb","Type":"ContainerStarted","Data":"e93c2931f0da1c014c1d1af60f88bdf50bb908a1437e013f4b6fe7132b057bfb"}
Jan 22 08:30:01 crc kubenswrapper[4982]: I0122 08:30:01.892943 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484510-6sgzw" event={"ID":"47f17038-b04a-486c-bf65-c73c01b2e5cb","Type":"ContainerStarted","Data":"dde14437add34572600645dcb3a5867b6fdc5f93e8aebe05b41b1758e2f971d7"}
Jan 22 08:30:01 crc kubenswrapper[4982]: I0122 08:30:01.938993 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29484510-6sgzw" podStartSLOduration=1.938975998 podStartE2EDuration="1.938975998s" podCreationTimestamp="2026-01-22 08:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 08:30:01.919434288 +0000 UTC m=+9862.758072301" watchObservedRunningTime="2026-01-22 08:30:01.938975998 +0000 UTC m=+9862.777614001"
Jan 22 08:30:02 crc kubenswrapper[4982]: I0122 08:30:02.908987 4982 generic.go:334] "Generic (PLEG): container finished" podID="47f17038-b04a-486c-bf65-c73c01b2e5cb" containerID="e93c2931f0da1c014c1d1af60f88bdf50bb908a1437e013f4b6fe7132b057bfb" exitCode=0
Jan 22 08:30:02 crc kubenswrapper[4982]: I0122 08:30:02.909280 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484510-6sgzw" event={"ID":"47f17038-b04a-486c-bf65-c73c01b2e5cb","Type":"ContainerDied","Data":"e93c2931f0da1c014c1d1af60f88bdf50bb908a1437e013f4b6fe7132b057bfb"}
Jan 22 08:30:04 crc kubenswrapper[4982]: I0122 08:30:04.420949 4982 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484510-6sgzw" Jan 22 08:30:04 crc kubenswrapper[4982]: I0122 08:30:04.521541 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tcxrr\" (UniqueName: \"kubernetes.io/projected/47f17038-b04a-486c-bf65-c73c01b2e5cb-kube-api-access-tcxrr\") pod \"47f17038-b04a-486c-bf65-c73c01b2e5cb\" (UID: \"47f17038-b04a-486c-bf65-c73c01b2e5cb\") " Jan 22 08:30:04 crc kubenswrapper[4982]: I0122 08:30:04.521662 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/47f17038-b04a-486c-bf65-c73c01b2e5cb-secret-volume\") pod \"47f17038-b04a-486c-bf65-c73c01b2e5cb\" (UID: \"47f17038-b04a-486c-bf65-c73c01b2e5cb\") " Jan 22 08:30:04 crc kubenswrapper[4982]: I0122 08:30:04.521759 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/47f17038-b04a-486c-bf65-c73c01b2e5cb-config-volume\") pod \"47f17038-b04a-486c-bf65-c73c01b2e5cb\" (UID: \"47f17038-b04a-486c-bf65-c73c01b2e5cb\") " Jan 22 08:30:04 crc kubenswrapper[4982]: I0122 08:30:04.522681 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/47f17038-b04a-486c-bf65-c73c01b2e5cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "47f17038-b04a-486c-bf65-c73c01b2e5cb" (UID: "47f17038-b04a-486c-bf65-c73c01b2e5cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 08:30:04 crc kubenswrapper[4982]: I0122 08:30:04.528602 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47f17038-b04a-486c-bf65-c73c01b2e5cb-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "47f17038-b04a-486c-bf65-c73c01b2e5cb" (UID: "47f17038-b04a-486c-bf65-c73c01b2e5cb"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 08:30:04 crc kubenswrapper[4982]: I0122 08:30:04.529056 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47f17038-b04a-486c-bf65-c73c01b2e5cb-kube-api-access-tcxrr" (OuterVolumeSpecName: "kube-api-access-tcxrr") pod "47f17038-b04a-486c-bf65-c73c01b2e5cb" (UID: "47f17038-b04a-486c-bf65-c73c01b2e5cb"). InnerVolumeSpecName "kube-api-access-tcxrr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 08:30:04 crc kubenswrapper[4982]: I0122 08:30:04.624387 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tcxrr\" (UniqueName: \"kubernetes.io/projected/47f17038-b04a-486c-bf65-c73c01b2e5cb-kube-api-access-tcxrr\") on node \"crc\" DevicePath \"\"" Jan 22 08:30:04 crc kubenswrapper[4982]: I0122 08:30:04.624439 4982 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/47f17038-b04a-486c-bf65-c73c01b2e5cb-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 22 08:30:04 crc kubenswrapper[4982]: I0122 08:30:04.624456 4982 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/47f17038-b04a-486c-bf65-c73c01b2e5cb-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 08:30:04 crc kubenswrapper[4982]: I0122 08:30:04.932135 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484510-6sgzw" event={"ID":"47f17038-b04a-486c-bf65-c73c01b2e5cb","Type":"ContainerDied","Data":"dde14437add34572600645dcb3a5867b6fdc5f93e8aebe05b41b1758e2f971d7"} Jan 22 08:30:04 crc kubenswrapper[4982]: I0122 08:30:04.932486 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dde14437add34572600645dcb3a5867b6fdc5f93e8aebe05b41b1758e2f971d7" Jan 22 08:30:04 crc kubenswrapper[4982]: I0122 08:30:04.932223 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484510-6sgzw" Jan 22 08:30:05 crc kubenswrapper[4982]: I0122 08:30:05.515105 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484465-nfrl2"] Jan 22 08:30:05 crc kubenswrapper[4982]: I0122 08:30:05.528049 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484465-nfrl2"] Jan 22 08:30:05 crc kubenswrapper[4982]: I0122 08:30:05.744388 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e4c3901-24d8-48d4-aa6b-134a6b2cab71" path="/var/lib/kubelet/pods/7e4c3901-24d8-48d4-aa6b-134a6b2cab71/volumes" Jan 22 08:30:10 crc kubenswrapper[4982]: I0122 08:30:10.719517 4982 scope.go:117] "RemoveContainer" containerID="aca388898cd21a349b29dc1e993546f5c68af05cbd20badd7b2970ef102aacf3" Jan 22 08:30:10 crc kubenswrapper[4982]: E0122 08:30:10.720314 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:30:11 crc kubenswrapper[4982]: I0122 08:30:11.563802 4982 scope.go:117] "RemoveContainer" containerID="2f20e3b64882525c0eebf2eaf1a92887caf33d9733b9e6dcd874f6b737ff33f9" Jan 22 08:30:25 crc kubenswrapper[4982]: I0122 08:30:25.720565 4982 scope.go:117] "RemoveContainer" containerID="aca388898cd21a349b29dc1e993546f5c68af05cbd20badd7b2970ef102aacf3" Jan 22 08:30:25 crc kubenswrapper[4982]: E0122 08:30:25.721468 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:30:40 crc kubenswrapper[4982]: I0122 08:30:40.721236 4982 scope.go:117] "RemoveContainer" containerID="aca388898cd21a349b29dc1e993546f5c68af05cbd20badd7b2970ef102aacf3" Jan 22 08:30:40 crc kubenswrapper[4982]: E0122 08:30:40.722686 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:30:52 crc kubenswrapper[4982]: I0122 08:30:52.701783 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-98brh"] Jan 22 08:30:52 crc kubenswrapper[4982]: E0122 08:30:52.704646 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47f17038-b04a-486c-bf65-c73c01b2e5cb" containerName="collect-profiles" Jan 22 08:30:52 crc kubenswrapper[4982]: I0122 08:30:52.704684 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="47f17038-b04a-486c-bf65-c73c01b2e5cb" containerName="collect-profiles" Jan 22 08:30:52 crc kubenswrapper[4982]: I0122 08:30:52.705132 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="47f17038-b04a-486c-bf65-c73c01b2e5cb" containerName="collect-profiles" Jan 22 08:30:52 crc kubenswrapper[4982]: I0122 08:30:52.708966 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-98brh" Jan 22 08:30:52 crc kubenswrapper[4982]: I0122 08:30:52.718831 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-98brh"] Jan 22 08:30:52 crc kubenswrapper[4982]: I0122 08:30:52.816037 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctqfc\" (UniqueName: \"kubernetes.io/projected/183f60fc-1ba1-436a-a2f4-dcb1ddc4e735-kube-api-access-ctqfc\") pod \"community-operators-98brh\" (UID: \"183f60fc-1ba1-436a-a2f4-dcb1ddc4e735\") " pod="openshift-marketplace/community-operators-98brh" Jan 22 08:30:52 crc kubenswrapper[4982]: I0122 08:30:52.816370 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/183f60fc-1ba1-436a-a2f4-dcb1ddc4e735-utilities\") pod \"community-operators-98brh\" (UID: \"183f60fc-1ba1-436a-a2f4-dcb1ddc4e735\") " pod="openshift-marketplace/community-operators-98brh" Jan 22 08:30:52 crc kubenswrapper[4982]: I0122 08:30:52.816539 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/183f60fc-1ba1-436a-a2f4-dcb1ddc4e735-catalog-content\") pod \"community-operators-98brh\" (UID: \"183f60fc-1ba1-436a-a2f4-dcb1ddc4e735\") " pod="openshift-marketplace/community-operators-98brh" Jan 22 08:30:52 crc kubenswrapper[4982]: I0122 08:30:52.918218 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctqfc\" (UniqueName: \"kubernetes.io/projected/183f60fc-1ba1-436a-a2f4-dcb1ddc4e735-kube-api-access-ctqfc\") pod \"community-operators-98brh\" (UID: \"183f60fc-1ba1-436a-a2f4-dcb1ddc4e735\") " pod="openshift-marketplace/community-operators-98brh" Jan 22 08:30:52 crc kubenswrapper[4982]: I0122 08:30:52.918602 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/183f60fc-1ba1-436a-a2f4-dcb1ddc4e735-utilities\") pod \"community-operators-98brh\" (UID: \"183f60fc-1ba1-436a-a2f4-dcb1ddc4e735\") " pod="openshift-marketplace/community-operators-98brh" Jan 22 08:30:52 crc kubenswrapper[4982]: I0122 08:30:52.918648 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/183f60fc-1ba1-436a-a2f4-dcb1ddc4e735-catalog-content\") pod \"community-operators-98brh\" (UID: \"183f60fc-1ba1-436a-a2f4-dcb1ddc4e735\") " pod="openshift-marketplace/community-operators-98brh" Jan 22 08:30:52 crc kubenswrapper[4982]: I0122 08:30:52.919351 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/183f60fc-1ba1-436a-a2f4-dcb1ddc4e735-utilities\") pod \"community-operators-98brh\" (UID: \"183f60fc-1ba1-436a-a2f4-dcb1ddc4e735\") " pod="openshift-marketplace/community-operators-98brh" Jan 22 08:30:52 crc kubenswrapper[4982]: I0122 08:30:52.919409 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/183f60fc-1ba1-436a-a2f4-dcb1ddc4e735-catalog-content\") pod \"community-operators-98brh\" (UID: \"183f60fc-1ba1-436a-a2f4-dcb1ddc4e735\") " pod="openshift-marketplace/community-operators-98brh" Jan 22 08:30:52 crc kubenswrapper[4982]: I0122 08:30:52.939728 4982 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-ctqfc\" (UniqueName: \"kubernetes.io/projected/183f60fc-1ba1-436a-a2f4-dcb1ddc4e735-kube-api-access-ctqfc\") pod \"community-operators-98brh\" (UID: \"183f60fc-1ba1-436a-a2f4-dcb1ddc4e735\") " pod="openshift-marketplace/community-operators-98brh" Jan 22 08:30:53 crc kubenswrapper[4982]: I0122 08:30:53.041772 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-98brh" Jan 22 08:30:53 crc kubenswrapper[4982]: I0122 08:30:53.576150 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-98brh"] Jan 22 08:30:54 crc kubenswrapper[4982]: I0122 08:30:54.521551 4982 generic.go:334] "Generic (PLEG): container finished" podID="183f60fc-1ba1-436a-a2f4-dcb1ddc4e735" containerID="15a0d768a07678d91e2cc1f2310a57e1dc6b6db23dfee14d0e58fd88ca1c5dce" exitCode=0 Jan 22 08:30:54 crc kubenswrapper[4982]: I0122 08:30:54.522390 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-98brh" event={"ID":"183f60fc-1ba1-436a-a2f4-dcb1ddc4e735","Type":"ContainerDied","Data":"15a0d768a07678d91e2cc1f2310a57e1dc6b6db23dfee14d0e58fd88ca1c5dce"} Jan 22 08:30:54 crc kubenswrapper[4982]: I0122 08:30:54.522444 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-98brh" event={"ID":"183f60fc-1ba1-436a-a2f4-dcb1ddc4e735","Type":"ContainerStarted","Data":"8b6a3f01aeb00ad0cbfcb33b66576e9b5f0b4f24b47e1485c620fdc76582b5ce"} Jan 22 08:30:54 crc kubenswrapper[4982]: I0122 08:30:54.721172 4982 scope.go:117] "RemoveContainer" containerID="aca388898cd21a349b29dc1e993546f5c68af05cbd20badd7b2970ef102aacf3" Jan 22 08:30:55 crc kubenswrapper[4982]: I0122 08:30:55.086728 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mkj58"] Jan 22 08:30:55 crc kubenswrapper[4982]: I0122 08:30:55.097648 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mkj58" Jan 22 08:30:55 crc kubenswrapper[4982]: I0122 08:30:55.111686 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mkj58"] Jan 22 08:30:55 crc kubenswrapper[4982]: I0122 08:30:55.172367 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23075886-e3d9-4451-9384-7be2259f4a9c-catalog-content\") pod \"redhat-operators-mkj58\" (UID: \"23075886-e3d9-4451-9384-7be2259f4a9c\") " pod="openshift-marketplace/redhat-operators-mkj58" Jan 22 08:30:55 crc kubenswrapper[4982]: I0122 08:30:55.172563 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23075886-e3d9-4451-9384-7be2259f4a9c-utilities\") pod \"redhat-operators-mkj58\" (UID: \"23075886-e3d9-4451-9384-7be2259f4a9c\") " pod="openshift-marketplace/redhat-operators-mkj58" Jan 22 08:30:55 crc kubenswrapper[4982]: I0122 08:30:55.172649 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-684rw\" (UniqueName: \"kubernetes.io/projected/23075886-e3d9-4451-9384-7be2259f4a9c-kube-api-access-684rw\") pod \"redhat-operators-mkj58\" (UID: \"23075886-e3d9-4451-9384-7be2259f4a9c\") " pod="openshift-marketplace/redhat-operators-mkj58" Jan 22 08:30:55 crc kubenswrapper[4982]: I0122 08:30:55.275768 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23075886-e3d9-4451-9384-7be2259f4a9c-utilities\") pod \"redhat-operators-mkj58\" (UID: \"23075886-e3d9-4451-9384-7be2259f4a9c\") " pod="openshift-marketplace/redhat-operators-mkj58" Jan 22 08:30:55 crc kubenswrapper[4982]: I0122 08:30:55.275885 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-684rw\" (UniqueName: \"kubernetes.io/projected/23075886-e3d9-4451-9384-7be2259f4a9c-kube-api-access-684rw\") pod \"redhat-operators-mkj58\" (UID: \"23075886-e3d9-4451-9384-7be2259f4a9c\") " pod="openshift-marketplace/redhat-operators-mkj58" Jan 22 08:30:55 crc kubenswrapper[4982]: I0122 08:30:55.275956 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23075886-e3d9-4451-9384-7be2259f4a9c-catalog-content\") pod \"redhat-operators-mkj58\" (UID: \"23075886-e3d9-4451-9384-7be2259f4a9c\") " pod="openshift-marketplace/redhat-operators-mkj58" Jan 22 08:30:55 crc kubenswrapper[4982]: I0122 08:30:55.276517 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23075886-e3d9-4451-9384-7be2259f4a9c-utilities\") pod \"redhat-operators-mkj58\" (UID: \"23075886-e3d9-4451-9384-7be2259f4a9c\") " pod="openshift-marketplace/redhat-operators-mkj58" Jan 22 08:30:55 crc kubenswrapper[4982]: I0122 08:30:55.276837 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23075886-e3d9-4451-9384-7be2259f4a9c-catalog-content\") pod \"redhat-operators-mkj58\" (UID: \"23075886-e3d9-4451-9384-7be2259f4a9c\") " pod="openshift-marketplace/redhat-operators-mkj58" Jan 22 08:30:55 crc kubenswrapper[4982]: I0122 08:30:55.297164 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-684rw\" (UniqueName: \"kubernetes.io/projected/23075886-e3d9-4451-9384-7be2259f4a9c-kube-api-access-684rw\") pod \"redhat-operators-mkj58\" (UID: \"23075886-e3d9-4451-9384-7be2259f4a9c\") " pod="openshift-marketplace/redhat-operators-mkj58" Jan 22 08:30:55 crc kubenswrapper[4982]: I0122 08:30:55.426432 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mkj58" Jan 22 08:30:55 crc kubenswrapper[4982]: I0122 08:30:55.570863 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-98brh" event={"ID":"183f60fc-1ba1-436a-a2f4-dcb1ddc4e735","Type":"ContainerStarted","Data":"19f136cb01c5e8964cc4b2cd661b2900a0948257e32b55e896a6a9bfcdd20f6a"} Jan 22 08:30:55 crc kubenswrapper[4982]: I0122 08:30:55.578648 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"3c863d0caac21e6c4340c3e6f8ed5e1879e04b9c54ec97897ba983925af2095d"} Jan 22 08:30:55 crc kubenswrapper[4982]: W0122 08:30:55.952469 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod23075886_e3d9_4451_9384_7be2259f4a9c.slice/crio-5dc0971f6f313d0f6d62484077dc251c6fd35521af98107bbcee1a91ee194ed1 WatchSource:0}: Error finding container 5dc0971f6f313d0f6d62484077dc251c6fd35521af98107bbcee1a91ee194ed1: Status 404 returned error can't find the container with id 5dc0971f6f313d0f6d62484077dc251c6fd35521af98107bbcee1a91ee194ed1 Jan 22 08:30:55 crc kubenswrapper[4982]: I0122 08:30:55.954046 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mkj58"] Jan 22 08:30:56 crc kubenswrapper[4982]: I0122 08:30:56.588508 4982 generic.go:334] "Generic (PLEG): container finished" podID="183f60fc-1ba1-436a-a2f4-dcb1ddc4e735" containerID="19f136cb01c5e8964cc4b2cd661b2900a0948257e32b55e896a6a9bfcdd20f6a" exitCode=0 Jan 22 08:30:56 crc kubenswrapper[4982]: I0122 08:30:56.588617 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-98brh" event={"ID":"183f60fc-1ba1-436a-a2f4-dcb1ddc4e735","Type":"ContainerDied","Data":"19f136cb01c5e8964cc4b2cd661b2900a0948257e32b55e896a6a9bfcdd20f6a"} Jan 22 08:30:56 crc kubenswrapper[4982]: I0122 08:30:56.591285 4982 generic.go:334] "Generic (PLEG): container finished" podID="23075886-e3d9-4451-9384-7be2259f4a9c" containerID="74f4013fad6ff06ba2927e6b2f0d772a67b4772c4d1e839113837c9f63fa5154" exitCode=0 Jan 22 08:30:56 crc kubenswrapper[4982]: I0122 08:30:56.591316 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mkj58" event={"ID":"23075886-e3d9-4451-9384-7be2259f4a9c","Type":"ContainerDied","Data":"74f4013fad6ff06ba2927e6b2f0d772a67b4772c4d1e839113837c9f63fa5154"} Jan 22 08:30:56 crc kubenswrapper[4982]: I0122 08:30:56.591338 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mkj58" event={"ID":"23075886-e3d9-4451-9384-7be2259f4a9c","Type":"ContainerStarted","Data":"5dc0971f6f313d0f6d62484077dc251c6fd35521af98107bbcee1a91ee194ed1"} Jan 22 08:30:57 crc kubenswrapper[4982]: I0122 08:30:57.602913 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-98brh" 
event={"ID":"183f60fc-1ba1-436a-a2f4-dcb1ddc4e735","Type":"ContainerStarted","Data":"6422d0637a4e07f3d9803621fc729585a61dc96b910768f599a1b91bc4c10f71"} Jan 22 08:30:57 crc kubenswrapper[4982]: I0122 08:30:57.629209 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-98brh" podStartSLOduration=3.059941388 podStartE2EDuration="5.629188099s" podCreationTimestamp="2026-01-22 08:30:52 +0000 UTC" firstStartedPulling="2026-01-22 08:30:54.527249469 +0000 UTC m=+9915.365887492" lastFinishedPulling="2026-01-22 08:30:57.09649618 +0000 UTC m=+9917.935134203" observedRunningTime="2026-01-22 08:30:57.626027423 +0000 UTC m=+9918.464665436" watchObservedRunningTime="2026-01-22 08:30:57.629188099 +0000 UTC m=+9918.467826102" Jan 22 08:30:58 crc kubenswrapper[4982]: I0122 08:30:58.630301 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mkj58" event={"ID":"23075886-e3d9-4451-9384-7be2259f4a9c","Type":"ContainerStarted","Data":"353b71ad4cf632c9b797f788826b9e56d28f8fdfe2eded01a0af3f143cb02f04"} Jan 22 08:31:01 crc kubenswrapper[4982]: I0122 08:31:01.676044 4982 generic.go:334] "Generic (PLEG): container finished" podID="23075886-e3d9-4451-9384-7be2259f4a9c" containerID="353b71ad4cf632c9b797f788826b9e56d28f8fdfe2eded01a0af3f143cb02f04" exitCode=0 Jan 22 08:31:01 crc kubenswrapper[4982]: I0122 08:31:01.676965 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mkj58" event={"ID":"23075886-e3d9-4451-9384-7be2259f4a9c","Type":"ContainerDied","Data":"353b71ad4cf632c9b797f788826b9e56d28f8fdfe2eded01a0af3f143cb02f04"} Jan 22 08:31:03 crc kubenswrapper[4982]: I0122 08:31:03.041884 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-98brh" Jan 22 08:31:03 crc kubenswrapper[4982]: I0122 08:31:03.042428 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-98brh" Jan 22 08:31:03 crc kubenswrapper[4982]: I0122 08:31:03.102880 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-98brh" Jan 22 08:31:03 crc kubenswrapper[4982]: I0122 08:31:03.696285 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mkj58" event={"ID":"23075886-e3d9-4451-9384-7be2259f4a9c","Type":"ContainerStarted","Data":"2e3a9b694555311fecdcb621a2f41997777b8c124be46db160e887285ac6ae6d"} Jan 22 08:31:03 crc kubenswrapper[4982]: I0122 08:31:03.724230 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mkj58" podStartSLOduration=2.744994821 podStartE2EDuration="8.724210957s" podCreationTimestamp="2026-01-22 08:30:55 +0000 UTC" firstStartedPulling="2026-01-22 08:30:56.592842728 +0000 UTC m=+9917.431480731" lastFinishedPulling="2026-01-22 08:31:02.572058864 +0000 UTC m=+9923.410696867" observedRunningTime="2026-01-22 08:31:03.717317269 +0000 UTC m=+9924.555955282" watchObservedRunningTime="2026-01-22 08:31:03.724210957 +0000 UTC m=+9924.562848960" Jan 22 08:31:03 crc kubenswrapper[4982]: I0122 08:31:03.762840 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-98brh" Jan 22 08:31:05 crc kubenswrapper[4982]: I0122 08:31:05.426707 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-operators-mkj58" Jan 22 08:31:05 crc kubenswrapper[4982]: I0122 08:31:05.427545 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mkj58" Jan 22 08:31:05 crc kubenswrapper[4982]: I0122 08:31:05.878833 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-98brh"] Jan 22 08:31:05 crc kubenswrapper[4982]: I0122 08:31:05.879130 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-98brh" podUID="183f60fc-1ba1-436a-a2f4-dcb1ddc4e735" containerName="registry-server" containerID="cri-o://6422d0637a4e07f3d9803621fc729585a61dc96b910768f599a1b91bc4c10f71" gracePeriod=2 Jan 22 08:31:06 crc kubenswrapper[4982]: I0122 08:31:06.423771 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-98brh" Jan 22 08:31:06 crc kubenswrapper[4982]: I0122 08:31:06.452397 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/183f60fc-1ba1-436a-a2f4-dcb1ddc4e735-catalog-content\") pod \"183f60fc-1ba1-436a-a2f4-dcb1ddc4e735\" (UID: \"183f60fc-1ba1-436a-a2f4-dcb1ddc4e735\") " Jan 22 08:31:06 crc kubenswrapper[4982]: I0122 08:31:06.452501 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ctqfc\" (UniqueName: \"kubernetes.io/projected/183f60fc-1ba1-436a-a2f4-dcb1ddc4e735-kube-api-access-ctqfc\") pod \"183f60fc-1ba1-436a-a2f4-dcb1ddc4e735\" (UID: \"183f60fc-1ba1-436a-a2f4-dcb1ddc4e735\") " Jan 22 08:31:06 crc kubenswrapper[4982]: I0122 08:31:06.452618 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/183f60fc-1ba1-436a-a2f4-dcb1ddc4e735-utilities\") pod \"183f60fc-1ba1-436a-a2f4-dcb1ddc4e735\" (UID: \"183f60fc-1ba1-436a-a2f4-dcb1ddc4e735\") " Jan 22 08:31:06 crc kubenswrapper[4982]: I0122 08:31:06.453671 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/183f60fc-1ba1-436a-a2f4-dcb1ddc4e735-utilities" (OuterVolumeSpecName: "utilities") pod "183f60fc-1ba1-436a-a2f4-dcb1ddc4e735" (UID: "183f60fc-1ba1-436a-a2f4-dcb1ddc4e735"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 08:31:06 crc kubenswrapper[4982]: I0122 08:31:06.471371 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/183f60fc-1ba1-436a-a2f4-dcb1ddc4e735-kube-api-access-ctqfc" (OuterVolumeSpecName: "kube-api-access-ctqfc") pod "183f60fc-1ba1-436a-a2f4-dcb1ddc4e735" (UID: "183f60fc-1ba1-436a-a2f4-dcb1ddc4e735"). InnerVolumeSpecName "kube-api-access-ctqfc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 08:31:06 crc kubenswrapper[4982]: I0122 08:31:06.508582 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-mkj58" podUID="23075886-e3d9-4451-9384-7be2259f4a9c" containerName="registry-server" probeResult="failure" output=< Jan 22 08:31:06 crc kubenswrapper[4982]: timeout: failed to connect service ":50051" within 1s Jan 22 08:31:06 crc kubenswrapper[4982]: > Jan 22 08:31:06 crc kubenswrapper[4982]: I0122 08:31:06.521618 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/183f60fc-1ba1-436a-a2f4-dcb1ddc4e735-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "183f60fc-1ba1-436a-a2f4-dcb1ddc4e735" (UID: "183f60fc-1ba1-436a-a2f4-dcb1ddc4e735"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 08:31:06 crc kubenswrapper[4982]: I0122 08:31:06.555557 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/183f60fc-1ba1-436a-a2f4-dcb1ddc4e735-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 08:31:06 crc kubenswrapper[4982]: I0122 08:31:06.555638 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/183f60fc-1ba1-436a-a2f4-dcb1ddc4e735-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 08:31:06 crc kubenswrapper[4982]: I0122 08:31:06.555692 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ctqfc\" (UniqueName: \"kubernetes.io/projected/183f60fc-1ba1-436a-a2f4-dcb1ddc4e735-kube-api-access-ctqfc\") on node \"crc\" DevicePath \"\"" Jan 22 08:31:06 crc kubenswrapper[4982]: I0122 08:31:06.726935 4982 generic.go:334] "Generic (PLEG): container finished" podID="183f60fc-1ba1-436a-a2f4-dcb1ddc4e735" containerID="6422d0637a4e07f3d9803621fc729585a61dc96b910768f599a1b91bc4c10f71" exitCode=0 Jan 22 08:31:06 crc kubenswrapper[4982]: I0122 08:31:06.727033 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-98brh" Jan 22 08:31:06 crc kubenswrapper[4982]: I0122 08:31:06.727061 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-98brh" event={"ID":"183f60fc-1ba1-436a-a2f4-dcb1ddc4e735","Type":"ContainerDied","Data":"6422d0637a4e07f3d9803621fc729585a61dc96b910768f599a1b91bc4c10f71"} Jan 22 08:31:06 crc kubenswrapper[4982]: I0122 08:31:06.727466 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-98brh" event={"ID":"183f60fc-1ba1-436a-a2f4-dcb1ddc4e735","Type":"ContainerDied","Data":"8b6a3f01aeb00ad0cbfcb33b66576e9b5f0b4f24b47e1485c620fdc76582b5ce"} Jan 22 08:31:06 crc kubenswrapper[4982]: I0122 08:31:06.727498 4982 scope.go:117] "RemoveContainer" containerID="6422d0637a4e07f3d9803621fc729585a61dc96b910768f599a1b91bc4c10f71" Jan 22 08:31:06 crc kubenswrapper[4982]: I0122 08:31:06.765266 4982 scope.go:117] "RemoveContainer" containerID="19f136cb01c5e8964cc4b2cd661b2900a0948257e32b55e896a6a9bfcdd20f6a" Jan 22 08:31:06 crc kubenswrapper[4982]: I0122 08:31:06.768995 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-98brh"] Jan 22 08:31:06 crc kubenswrapper[4982]: I0122 08:31:06.789300 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-98brh"] Jan 22 08:31:06 crc kubenswrapper[4982]: I0122 08:31:06.795256 4982 scope.go:117] "RemoveContainer" containerID="15a0d768a07678d91e2cc1f2310a57e1dc6b6db23dfee14d0e58fd88ca1c5dce" Jan 22 08:31:06 crc kubenswrapper[4982]: I0122 08:31:06.834000 4982 scope.go:117] "RemoveContainer" containerID="6422d0637a4e07f3d9803621fc729585a61dc96b910768f599a1b91bc4c10f71" Jan 22 08:31:06 crc kubenswrapper[4982]: E0122 08:31:06.834493 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6422d0637a4e07f3d9803621fc729585a61dc96b910768f599a1b91bc4c10f71\": container with ID starting with 6422d0637a4e07f3d9803621fc729585a61dc96b910768f599a1b91bc4c10f71 not found: ID does not exist" containerID="6422d0637a4e07f3d9803621fc729585a61dc96b910768f599a1b91bc4c10f71" Jan 22 08:31:06 crc kubenswrapper[4982]: I0122 08:31:06.834568 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6422d0637a4e07f3d9803621fc729585a61dc96b910768f599a1b91bc4c10f71"} err="failed to get container status \"6422d0637a4e07f3d9803621fc729585a61dc96b910768f599a1b91bc4c10f71\": rpc error: code = NotFound desc = could not find container \"6422d0637a4e07f3d9803621fc729585a61dc96b910768f599a1b91bc4c10f71\": container with ID starting with 6422d0637a4e07f3d9803621fc729585a61dc96b910768f599a1b91bc4c10f71 not found: ID does not exist" Jan 22 08:31:06 crc kubenswrapper[4982]: I0122 08:31:06.834614 4982 scope.go:117] "RemoveContainer" containerID="19f136cb01c5e8964cc4b2cd661b2900a0948257e32b55e896a6a9bfcdd20f6a" Jan 22 08:31:06 crc kubenswrapper[4982]: E0122 08:31:06.835297 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19f136cb01c5e8964cc4b2cd661b2900a0948257e32b55e896a6a9bfcdd20f6a\": container with ID starting with 19f136cb01c5e8964cc4b2cd661b2900a0948257e32b55e896a6a9bfcdd20f6a not found: ID does not exist" containerID="19f136cb01c5e8964cc4b2cd661b2900a0948257e32b55e896a6a9bfcdd20f6a" Jan 22 08:31:06 crc kubenswrapper[4982]: I0122 08:31:06.835337 4982 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19f136cb01c5e8964cc4b2cd661b2900a0948257e32b55e896a6a9bfcdd20f6a"} err="failed to get container status \"19f136cb01c5e8964cc4b2cd661b2900a0948257e32b55e896a6a9bfcdd20f6a\": rpc error: code = NotFound desc = could not find container \"19f136cb01c5e8964cc4b2cd661b2900a0948257e32b55e896a6a9bfcdd20f6a\": container with ID starting with 19f136cb01c5e8964cc4b2cd661b2900a0948257e32b55e896a6a9bfcdd20f6a not found: ID does not exist" Jan 22 08:31:06 crc kubenswrapper[4982]: I0122 08:31:06.835385 4982 scope.go:117] "RemoveContainer" containerID="15a0d768a07678d91e2cc1f2310a57e1dc6b6db23dfee14d0e58fd88ca1c5dce" Jan 22 08:31:06 crc kubenswrapper[4982]: E0122 08:31:06.835748 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15a0d768a07678d91e2cc1f2310a57e1dc6b6db23dfee14d0e58fd88ca1c5dce\": container with ID starting with 15a0d768a07678d91e2cc1f2310a57e1dc6b6db23dfee14d0e58fd88ca1c5dce not found: ID does not exist" containerID="15a0d768a07678d91e2cc1f2310a57e1dc6b6db23dfee14d0e58fd88ca1c5dce" Jan 22 08:31:06 crc kubenswrapper[4982]: I0122 08:31:06.835803 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15a0d768a07678d91e2cc1f2310a57e1dc6b6db23dfee14d0e58fd88ca1c5dce"} err="failed to get container status \"15a0d768a07678d91e2cc1f2310a57e1dc6b6db23dfee14d0e58fd88ca1c5dce\": rpc error: code = NotFound desc = could not find container \"15a0d768a07678d91e2cc1f2310a57e1dc6b6db23dfee14d0e58fd88ca1c5dce\": container with ID starting with 15a0d768a07678d91e2cc1f2310a57e1dc6b6db23dfee14d0e58fd88ca1c5dce not found: ID does not exist" Jan 22 08:31:07 crc kubenswrapper[4982]: I0122 08:31:07.730084 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="183f60fc-1ba1-436a-a2f4-dcb1ddc4e735" path="/var/lib/kubelet/pods/183f60fc-1ba1-436a-a2f4-dcb1ddc4e735/volumes" Jan 22 08:31:15 crc kubenswrapper[4982]: I0122 08:31:15.509682 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mkj58" Jan 22 08:31:15 crc kubenswrapper[4982]: I0122 08:31:15.597224 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mkj58" Jan 22 08:31:16 crc kubenswrapper[4982]: I0122 08:31:16.162968 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mkj58"] Jan 22 08:31:16 crc kubenswrapper[4982]: I0122 08:31:16.871760 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mkj58" podUID="23075886-e3d9-4451-9384-7be2259f4a9c" containerName="registry-server" containerID="cri-o://2e3a9b694555311fecdcb621a2f41997777b8c124be46db160e887285ac6ae6d" gracePeriod=2 Jan 22 08:31:17 crc kubenswrapper[4982]: I0122 08:31:17.404521 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mkj58" Jan 22 08:31:17 crc kubenswrapper[4982]: I0122 08:31:17.525947 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23075886-e3d9-4451-9384-7be2259f4a9c-utilities\") pod \"23075886-e3d9-4451-9384-7be2259f4a9c\" (UID: \"23075886-e3d9-4451-9384-7be2259f4a9c\") " Jan 22 08:31:17 crc kubenswrapper[4982]: I0122 08:31:17.525994 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23075886-e3d9-4451-9384-7be2259f4a9c-catalog-content\") pod \"23075886-e3d9-4451-9384-7be2259f4a9c\" (UID: \"23075886-e3d9-4451-9384-7be2259f4a9c\") " Jan 22 08:31:17 crc kubenswrapper[4982]: I0122 08:31:17.526104 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-684rw\" (UniqueName: \"kubernetes.io/projected/23075886-e3d9-4451-9384-7be2259f4a9c-kube-api-access-684rw\") pod \"23075886-e3d9-4451-9384-7be2259f4a9c\" (UID: \"23075886-e3d9-4451-9384-7be2259f4a9c\") " Jan 22 08:31:17 crc kubenswrapper[4982]: I0122 08:31:17.528029 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23075886-e3d9-4451-9384-7be2259f4a9c-utilities" (OuterVolumeSpecName: "utilities") pod "23075886-e3d9-4451-9384-7be2259f4a9c" (UID: "23075886-e3d9-4451-9384-7be2259f4a9c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 08:31:17 crc kubenswrapper[4982]: I0122 08:31:17.537226 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23075886-e3d9-4451-9384-7be2259f4a9c-kube-api-access-684rw" (OuterVolumeSpecName: "kube-api-access-684rw") pod "23075886-e3d9-4451-9384-7be2259f4a9c" (UID: "23075886-e3d9-4451-9384-7be2259f4a9c"). InnerVolumeSpecName "kube-api-access-684rw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 08:31:17 crc kubenswrapper[4982]: I0122 08:31:17.628690 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-684rw\" (UniqueName: \"kubernetes.io/projected/23075886-e3d9-4451-9384-7be2259f4a9c-kube-api-access-684rw\") on node \"crc\" DevicePath \"\"" Jan 22 08:31:17 crc kubenswrapper[4982]: I0122 08:31:17.629025 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23075886-e3d9-4451-9384-7be2259f4a9c-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 08:31:17 crc kubenswrapper[4982]: I0122 08:31:17.655796 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23075886-e3d9-4451-9384-7be2259f4a9c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "23075886-e3d9-4451-9384-7be2259f4a9c" (UID: "23075886-e3d9-4451-9384-7be2259f4a9c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 08:31:17 crc kubenswrapper[4982]: I0122 08:31:17.731473 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23075886-e3d9-4451-9384-7be2259f4a9c-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 08:31:17 crc kubenswrapper[4982]: I0122 08:31:17.888382 4982 generic.go:334] "Generic (PLEG): container finished" podID="23075886-e3d9-4451-9384-7be2259f4a9c" containerID="2e3a9b694555311fecdcb621a2f41997777b8c124be46db160e887285ac6ae6d" exitCode=0 Jan 22 08:31:17 crc kubenswrapper[4982]: I0122 08:31:17.888423 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mkj58" event={"ID":"23075886-e3d9-4451-9384-7be2259f4a9c","Type":"ContainerDied","Data":"2e3a9b694555311fecdcb621a2f41997777b8c124be46db160e887285ac6ae6d"} Jan 22 08:31:17 crc kubenswrapper[4982]: I0122 08:31:17.888449 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mkj58" event={"ID":"23075886-e3d9-4451-9384-7be2259f4a9c","Type":"ContainerDied","Data":"5dc0971f6f313d0f6d62484077dc251c6fd35521af98107bbcee1a91ee194ed1"} Jan 22 08:31:17 crc kubenswrapper[4982]: I0122 08:31:17.888464 4982 scope.go:117] "RemoveContainer" containerID="2e3a9b694555311fecdcb621a2f41997777b8c124be46db160e887285ac6ae6d" Jan 22 08:31:17 crc kubenswrapper[4982]: I0122 08:31:17.888593 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mkj58" Jan 22 08:31:17 crc kubenswrapper[4982]: I0122 08:31:17.917823 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mkj58"] Jan 22 08:31:17 crc kubenswrapper[4982]: I0122 08:31:17.920596 4982 scope.go:117] "RemoveContainer" containerID="353b71ad4cf632c9b797f788826b9e56d28f8fdfe2eded01a0af3f143cb02f04" Jan 22 08:31:17 crc kubenswrapper[4982]: I0122 08:31:17.931717 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mkj58"] Jan 22 08:31:17 crc kubenswrapper[4982]: I0122 08:31:17.948977 4982 scope.go:117] "RemoveContainer" containerID="74f4013fad6ff06ba2927e6b2f0d772a67b4772c4d1e839113837c9f63fa5154" Jan 22 08:31:17 crc kubenswrapper[4982]: I0122 08:31:17.990145 4982 scope.go:117] "RemoveContainer" containerID="2e3a9b694555311fecdcb621a2f41997777b8c124be46db160e887285ac6ae6d" Jan 22 08:31:17 crc kubenswrapper[4982]: E0122 08:31:17.990613 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e3a9b694555311fecdcb621a2f41997777b8c124be46db160e887285ac6ae6d\": container with ID starting with 2e3a9b694555311fecdcb621a2f41997777b8c124be46db160e887285ac6ae6d not found: ID does not exist" containerID="2e3a9b694555311fecdcb621a2f41997777b8c124be46db160e887285ac6ae6d" Jan 22 08:31:17 crc kubenswrapper[4982]: I0122 08:31:17.990654 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e3a9b694555311fecdcb621a2f41997777b8c124be46db160e887285ac6ae6d"} err="failed to get container status \"2e3a9b694555311fecdcb621a2f41997777b8c124be46db160e887285ac6ae6d\": rpc error: code = NotFound desc = could not find container \"2e3a9b694555311fecdcb621a2f41997777b8c124be46db160e887285ac6ae6d\": container with ID starting with 2e3a9b694555311fecdcb621a2f41997777b8c124be46db160e887285ac6ae6d not found: ID does not exist" Jan 22 08:31:17 crc 
kubenswrapper[4982]: I0122 08:31:17.990673 4982 scope.go:117] "RemoveContainer" containerID="353b71ad4cf632c9b797f788826b9e56d28f8fdfe2eded01a0af3f143cb02f04" Jan 22 08:31:17 crc kubenswrapper[4982]: E0122 08:31:17.991010 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"353b71ad4cf632c9b797f788826b9e56d28f8fdfe2eded01a0af3f143cb02f04\": container with ID starting with 353b71ad4cf632c9b797f788826b9e56d28f8fdfe2eded01a0af3f143cb02f04 not found: ID does not exist" containerID="353b71ad4cf632c9b797f788826b9e56d28f8fdfe2eded01a0af3f143cb02f04" Jan 22 08:31:17 crc kubenswrapper[4982]: I0122 08:31:17.991039 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"353b71ad4cf632c9b797f788826b9e56d28f8fdfe2eded01a0af3f143cb02f04"} err="failed to get container status \"353b71ad4cf632c9b797f788826b9e56d28f8fdfe2eded01a0af3f143cb02f04\": rpc error: code = NotFound desc = could not find container \"353b71ad4cf632c9b797f788826b9e56d28f8fdfe2eded01a0af3f143cb02f04\": container with ID starting with 353b71ad4cf632c9b797f788826b9e56d28f8fdfe2eded01a0af3f143cb02f04 not found: ID does not exist" Jan 22 08:31:17 crc kubenswrapper[4982]: I0122 08:31:17.991056 4982 scope.go:117] "RemoveContainer" containerID="74f4013fad6ff06ba2927e6b2f0d772a67b4772c4d1e839113837c9f63fa5154" Jan 22 08:31:17 crc kubenswrapper[4982]: E0122 08:31:17.991458 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74f4013fad6ff06ba2927e6b2f0d772a67b4772c4d1e839113837c9f63fa5154\": container with ID starting with 74f4013fad6ff06ba2927e6b2f0d772a67b4772c4d1e839113837c9f63fa5154 not found: ID does not exist" containerID="74f4013fad6ff06ba2927e6b2f0d772a67b4772c4d1e839113837c9f63fa5154" Jan 22 08:31:17 crc kubenswrapper[4982]: I0122 08:31:17.991504 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74f4013fad6ff06ba2927e6b2f0d772a67b4772c4d1e839113837c9f63fa5154"} err="failed to get container status \"74f4013fad6ff06ba2927e6b2f0d772a67b4772c4d1e839113837c9f63fa5154\": rpc error: code = NotFound desc = could not find container \"74f4013fad6ff06ba2927e6b2f0d772a67b4772c4d1e839113837c9f63fa5154\": container with ID starting with 74f4013fad6ff06ba2927e6b2f0d772a67b4772c4d1e839113837c9f63fa5154 not found: ID does not exist" Jan 22 08:31:19 crc kubenswrapper[4982]: I0122 08:31:19.749759 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23075886-e3d9-4451-9384-7be2259f4a9c" path="/var/lib/kubelet/pods/23075886-e3d9-4451-9384-7be2259f4a9c/volumes" Jan 22 08:33:11 crc kubenswrapper[4982]: I0122 08:33:11.734288 4982 scope.go:117] "RemoveContainer" containerID="0734dd8f636bd0378772998b2b13e04478a908a6dc007df4e0abe1d3fdf6cece" Jan 22 08:33:18 crc kubenswrapper[4982]: I0122 08:33:18.973652 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 08:33:18 crc kubenswrapper[4982]: I0122 08:33:18.974278 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 08:33:48 crc kubenswrapper[4982]: I0122 08:33:48.974167 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 08:33:48 crc kubenswrapper[4982]: I0122 08:33:48.975173 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 08:34:11 crc kubenswrapper[4982]: I0122 08:34:11.799249 4982 scope.go:117] "RemoveContainer" containerID="2e9b8c5670ba2bb1b59b6f0f086c659006d0c9934519faf91ae9e82cb1a1cffa" Jan 22 08:34:11 crc kubenswrapper[4982]: I0122 08:34:11.887346 4982 scope.go:117] "RemoveContainer" containerID="18a103c7b4cb44fa8f52565e7cd4e8fd2a180cacf4895892a4f78c352eec6718" Jan 22 08:34:18 crc kubenswrapper[4982]: I0122 08:34:18.973625 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 08:34:18 crc kubenswrapper[4982]: I0122 08:34:18.974295 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 08:34:18 crc kubenswrapper[4982]: I0122 08:34:18.974340 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 08:34:18 crc kubenswrapper[4982]: I0122 08:34:18.975097 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3c863d0caac21e6c4340c3e6f8ed5e1879e04b9c54ec97897ba983925af2095d"} pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 08:34:18 crc kubenswrapper[4982]: I0122 08:34:18.975153 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" containerID="cri-o://3c863d0caac21e6c4340c3e6f8ed5e1879e04b9c54ec97897ba983925af2095d" gracePeriod=600 Jan 22 08:34:20 crc kubenswrapper[4982]: I0122 08:34:20.067305 4982 generic.go:334] "Generic (PLEG): container finished" podID="2829369e-72ba-4637-853b-88f5cf242a0e" containerID="3c863d0caac21e6c4340c3e6f8ed5e1879e04b9c54ec97897ba983925af2095d" exitCode=0 Jan 22 08:34:20 crc kubenswrapper[4982]: I0122 08:34:20.067376 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" 
event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerDied","Data":"3c863d0caac21e6c4340c3e6f8ed5e1879e04b9c54ec97897ba983925af2095d"} Jan 22 08:34:20 crc kubenswrapper[4982]: I0122 08:34:20.067836 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"f22e98b8cf5a7eb41b64f748199e93d0d5135ebbb76b5acbef9d1ce457671181"} Jan 22 08:34:20 crc kubenswrapper[4982]: I0122 08:34:20.067945 4982 scope.go:117] "RemoveContainer" containerID="aca388898cd21a349b29dc1e993546f5c68af05cbd20badd7b2970ef102aacf3" Jan 22 08:34:51 crc kubenswrapper[4982]: I0122 08:34:51.243272 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-ngfhx"] Jan 22 08:34:51 crc kubenswrapper[4982]: E0122 08:34:51.252804 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="183f60fc-1ba1-436a-a2f4-dcb1ddc4e735" containerName="extract-utilities" Jan 22 08:34:51 crc kubenswrapper[4982]: I0122 08:34:51.252822 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="183f60fc-1ba1-436a-a2f4-dcb1ddc4e735" containerName="extract-utilities" Jan 22 08:34:51 crc kubenswrapper[4982]: E0122 08:34:51.252841 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="183f60fc-1ba1-436a-a2f4-dcb1ddc4e735" containerName="extract-content" Jan 22 08:34:51 crc kubenswrapper[4982]: I0122 08:34:51.252847 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="183f60fc-1ba1-436a-a2f4-dcb1ddc4e735" containerName="extract-content" Jan 22 08:34:51 crc kubenswrapper[4982]: E0122 08:34:51.252881 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23075886-e3d9-4451-9384-7be2259f4a9c" containerName="registry-server" Jan 22 08:34:51 crc kubenswrapper[4982]: I0122 08:34:51.252887 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="23075886-e3d9-4451-9384-7be2259f4a9c" containerName="registry-server" Jan 22 08:34:51 crc kubenswrapper[4982]: E0122 08:34:51.252897 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="183f60fc-1ba1-436a-a2f4-dcb1ddc4e735" containerName="registry-server" Jan 22 08:34:51 crc kubenswrapper[4982]: I0122 08:34:51.252902 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="183f60fc-1ba1-436a-a2f4-dcb1ddc4e735" containerName="registry-server" Jan 22 08:34:51 crc kubenswrapper[4982]: E0122 08:34:51.252921 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23075886-e3d9-4451-9384-7be2259f4a9c" containerName="extract-content" Jan 22 08:34:51 crc kubenswrapper[4982]: I0122 08:34:51.252927 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="23075886-e3d9-4451-9384-7be2259f4a9c" containerName="extract-content" Jan 22 08:34:51 crc kubenswrapper[4982]: E0122 08:34:51.252933 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23075886-e3d9-4451-9384-7be2259f4a9c" containerName="extract-utilities" Jan 22 08:34:51 crc kubenswrapper[4982]: I0122 08:34:51.252939 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="23075886-e3d9-4451-9384-7be2259f4a9c" containerName="extract-utilities" Jan 22 08:34:51 crc kubenswrapper[4982]: I0122 08:34:51.253143 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="23075886-e3d9-4451-9384-7be2259f4a9c" containerName="registry-server" Jan 22 08:34:51 crc kubenswrapper[4982]: I0122 08:34:51.253153 4982 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="183f60fc-1ba1-436a-a2f4-dcb1ddc4e735" containerName="registry-server" Jan 22 08:34:51 crc kubenswrapper[4982]: I0122 08:34:51.254600 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ngfhx" Jan 22 08:34:51 crc kubenswrapper[4982]: I0122 08:34:51.302989 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ngfhx"] Jan 22 08:34:51 crc kubenswrapper[4982]: I0122 08:34:51.355238 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pv4gf\" (UniqueName: \"kubernetes.io/projected/97bc7c03-c26d-4ae4-9553-9ba23da64bd3-kube-api-access-pv4gf\") pod \"certified-operators-ngfhx\" (UID: \"97bc7c03-c26d-4ae4-9553-9ba23da64bd3\") " pod="openshift-marketplace/certified-operators-ngfhx" Jan 22 08:34:51 crc kubenswrapper[4982]: I0122 08:34:51.355392 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97bc7c03-c26d-4ae4-9553-9ba23da64bd3-utilities\") pod \"certified-operators-ngfhx\" (UID: \"97bc7c03-c26d-4ae4-9553-9ba23da64bd3\") " pod="openshift-marketplace/certified-operators-ngfhx" Jan 22 08:34:51 crc kubenswrapper[4982]: I0122 08:34:51.355426 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97bc7c03-c26d-4ae4-9553-9ba23da64bd3-catalog-content\") pod \"certified-operators-ngfhx\" (UID: \"97bc7c03-c26d-4ae4-9553-9ba23da64bd3\") " pod="openshift-marketplace/certified-operators-ngfhx" Jan 22 08:34:51 crc kubenswrapper[4982]: I0122 08:34:51.456798 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97bc7c03-c26d-4ae4-9553-9ba23da64bd3-utilities\") pod \"certified-operators-ngfhx\" (UID: \"97bc7c03-c26d-4ae4-9553-9ba23da64bd3\") " pod="openshift-marketplace/certified-operators-ngfhx" Jan 22 08:34:51 crc kubenswrapper[4982]: I0122 08:34:51.456866 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97bc7c03-c26d-4ae4-9553-9ba23da64bd3-catalog-content\") pod \"certified-operators-ngfhx\" (UID: \"97bc7c03-c26d-4ae4-9553-9ba23da64bd3\") " pod="openshift-marketplace/certified-operators-ngfhx" Jan 22 08:34:51 crc kubenswrapper[4982]: I0122 08:34:51.456995 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pv4gf\" (UniqueName: \"kubernetes.io/projected/97bc7c03-c26d-4ae4-9553-9ba23da64bd3-kube-api-access-pv4gf\") pod \"certified-operators-ngfhx\" (UID: \"97bc7c03-c26d-4ae4-9553-9ba23da64bd3\") " pod="openshift-marketplace/certified-operators-ngfhx" Jan 22 08:34:51 crc kubenswrapper[4982]: I0122 08:34:51.457425 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97bc7c03-c26d-4ae4-9553-9ba23da64bd3-utilities\") pod \"certified-operators-ngfhx\" (UID: \"97bc7c03-c26d-4ae4-9553-9ba23da64bd3\") " pod="openshift-marketplace/certified-operators-ngfhx" Jan 22 08:34:51 crc kubenswrapper[4982]: I0122 08:34:51.457467 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97bc7c03-c26d-4ae4-9553-9ba23da64bd3-catalog-content\") pod \"certified-operators-ngfhx\" (UID: 
\"97bc7c03-c26d-4ae4-9553-9ba23da64bd3\") " pod="openshift-marketplace/certified-operators-ngfhx" Jan 22 08:34:51 crc kubenswrapper[4982]: I0122 08:34:51.744982 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pv4gf\" (UniqueName: \"kubernetes.io/projected/97bc7c03-c26d-4ae4-9553-9ba23da64bd3-kube-api-access-pv4gf\") pod \"certified-operators-ngfhx\" (UID: \"97bc7c03-c26d-4ae4-9553-9ba23da64bd3\") " pod="openshift-marketplace/certified-operators-ngfhx" Jan 22 08:34:51 crc kubenswrapper[4982]: I0122 08:34:51.897938 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ngfhx" Jan 22 08:34:52 crc kubenswrapper[4982]: I0122 08:34:52.326234 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ngfhx"] Jan 22 08:34:52 crc kubenswrapper[4982]: I0122 08:34:52.472104 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ngfhx" event={"ID":"97bc7c03-c26d-4ae4-9553-9ba23da64bd3","Type":"ContainerStarted","Data":"df1ab0d75459685d11a522c958e5ce9c4395916ccf980b50c3f661461fd69868"} Jan 22 08:34:53 crc kubenswrapper[4982]: I0122 08:34:53.484391 4982 generic.go:334] "Generic (PLEG): container finished" podID="97bc7c03-c26d-4ae4-9553-9ba23da64bd3" containerID="152c528c166538baca8b208ff2844eabf3e2a2cfb0263756984a50b51a0a3e7c" exitCode=0 Jan 22 08:34:53 crc kubenswrapper[4982]: I0122 08:34:53.485009 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ngfhx" event={"ID":"97bc7c03-c26d-4ae4-9553-9ba23da64bd3","Type":"ContainerDied","Data":"152c528c166538baca8b208ff2844eabf3e2a2cfb0263756984a50b51a0a3e7c"} Jan 22 08:34:53 crc kubenswrapper[4982]: I0122 08:34:53.488438 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 08:34:55 crc kubenswrapper[4982]: I0122 08:34:55.512108 4982 generic.go:334] "Generic (PLEG): container finished" podID="97bc7c03-c26d-4ae4-9553-9ba23da64bd3" containerID="56bc86c778b374b879c9dd1536a768d11516a42a0df6c64d94ec4ce1e12c0faa" exitCode=0 Jan 22 08:34:55 crc kubenswrapper[4982]: I0122 08:34:55.512222 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ngfhx" event={"ID":"97bc7c03-c26d-4ae4-9553-9ba23da64bd3","Type":"ContainerDied","Data":"56bc86c778b374b879c9dd1536a768d11516a42a0df6c64d94ec4ce1e12c0faa"} Jan 22 08:34:56 crc kubenswrapper[4982]: I0122 08:34:56.527738 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ngfhx" event={"ID":"97bc7c03-c26d-4ae4-9553-9ba23da64bd3","Type":"ContainerStarted","Data":"ddaa3d4ea3e4eefb9d5c5d1ea399336bd14b1309a82881a616d9b1bae7d2e6cd"} Jan 22 08:34:56 crc kubenswrapper[4982]: I0122 08:34:56.568532 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-ngfhx" podStartSLOduration=3.015597082 podStartE2EDuration="5.56850791s" podCreationTimestamp="2026-01-22 08:34:51 +0000 UTC" firstStartedPulling="2026-01-22 08:34:53.488049872 +0000 UTC m=+10154.326687885" lastFinishedPulling="2026-01-22 08:34:56.04096071 +0000 UTC m=+10156.879598713" observedRunningTime="2026-01-22 08:34:56.560876323 +0000 UTC m=+10157.399514356" watchObservedRunningTime="2026-01-22 08:34:56.56850791 +0000 UTC m=+10157.407145943" Jan 22 08:35:01 crc kubenswrapper[4982]: I0122 08:35:01.898271 4982 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-ngfhx" Jan 22 08:35:01 crc kubenswrapper[4982]: I0122 08:35:01.901423 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-ngfhx" Jan 22 08:35:01 crc kubenswrapper[4982]: I0122 08:35:01.985777 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-ngfhx" Jan 22 08:35:02 crc kubenswrapper[4982]: I0122 08:35:02.683438 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-ngfhx" Jan 22 08:35:02 crc kubenswrapper[4982]: I0122 08:35:02.751467 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ngfhx"] Jan 22 08:35:04 crc kubenswrapper[4982]: I0122 08:35:04.641323 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-ngfhx" podUID="97bc7c03-c26d-4ae4-9553-9ba23da64bd3" containerName="registry-server" containerID="cri-o://ddaa3d4ea3e4eefb9d5c5d1ea399336bd14b1309a82881a616d9b1bae7d2e6cd" gracePeriod=2 Jan 22 08:35:05 crc kubenswrapper[4982]: I0122 08:35:05.657672 4982 generic.go:334] "Generic (PLEG): container finished" podID="97bc7c03-c26d-4ae4-9553-9ba23da64bd3" containerID="ddaa3d4ea3e4eefb9d5c5d1ea399336bd14b1309a82881a616d9b1bae7d2e6cd" exitCode=0 Jan 22 08:35:05 crc kubenswrapper[4982]: I0122 08:35:05.657774 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ngfhx" event={"ID":"97bc7c03-c26d-4ae4-9553-9ba23da64bd3","Type":"ContainerDied","Data":"ddaa3d4ea3e4eefb9d5c5d1ea399336bd14b1309a82881a616d9b1bae7d2e6cd"} Jan 22 08:35:05 crc kubenswrapper[4982]: I0122 08:35:05.759516 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ngfhx" Jan 22 08:35:05 crc kubenswrapper[4982]: I0122 08:35:05.825336 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97bc7c03-c26d-4ae4-9553-9ba23da64bd3-catalog-content\") pod \"97bc7c03-c26d-4ae4-9553-9ba23da64bd3\" (UID: \"97bc7c03-c26d-4ae4-9553-9ba23da64bd3\") " Jan 22 08:35:05 crc kubenswrapper[4982]: I0122 08:35:05.825579 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pv4gf\" (UniqueName: \"kubernetes.io/projected/97bc7c03-c26d-4ae4-9553-9ba23da64bd3-kube-api-access-pv4gf\") pod \"97bc7c03-c26d-4ae4-9553-9ba23da64bd3\" (UID: \"97bc7c03-c26d-4ae4-9553-9ba23da64bd3\") " Jan 22 08:35:05 crc kubenswrapper[4982]: I0122 08:35:05.825646 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97bc7c03-c26d-4ae4-9553-9ba23da64bd3-utilities\") pod \"97bc7c03-c26d-4ae4-9553-9ba23da64bd3\" (UID: \"97bc7c03-c26d-4ae4-9553-9ba23da64bd3\") " Jan 22 08:35:05 crc kubenswrapper[4982]: I0122 08:35:05.826758 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/97bc7c03-c26d-4ae4-9553-9ba23da64bd3-utilities" (OuterVolumeSpecName: "utilities") pod "97bc7c03-c26d-4ae4-9553-9ba23da64bd3" (UID: "97bc7c03-c26d-4ae4-9553-9ba23da64bd3"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 08:35:05 crc kubenswrapper[4982]: I0122 08:35:05.836463 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97bc7c03-c26d-4ae4-9553-9ba23da64bd3-kube-api-access-pv4gf" (OuterVolumeSpecName: "kube-api-access-pv4gf") pod "97bc7c03-c26d-4ae4-9553-9ba23da64bd3" (UID: "97bc7c03-c26d-4ae4-9553-9ba23da64bd3"). InnerVolumeSpecName "kube-api-access-pv4gf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 08:35:05 crc kubenswrapper[4982]: I0122 08:35:05.880369 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/97bc7c03-c26d-4ae4-9553-9ba23da64bd3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "97bc7c03-c26d-4ae4-9553-9ba23da64bd3" (UID: "97bc7c03-c26d-4ae4-9553-9ba23da64bd3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 08:35:05 crc kubenswrapper[4982]: I0122 08:35:05.929126 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97bc7c03-c26d-4ae4-9553-9ba23da64bd3-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 08:35:05 crc kubenswrapper[4982]: I0122 08:35:05.929159 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pv4gf\" (UniqueName: \"kubernetes.io/projected/97bc7c03-c26d-4ae4-9553-9ba23da64bd3-kube-api-access-pv4gf\") on node \"crc\" DevicePath \"\"" Jan 22 08:35:05 crc kubenswrapper[4982]: I0122 08:35:05.929169 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97bc7c03-c26d-4ae4-9553-9ba23da64bd3-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 08:35:06 crc kubenswrapper[4982]: I0122 08:35:06.673184 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ngfhx" event={"ID":"97bc7c03-c26d-4ae4-9553-9ba23da64bd3","Type":"ContainerDied","Data":"df1ab0d75459685d11a522c958e5ce9c4395916ccf980b50c3f661461fd69868"} Jan 22 08:35:06 crc kubenswrapper[4982]: I0122 08:35:06.673266 4982 scope.go:117] "RemoveContainer" containerID="ddaa3d4ea3e4eefb9d5c5d1ea399336bd14b1309a82881a616d9b1bae7d2e6cd" Jan 22 08:35:06 crc kubenswrapper[4982]: I0122 08:35:06.673395 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ngfhx" Jan 22 08:35:06 crc kubenswrapper[4982]: I0122 08:35:06.722541 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ngfhx"] Jan 22 08:35:06 crc kubenswrapper[4982]: I0122 08:35:06.725671 4982 scope.go:117] "RemoveContainer" containerID="56bc86c778b374b879c9dd1536a768d11516a42a0df6c64d94ec4ce1e12c0faa" Jan 22 08:35:06 crc kubenswrapper[4982]: I0122 08:35:06.732492 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-ngfhx"] Jan 22 08:35:06 crc kubenswrapper[4982]: I0122 08:35:06.755283 4982 scope.go:117] "RemoveContainer" containerID="152c528c166538baca8b208ff2844eabf3e2a2cfb0263756984a50b51a0a3e7c" Jan 22 08:35:07 crc kubenswrapper[4982]: I0122 08:35:07.738977 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97bc7c03-c26d-4ae4-9553-9ba23da64bd3" path="/var/lib/kubelet/pods/97bc7c03-c26d-4ae4-9553-9ba23da64bd3/volumes" Jan 22 08:36:03 crc kubenswrapper[4982]: I0122 08:36:03.664780 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fbbcdb4d6nk65g" podUID="47ed3df3-a23e-4021-b786-b99d1b710639" containerName="manager" probeResult="failure" output="Get \"http://10.217.0.89:8081/readyz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 22 08:36:48 crc kubenswrapper[4982]: I0122 08:36:48.974527 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 08:36:48 crc kubenswrapper[4982]: I0122 08:36:48.975121 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 08:37:15 crc kubenswrapper[4982]: I0122 08:37:15.283555 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-85v7b"] Jan 22 08:37:15 crc kubenswrapper[4982]: E0122 08:37:15.284646 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97bc7c03-c26d-4ae4-9553-9ba23da64bd3" containerName="extract-content" Jan 22 08:37:15 crc kubenswrapper[4982]: I0122 08:37:15.284673 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="97bc7c03-c26d-4ae4-9553-9ba23da64bd3" containerName="extract-content" Jan 22 08:37:15 crc kubenswrapper[4982]: E0122 08:37:15.284689 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97bc7c03-c26d-4ae4-9553-9ba23da64bd3" containerName="extract-utilities" Jan 22 08:37:15 crc kubenswrapper[4982]: I0122 08:37:15.284697 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="97bc7c03-c26d-4ae4-9553-9ba23da64bd3" containerName="extract-utilities" Jan 22 08:37:15 crc kubenswrapper[4982]: E0122 08:37:15.284733 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97bc7c03-c26d-4ae4-9553-9ba23da64bd3" containerName="registry-server" Jan 22 08:37:15 crc kubenswrapper[4982]: I0122 08:37:15.284744 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="97bc7c03-c26d-4ae4-9553-9ba23da64bd3" 
containerName="registry-server" Jan 22 08:37:15 crc kubenswrapper[4982]: I0122 08:37:15.285004 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="97bc7c03-c26d-4ae4-9553-9ba23da64bd3" containerName="registry-server" Jan 22 08:37:15 crc kubenswrapper[4982]: I0122 08:37:15.286917 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-85v7b" Jan 22 08:37:15 crc kubenswrapper[4982]: I0122 08:37:15.311463 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-85v7b"] Jan 22 08:37:15 crc kubenswrapper[4982]: I0122 08:37:15.428762 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cefb7079-155c-4edf-8acf-5e918e26b4ed-utilities\") pod \"redhat-marketplace-85v7b\" (UID: \"cefb7079-155c-4edf-8acf-5e918e26b4ed\") " pod="openshift-marketplace/redhat-marketplace-85v7b" Jan 22 08:37:15 crc kubenswrapper[4982]: I0122 08:37:15.428830 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4s7mq\" (UniqueName: \"kubernetes.io/projected/cefb7079-155c-4edf-8acf-5e918e26b4ed-kube-api-access-4s7mq\") pod \"redhat-marketplace-85v7b\" (UID: \"cefb7079-155c-4edf-8acf-5e918e26b4ed\") " pod="openshift-marketplace/redhat-marketplace-85v7b" Jan 22 08:37:15 crc kubenswrapper[4982]: I0122 08:37:15.429015 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cefb7079-155c-4edf-8acf-5e918e26b4ed-catalog-content\") pod \"redhat-marketplace-85v7b\" (UID: \"cefb7079-155c-4edf-8acf-5e918e26b4ed\") " pod="openshift-marketplace/redhat-marketplace-85v7b" Jan 22 08:37:15 crc kubenswrapper[4982]: I0122 08:37:15.530916 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cefb7079-155c-4edf-8acf-5e918e26b4ed-catalog-content\") pod \"redhat-marketplace-85v7b\" (UID: \"cefb7079-155c-4edf-8acf-5e918e26b4ed\") " pod="openshift-marketplace/redhat-marketplace-85v7b" Jan 22 08:37:15 crc kubenswrapper[4982]: I0122 08:37:15.531024 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cefb7079-155c-4edf-8acf-5e918e26b4ed-utilities\") pod \"redhat-marketplace-85v7b\" (UID: \"cefb7079-155c-4edf-8acf-5e918e26b4ed\") " pod="openshift-marketplace/redhat-marketplace-85v7b" Jan 22 08:37:15 crc kubenswrapper[4982]: I0122 08:37:15.531063 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4s7mq\" (UniqueName: \"kubernetes.io/projected/cefb7079-155c-4edf-8acf-5e918e26b4ed-kube-api-access-4s7mq\") pod \"redhat-marketplace-85v7b\" (UID: \"cefb7079-155c-4edf-8acf-5e918e26b4ed\") " pod="openshift-marketplace/redhat-marketplace-85v7b" Jan 22 08:37:15 crc kubenswrapper[4982]: I0122 08:37:15.531801 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cefb7079-155c-4edf-8acf-5e918e26b4ed-catalog-content\") pod \"redhat-marketplace-85v7b\" (UID: \"cefb7079-155c-4edf-8acf-5e918e26b4ed\") " pod="openshift-marketplace/redhat-marketplace-85v7b" Jan 22 08:37:15 crc kubenswrapper[4982]: I0122 08:37:15.532034 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cefb7079-155c-4edf-8acf-5e918e26b4ed-utilities\") pod \"redhat-marketplace-85v7b\" (UID: \"cefb7079-155c-4edf-8acf-5e918e26b4ed\") " pod="openshift-marketplace/redhat-marketplace-85v7b" Jan 22 08:37:15 crc kubenswrapper[4982]: I0122 08:37:15.557283 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4s7mq\" (UniqueName: \"kubernetes.io/projected/cefb7079-155c-4edf-8acf-5e918e26b4ed-kube-api-access-4s7mq\") pod \"redhat-marketplace-85v7b\" (UID: \"cefb7079-155c-4edf-8acf-5e918e26b4ed\") " pod="openshift-marketplace/redhat-marketplace-85v7b" Jan 22 08:37:15 crc kubenswrapper[4982]: I0122 08:37:15.614781 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-85v7b" Jan 22 08:37:16 crc kubenswrapper[4982]: I0122 08:37:16.089872 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-85v7b"] Jan 22 08:37:16 crc kubenswrapper[4982]: W0122 08:37:16.093250 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcefb7079_155c_4edf_8acf_5e918e26b4ed.slice/crio-382e0bc4fb7e9cde90cb2cdc7d07ad1c21e308c274f92cde9124b951bd8e1dbd WatchSource:0}: Error finding container 382e0bc4fb7e9cde90cb2cdc7d07ad1c21e308c274f92cde9124b951bd8e1dbd: Status 404 returned error can't find the container with id 382e0bc4fb7e9cde90cb2cdc7d07ad1c21e308c274f92cde9124b951bd8e1dbd Jan 22 08:37:16 crc kubenswrapper[4982]: I0122 08:37:16.589287 4982 generic.go:334] "Generic (PLEG): container finished" podID="cefb7079-155c-4edf-8acf-5e918e26b4ed" containerID="0eef8a6c468d919d9a672ba073e9c8fed1921ba8d0f3fd885f384014749494a1" exitCode=0 Jan 22 08:37:16 crc kubenswrapper[4982]: I0122 08:37:16.589451 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-85v7b" event={"ID":"cefb7079-155c-4edf-8acf-5e918e26b4ed","Type":"ContainerDied","Data":"0eef8a6c468d919d9a672ba073e9c8fed1921ba8d0f3fd885f384014749494a1"} Jan 22 08:37:16 crc kubenswrapper[4982]: I0122 08:37:16.589667 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-85v7b" event={"ID":"cefb7079-155c-4edf-8acf-5e918e26b4ed","Type":"ContainerStarted","Data":"382e0bc4fb7e9cde90cb2cdc7d07ad1c21e308c274f92cde9124b951bd8e1dbd"} Jan 22 08:37:17 crc kubenswrapper[4982]: I0122 08:37:17.610746 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-85v7b" event={"ID":"cefb7079-155c-4edf-8acf-5e918e26b4ed","Type":"ContainerStarted","Data":"0afe118cf00f327d9be13b53bc33f3fb6e99b2e0e19693827e2825e27344d123"} Jan 22 08:37:18 crc kubenswrapper[4982]: I0122 08:37:18.627340 4982 generic.go:334] "Generic (PLEG): container finished" podID="cefb7079-155c-4edf-8acf-5e918e26b4ed" containerID="0afe118cf00f327d9be13b53bc33f3fb6e99b2e0e19693827e2825e27344d123" exitCode=0 Jan 22 08:37:18 crc kubenswrapper[4982]: I0122 08:37:18.627640 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-85v7b" event={"ID":"cefb7079-155c-4edf-8acf-5e918e26b4ed","Type":"ContainerDied","Data":"0afe118cf00f327d9be13b53bc33f3fb6e99b2e0e19693827e2825e27344d123"} Jan 22 08:37:18 crc kubenswrapper[4982]: I0122 08:37:18.974406 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: 
Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 08:37:18 crc kubenswrapper[4982]: I0122 08:37:18.974820 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 08:37:19 crc kubenswrapper[4982]: I0122 08:37:19.645789 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-85v7b" event={"ID":"cefb7079-155c-4edf-8acf-5e918e26b4ed","Type":"ContainerStarted","Data":"0ab7f143be99eef925907b8c4f6173eda36c8af164e901ce90388998d640f088"} Jan 22 08:37:19 crc kubenswrapper[4982]: I0122 08:37:19.679961 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-85v7b" podStartSLOduration=2.255137434 podStartE2EDuration="4.679930986s" podCreationTimestamp="2026-01-22 08:37:15 +0000 UTC" firstStartedPulling="2026-01-22 08:37:16.591741229 +0000 UTC m=+10297.430379232" lastFinishedPulling="2026-01-22 08:37:19.016534731 +0000 UTC m=+10299.855172784" observedRunningTime="2026-01-22 08:37:19.675390052 +0000 UTC m=+10300.514028075" watchObservedRunningTime="2026-01-22 08:37:19.679930986 +0000 UTC m=+10300.518569029" Jan 22 08:37:25 crc kubenswrapper[4982]: I0122 08:37:25.616052 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-85v7b" Jan 22 08:37:25 crc kubenswrapper[4982]: I0122 08:37:25.616559 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-85v7b" Jan 22 08:37:25 crc kubenswrapper[4982]: I0122 08:37:25.689023 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-85v7b" Jan 22 08:37:25 crc kubenswrapper[4982]: I0122 08:37:25.778391 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-85v7b" Jan 22 08:37:25 crc kubenswrapper[4982]: I0122 08:37:25.931957 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-85v7b"] Jan 22 08:37:27 crc kubenswrapper[4982]: I0122 08:37:27.756701 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-85v7b" podUID="cefb7079-155c-4edf-8acf-5e918e26b4ed" containerName="registry-server" containerID="cri-o://0ab7f143be99eef925907b8c4f6173eda36c8af164e901ce90388998d640f088" gracePeriod=2 Jan 22 08:37:28 crc kubenswrapper[4982]: I0122 08:37:28.376618 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-85v7b" Jan 22 08:37:28 crc kubenswrapper[4982]: I0122 08:37:28.494091 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cefb7079-155c-4edf-8acf-5e918e26b4ed-catalog-content\") pod \"cefb7079-155c-4edf-8acf-5e918e26b4ed\" (UID: \"cefb7079-155c-4edf-8acf-5e918e26b4ed\") " Jan 22 08:37:28 crc kubenswrapper[4982]: I0122 08:37:28.494191 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cefb7079-155c-4edf-8acf-5e918e26b4ed-utilities\") pod \"cefb7079-155c-4edf-8acf-5e918e26b4ed\" (UID: \"cefb7079-155c-4edf-8acf-5e918e26b4ed\") " Jan 22 08:37:28 crc kubenswrapper[4982]: I0122 08:37:28.494217 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4s7mq\" (UniqueName: \"kubernetes.io/projected/cefb7079-155c-4edf-8acf-5e918e26b4ed-kube-api-access-4s7mq\") pod \"cefb7079-155c-4edf-8acf-5e918e26b4ed\" (UID: \"cefb7079-155c-4edf-8acf-5e918e26b4ed\") " Jan 22 08:37:28 crc kubenswrapper[4982]: I0122 08:37:28.495517 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cefb7079-155c-4edf-8acf-5e918e26b4ed-utilities" (OuterVolumeSpecName: "utilities") pod "cefb7079-155c-4edf-8acf-5e918e26b4ed" (UID: "cefb7079-155c-4edf-8acf-5e918e26b4ed"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 08:37:28 crc kubenswrapper[4982]: I0122 08:37:28.499961 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cefb7079-155c-4edf-8acf-5e918e26b4ed-kube-api-access-4s7mq" (OuterVolumeSpecName: "kube-api-access-4s7mq") pod "cefb7079-155c-4edf-8acf-5e918e26b4ed" (UID: "cefb7079-155c-4edf-8acf-5e918e26b4ed"). InnerVolumeSpecName "kube-api-access-4s7mq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 08:37:28 crc kubenswrapper[4982]: I0122 08:37:28.519022 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cefb7079-155c-4edf-8acf-5e918e26b4ed-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cefb7079-155c-4edf-8acf-5e918e26b4ed" (UID: "cefb7079-155c-4edf-8acf-5e918e26b4ed"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 08:37:28 crc kubenswrapper[4982]: I0122 08:37:28.596408 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cefb7079-155c-4edf-8acf-5e918e26b4ed-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 08:37:28 crc kubenswrapper[4982]: I0122 08:37:28.596439 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cefb7079-155c-4edf-8acf-5e918e26b4ed-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 08:37:28 crc kubenswrapper[4982]: I0122 08:37:28.596450 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4s7mq\" (UniqueName: \"kubernetes.io/projected/cefb7079-155c-4edf-8acf-5e918e26b4ed-kube-api-access-4s7mq\") on node \"crc\" DevicePath \"\"" Jan 22 08:37:28 crc kubenswrapper[4982]: I0122 08:37:28.771773 4982 generic.go:334] "Generic (PLEG): container finished" podID="cefb7079-155c-4edf-8acf-5e918e26b4ed" containerID="0ab7f143be99eef925907b8c4f6173eda36c8af164e901ce90388998d640f088" exitCode=0 Jan 22 08:37:28 crc kubenswrapper[4982]: I0122 08:37:28.771814 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-85v7b" event={"ID":"cefb7079-155c-4edf-8acf-5e918e26b4ed","Type":"ContainerDied","Data":"0ab7f143be99eef925907b8c4f6173eda36c8af164e901ce90388998d640f088"} Jan 22 08:37:28 crc kubenswrapper[4982]: I0122 08:37:28.771842 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-85v7b" event={"ID":"cefb7079-155c-4edf-8acf-5e918e26b4ed","Type":"ContainerDied","Data":"382e0bc4fb7e9cde90cb2cdc7d07ad1c21e308c274f92cde9124b951bd8e1dbd"} Jan 22 08:37:28 crc kubenswrapper[4982]: I0122 08:37:28.771952 4982 scope.go:117] "RemoveContainer" containerID="0ab7f143be99eef925907b8c4f6173eda36c8af164e901ce90388998d640f088" Jan 22 08:37:28 crc kubenswrapper[4982]: I0122 08:37:28.772045 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-85v7b" Jan 22 08:37:28 crc kubenswrapper[4982]: I0122 08:37:28.807099 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-85v7b"] Jan 22 08:37:28 crc kubenswrapper[4982]: I0122 08:37:28.816096 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-85v7b"] Jan 22 08:37:28 crc kubenswrapper[4982]: I0122 08:37:28.818241 4982 scope.go:117] "RemoveContainer" containerID="0afe118cf00f327d9be13b53bc33f3fb6e99b2e0e19693827e2825e27344d123" Jan 22 08:37:28 crc kubenswrapper[4982]: I0122 08:37:28.844620 4982 scope.go:117] "RemoveContainer" containerID="0eef8a6c468d919d9a672ba073e9c8fed1921ba8d0f3fd885f384014749494a1" Jan 22 08:37:28 crc kubenswrapper[4982]: I0122 08:37:28.900471 4982 scope.go:117] "RemoveContainer" containerID="0ab7f143be99eef925907b8c4f6173eda36c8af164e901ce90388998d640f088" Jan 22 08:37:28 crc kubenswrapper[4982]: E0122 08:37:28.903194 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ab7f143be99eef925907b8c4f6173eda36c8af164e901ce90388998d640f088\": container with ID starting with 0ab7f143be99eef925907b8c4f6173eda36c8af164e901ce90388998d640f088 not found: ID does not exist" containerID="0ab7f143be99eef925907b8c4f6173eda36c8af164e901ce90388998d640f088" Jan 22 08:37:28 crc kubenswrapper[4982]: I0122 08:37:28.903706 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ab7f143be99eef925907b8c4f6173eda36c8af164e901ce90388998d640f088"} err="failed to get container status \"0ab7f143be99eef925907b8c4f6173eda36c8af164e901ce90388998d640f088\": rpc error: code = NotFound desc = could not find container \"0ab7f143be99eef925907b8c4f6173eda36c8af164e901ce90388998d640f088\": container with ID starting with 0ab7f143be99eef925907b8c4f6173eda36c8af164e901ce90388998d640f088 not found: ID does not exist" Jan 22 08:37:28 crc kubenswrapper[4982]: I0122 08:37:28.903746 4982 scope.go:117] "RemoveContainer" containerID="0afe118cf00f327d9be13b53bc33f3fb6e99b2e0e19693827e2825e27344d123" Jan 22 08:37:28 crc kubenswrapper[4982]: E0122 08:37:28.904231 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0afe118cf00f327d9be13b53bc33f3fb6e99b2e0e19693827e2825e27344d123\": container with ID starting with 0afe118cf00f327d9be13b53bc33f3fb6e99b2e0e19693827e2825e27344d123 not found: ID does not exist" containerID="0afe118cf00f327d9be13b53bc33f3fb6e99b2e0e19693827e2825e27344d123" Jan 22 08:37:28 crc kubenswrapper[4982]: I0122 08:37:28.904263 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0afe118cf00f327d9be13b53bc33f3fb6e99b2e0e19693827e2825e27344d123"} err="failed to get container status \"0afe118cf00f327d9be13b53bc33f3fb6e99b2e0e19693827e2825e27344d123\": rpc error: code = NotFound desc = could not find container \"0afe118cf00f327d9be13b53bc33f3fb6e99b2e0e19693827e2825e27344d123\": container with ID starting with 0afe118cf00f327d9be13b53bc33f3fb6e99b2e0e19693827e2825e27344d123 not found: ID does not exist" Jan 22 08:37:28 crc kubenswrapper[4982]: I0122 08:37:28.904285 4982 scope.go:117] "RemoveContainer" containerID="0eef8a6c468d919d9a672ba073e9c8fed1921ba8d0f3fd885f384014749494a1" Jan 22 08:37:28 crc kubenswrapper[4982]: E0122 08:37:28.904838 4982 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"0eef8a6c468d919d9a672ba073e9c8fed1921ba8d0f3fd885f384014749494a1\": container with ID starting with 0eef8a6c468d919d9a672ba073e9c8fed1921ba8d0f3fd885f384014749494a1 not found: ID does not exist" containerID="0eef8a6c468d919d9a672ba073e9c8fed1921ba8d0f3fd885f384014749494a1" Jan 22 08:37:28 crc kubenswrapper[4982]: I0122 08:37:28.904890 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0eef8a6c468d919d9a672ba073e9c8fed1921ba8d0f3fd885f384014749494a1"} err="failed to get container status \"0eef8a6c468d919d9a672ba073e9c8fed1921ba8d0f3fd885f384014749494a1\": rpc error: code = NotFound desc = could not find container \"0eef8a6c468d919d9a672ba073e9c8fed1921ba8d0f3fd885f384014749494a1\": container with ID starting with 0eef8a6c468d919d9a672ba073e9c8fed1921ba8d0f3fd885f384014749494a1 not found: ID does not exist" Jan 22 08:37:29 crc kubenswrapper[4982]: I0122 08:37:29.749174 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cefb7079-155c-4edf-8acf-5e918e26b4ed" path="/var/lib/kubelet/pods/cefb7079-155c-4edf-8acf-5e918e26b4ed/volumes" Jan 22 08:37:48 crc kubenswrapper[4982]: I0122 08:37:48.973830 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 08:37:48 crc kubenswrapper[4982]: I0122 08:37:48.974619 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 08:37:48 crc kubenswrapper[4982]: I0122 08:37:48.974704 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" Jan 22 08:37:48 crc kubenswrapper[4982]: I0122 08:37:48.976122 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f22e98b8cf5a7eb41b64f748199e93d0d5135ebbb76b5acbef9d1ce457671181"} pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 08:37:48 crc kubenswrapper[4982]: I0122 08:37:48.976247 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" containerID="cri-o://f22e98b8cf5a7eb41b64f748199e93d0d5135ebbb76b5acbef9d1ce457671181" gracePeriod=600 Jan 22 08:37:49 crc kubenswrapper[4982]: E0122 08:37:49.096437 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:37:50 crc kubenswrapper[4982]: I0122 08:37:50.020333 4982 generic.go:334] 
"Generic (PLEG): container finished" podID="2829369e-72ba-4637-853b-88f5cf242a0e" containerID="f22e98b8cf5a7eb41b64f748199e93d0d5135ebbb76b5acbef9d1ce457671181" exitCode=0 Jan 22 08:37:50 crc kubenswrapper[4982]: I0122 08:37:50.020422 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerDied","Data":"f22e98b8cf5a7eb41b64f748199e93d0d5135ebbb76b5acbef9d1ce457671181"} Jan 22 08:37:50 crc kubenswrapper[4982]: I0122 08:37:50.020672 4982 scope.go:117] "RemoveContainer" containerID="3c863d0caac21e6c4340c3e6f8ed5e1879e04b9c54ec97897ba983925af2095d" Jan 22 08:37:50 crc kubenswrapper[4982]: I0122 08:37:50.021473 4982 scope.go:117] "RemoveContainer" containerID="f22e98b8cf5a7eb41b64f748199e93d0d5135ebbb76b5acbef9d1ce457671181" Jan 22 08:37:50 crc kubenswrapper[4982]: E0122 08:37:50.021786 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:38:02 crc kubenswrapper[4982]: I0122 08:38:02.720042 4982 scope.go:117] "RemoveContainer" containerID="f22e98b8cf5a7eb41b64f748199e93d0d5135ebbb76b5acbef9d1ce457671181" Jan 22 08:38:02 crc kubenswrapper[4982]: E0122 08:38:02.721107 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:38:16 crc kubenswrapper[4982]: I0122 08:38:16.720420 4982 scope.go:117] "RemoveContainer" containerID="f22e98b8cf5a7eb41b64f748199e93d0d5135ebbb76b5acbef9d1ce457671181" Jan 22 08:38:16 crc kubenswrapper[4982]: E0122 08:38:16.721756 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:38:28 crc kubenswrapper[4982]: I0122 08:38:28.721468 4982 scope.go:117] "RemoveContainer" containerID="f22e98b8cf5a7eb41b64f748199e93d0d5135ebbb76b5acbef9d1ce457671181" Jan 22 08:38:28 crc kubenswrapper[4982]: E0122 08:38:28.722655 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:38:40 crc kubenswrapper[4982]: I0122 08:38:40.719826 4982 scope.go:117] "RemoveContainer" containerID="f22e98b8cf5a7eb41b64f748199e93d0d5135ebbb76b5acbef9d1ce457671181" 
Jan 22 08:38:40 crc kubenswrapper[4982]: E0122 08:38:40.720866 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:38:55 crc kubenswrapper[4982]: I0122 08:38:55.726255 4982 scope.go:117] "RemoveContainer" containerID="f22e98b8cf5a7eb41b64f748199e93d0d5135ebbb76b5acbef9d1ce457671181" Jan 22 08:38:55 crc kubenswrapper[4982]: E0122 08:38:55.727176 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:39:06 crc kubenswrapper[4982]: I0122 08:39:06.720219 4982 scope.go:117] "RemoveContainer" containerID="f22e98b8cf5a7eb41b64f748199e93d0d5135ebbb76b5acbef9d1ce457671181" Jan 22 08:39:06 crc kubenswrapper[4982]: E0122 08:39:06.721298 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:39:21 crc kubenswrapper[4982]: I0122 08:39:21.719713 4982 scope.go:117] "RemoveContainer" containerID="f22e98b8cf5a7eb41b64f748199e93d0d5135ebbb76b5acbef9d1ce457671181" Jan 22 08:39:21 crc kubenswrapper[4982]: E0122 08:39:21.720630 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:39:35 crc kubenswrapper[4982]: I0122 08:39:35.719379 4982 scope.go:117] "RemoveContainer" containerID="f22e98b8cf5a7eb41b64f748199e93d0d5135ebbb76b5acbef9d1ce457671181" Jan 22 08:39:35 crc kubenswrapper[4982]: E0122 08:39:35.720479 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:39:46 crc kubenswrapper[4982]: I0122 08:39:46.720112 4982 scope.go:117] "RemoveContainer" containerID="f22e98b8cf5a7eb41b64f748199e93d0d5135ebbb76b5acbef9d1ce457671181" Jan 22 08:39:46 crc kubenswrapper[4982]: E0122 08:39:46.722463 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:40:01 crc kubenswrapper[4982]: I0122 08:40:01.724517 4982 scope.go:117] "RemoveContainer" containerID="f22e98b8cf5a7eb41b64f748199e93d0d5135ebbb76b5acbef9d1ce457671181" Jan 22 08:40:01 crc kubenswrapper[4982]: E0122 08:40:01.727034 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:40:14 crc kubenswrapper[4982]: I0122 08:40:14.720090 4982 scope.go:117] "RemoveContainer" containerID="f22e98b8cf5a7eb41b64f748199e93d0d5135ebbb76b5acbef9d1ce457671181" Jan 22 08:40:14 crc kubenswrapper[4982]: E0122 08:40:14.721387 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:40:29 crc kubenswrapper[4982]: I0122 08:40:29.736053 4982 scope.go:117] "RemoveContainer" containerID="f22e98b8cf5a7eb41b64f748199e93d0d5135ebbb76b5acbef9d1ce457671181" Jan 22 08:40:29 crc kubenswrapper[4982]: E0122 08:40:29.737007 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:40:40 crc kubenswrapper[4982]: I0122 08:40:40.719456 4982 scope.go:117] "RemoveContainer" containerID="f22e98b8cf5a7eb41b64f748199e93d0d5135ebbb76b5acbef9d1ce457671181" Jan 22 08:40:40 crc kubenswrapper[4982]: E0122 08:40:40.720511 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:40:55 crc kubenswrapper[4982]: I0122 08:40:55.719451 4982 scope.go:117] "RemoveContainer" containerID="f22e98b8cf5a7eb41b64f748199e93d0d5135ebbb76b5acbef9d1ce457671181" Jan 22 08:40:55 crc kubenswrapper[4982]: E0122 08:40:55.720537 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:41:09 crc kubenswrapper[4982]: I0122 08:41:09.734297 4982 scope.go:117] "RemoveContainer" containerID="f22e98b8cf5a7eb41b64f748199e93d0d5135ebbb76b5acbef9d1ce457671181" Jan 22 08:41:09 crc kubenswrapper[4982]: E0122 08:41:09.736107 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:41:12 crc kubenswrapper[4982]: I0122 08:41:12.365669 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-jr7b2"] Jan 22 08:41:12 crc kubenswrapper[4982]: E0122 08:41:12.367079 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cefb7079-155c-4edf-8acf-5e918e26b4ed" containerName="extract-content" Jan 22 08:41:12 crc kubenswrapper[4982]: I0122 08:41:12.367101 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="cefb7079-155c-4edf-8acf-5e918e26b4ed" containerName="extract-content" Jan 22 08:41:12 crc kubenswrapper[4982]: E0122 08:41:12.367123 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cefb7079-155c-4edf-8acf-5e918e26b4ed" containerName="registry-server" Jan 22 08:41:12 crc kubenswrapper[4982]: I0122 08:41:12.367131 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="cefb7079-155c-4edf-8acf-5e918e26b4ed" containerName="registry-server" Jan 22 08:41:12 crc kubenswrapper[4982]: E0122 08:41:12.367146 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cefb7079-155c-4edf-8acf-5e918e26b4ed" containerName="extract-utilities" Jan 22 08:41:12 crc kubenswrapper[4982]: I0122 08:41:12.367154 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="cefb7079-155c-4edf-8acf-5e918e26b4ed" containerName="extract-utilities" Jan 22 08:41:12 crc kubenswrapper[4982]: I0122 08:41:12.367434 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="cefb7079-155c-4edf-8acf-5e918e26b4ed" containerName="registry-server" Jan 22 08:41:12 crc kubenswrapper[4982]: I0122 08:41:12.369648 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jr7b2" Jan 22 08:41:12 crc kubenswrapper[4982]: I0122 08:41:12.406077 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jr7b2"] Jan 22 08:41:12 crc kubenswrapper[4982]: I0122 08:41:12.480158 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/160d6f3c-4117-413f-a657-2de09127229f-catalog-content\") pod \"community-operators-jr7b2\" (UID: \"160d6f3c-4117-413f-a657-2de09127229f\") " pod="openshift-marketplace/community-operators-jr7b2" Jan 22 08:41:12 crc kubenswrapper[4982]: I0122 08:41:12.480241 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/160d6f3c-4117-413f-a657-2de09127229f-utilities\") pod \"community-operators-jr7b2\" (UID: \"160d6f3c-4117-413f-a657-2de09127229f\") " pod="openshift-marketplace/community-operators-jr7b2" Jan 22 08:41:12 crc kubenswrapper[4982]: I0122 08:41:12.480269 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qgcd4\" (UniqueName: \"kubernetes.io/projected/160d6f3c-4117-413f-a657-2de09127229f-kube-api-access-qgcd4\") pod \"community-operators-jr7b2\" (UID: \"160d6f3c-4117-413f-a657-2de09127229f\") " pod="openshift-marketplace/community-operators-jr7b2" Jan 22 08:41:12 crc kubenswrapper[4982]: I0122 08:41:12.582355 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/160d6f3c-4117-413f-a657-2de09127229f-catalog-content\") pod \"community-operators-jr7b2\" (UID: \"160d6f3c-4117-413f-a657-2de09127229f\") " pod="openshift-marketplace/community-operators-jr7b2" Jan 22 08:41:12 crc kubenswrapper[4982]: I0122 08:41:12.582469 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/160d6f3c-4117-413f-a657-2de09127229f-utilities\") pod \"community-operators-jr7b2\" (UID: \"160d6f3c-4117-413f-a657-2de09127229f\") " pod="openshift-marketplace/community-operators-jr7b2" Jan 22 08:41:12 crc kubenswrapper[4982]: I0122 08:41:12.582502 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qgcd4\" (UniqueName: \"kubernetes.io/projected/160d6f3c-4117-413f-a657-2de09127229f-kube-api-access-qgcd4\") pod \"community-operators-jr7b2\" (UID: \"160d6f3c-4117-413f-a657-2de09127229f\") " pod="openshift-marketplace/community-operators-jr7b2" Jan 22 08:41:12 crc kubenswrapper[4982]: I0122 08:41:12.583085 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/160d6f3c-4117-413f-a657-2de09127229f-catalog-content\") pod \"community-operators-jr7b2\" (UID: \"160d6f3c-4117-413f-a657-2de09127229f\") " pod="openshift-marketplace/community-operators-jr7b2" Jan 22 08:41:12 crc kubenswrapper[4982]: I0122 08:41:12.583123 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/160d6f3c-4117-413f-a657-2de09127229f-utilities\") pod \"community-operators-jr7b2\" (UID: \"160d6f3c-4117-413f-a657-2de09127229f\") " pod="openshift-marketplace/community-operators-jr7b2" Jan 22 08:41:12 crc kubenswrapper[4982]: I0122 08:41:12.605486 4982 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-qgcd4\" (UniqueName: \"kubernetes.io/projected/160d6f3c-4117-413f-a657-2de09127229f-kube-api-access-qgcd4\") pod \"community-operators-jr7b2\" (UID: \"160d6f3c-4117-413f-a657-2de09127229f\") " pod="openshift-marketplace/community-operators-jr7b2" Jan 22 08:41:12 crc kubenswrapper[4982]: I0122 08:41:12.695271 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jr7b2" Jan 22 08:41:13 crc kubenswrapper[4982]: I0122 08:41:13.259581 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jr7b2"] Jan 22 08:41:13 crc kubenswrapper[4982]: W0122 08:41:13.263728 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod160d6f3c_4117_413f_a657_2de09127229f.slice/crio-b3d3894a79273b22b3b284eb5903fa85ab3a5af407123671989c10ce2cf08b04 WatchSource:0}: Error finding container b3d3894a79273b22b3b284eb5903fa85ab3a5af407123671989c10ce2cf08b04: Status 404 returned error can't find the container with id b3d3894a79273b22b3b284eb5903fa85ab3a5af407123671989c10ce2cf08b04 Jan 22 08:41:13 crc kubenswrapper[4982]: I0122 08:41:13.642413 4982 generic.go:334] "Generic (PLEG): container finished" podID="160d6f3c-4117-413f-a657-2de09127229f" containerID="b325e64d1a15e76605b5aa0481fe8503677028f40185b5927d20bcc44fbf3d41" exitCode=0 Jan 22 08:41:13 crc kubenswrapper[4982]: I0122 08:41:13.642520 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jr7b2" event={"ID":"160d6f3c-4117-413f-a657-2de09127229f","Type":"ContainerDied","Data":"b325e64d1a15e76605b5aa0481fe8503677028f40185b5927d20bcc44fbf3d41"} Jan 22 08:41:13 crc kubenswrapper[4982]: I0122 08:41:13.643589 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jr7b2" event={"ID":"160d6f3c-4117-413f-a657-2de09127229f","Type":"ContainerStarted","Data":"b3d3894a79273b22b3b284eb5903fa85ab3a5af407123671989c10ce2cf08b04"} Jan 22 08:41:13 crc kubenswrapper[4982]: I0122 08:41:13.644327 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 08:41:14 crc kubenswrapper[4982]: I0122 08:41:14.654288 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jr7b2" event={"ID":"160d6f3c-4117-413f-a657-2de09127229f","Type":"ContainerStarted","Data":"2dfa31bd7f8cc289d7a9efa01459cf9ba03f9cba630339960469116d6124701e"} Jan 22 08:41:15 crc kubenswrapper[4982]: I0122 08:41:15.668538 4982 generic.go:334] "Generic (PLEG): container finished" podID="160d6f3c-4117-413f-a657-2de09127229f" containerID="2dfa31bd7f8cc289d7a9efa01459cf9ba03f9cba630339960469116d6124701e" exitCode=0 Jan 22 08:41:15 crc kubenswrapper[4982]: I0122 08:41:15.668590 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jr7b2" event={"ID":"160d6f3c-4117-413f-a657-2de09127229f","Type":"ContainerDied","Data":"2dfa31bd7f8cc289d7a9efa01459cf9ba03f9cba630339960469116d6124701e"} Jan 22 08:41:16 crc kubenswrapper[4982]: I0122 08:41:16.684178 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jr7b2" event={"ID":"160d6f3c-4117-413f-a657-2de09127229f","Type":"ContainerStarted","Data":"fe2ba2440dc2d9bcaf16f970da563b5a8a18b419877bf8460c8cfc076232d492"} Jan 22 08:41:16 crc kubenswrapper[4982]: I0122 
08:41:16.712819 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-jr7b2" podStartSLOduration=2.247807413 podStartE2EDuration="4.712785865s" podCreationTimestamp="2026-01-22 08:41:12 +0000 UTC" firstStartedPulling="2026-01-22 08:41:13.644101709 +0000 UTC m=+10534.482739712" lastFinishedPulling="2026-01-22 08:41:16.109080151 +0000 UTC m=+10536.947718164" observedRunningTime="2026-01-22 08:41:16.704782208 +0000 UTC m=+10537.543420301" watchObservedRunningTime="2026-01-22 08:41:16.712785865 +0000 UTC m=+10537.551423908" Jan 22 08:41:22 crc kubenswrapper[4982]: I0122 08:41:22.695906 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-jr7b2" Jan 22 08:41:22 crc kubenswrapper[4982]: I0122 08:41:22.696360 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-jr7b2" Jan 22 08:41:22 crc kubenswrapper[4982]: I0122 08:41:22.756482 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-jr7b2" Jan 22 08:41:22 crc kubenswrapper[4982]: I0122 08:41:22.824830 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-jr7b2" Jan 22 08:41:23 crc kubenswrapper[4982]: I0122 08:41:23.012710 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jr7b2"] Jan 22 08:41:23 crc kubenswrapper[4982]: I0122 08:41:23.721191 4982 scope.go:117] "RemoveContainer" containerID="f22e98b8cf5a7eb41b64f748199e93d0d5135ebbb76b5acbef9d1ce457671181" Jan 22 08:41:23 crc kubenswrapper[4982]: E0122 08:41:23.721747 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:41:24 crc kubenswrapper[4982]: I0122 08:41:24.771112 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-jr7b2" podUID="160d6f3c-4117-413f-a657-2de09127229f" containerName="registry-server" containerID="cri-o://fe2ba2440dc2d9bcaf16f970da563b5a8a18b419877bf8460c8cfc076232d492" gracePeriod=2 Jan 22 08:41:25 crc kubenswrapper[4982]: I0122 08:41:25.339694 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jr7b2" Jan 22 08:41:25 crc kubenswrapper[4982]: I0122 08:41:25.494684 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/160d6f3c-4117-413f-a657-2de09127229f-utilities\") pod \"160d6f3c-4117-413f-a657-2de09127229f\" (UID: \"160d6f3c-4117-413f-a657-2de09127229f\") " Jan 22 08:41:25 crc kubenswrapper[4982]: I0122 08:41:25.495127 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/160d6f3c-4117-413f-a657-2de09127229f-catalog-content\") pod \"160d6f3c-4117-413f-a657-2de09127229f\" (UID: \"160d6f3c-4117-413f-a657-2de09127229f\") " Jan 22 08:41:25 crc kubenswrapper[4982]: I0122 08:41:25.495302 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qgcd4\" (UniqueName: \"kubernetes.io/projected/160d6f3c-4117-413f-a657-2de09127229f-kube-api-access-qgcd4\") pod \"160d6f3c-4117-413f-a657-2de09127229f\" (UID: \"160d6f3c-4117-413f-a657-2de09127229f\") " Jan 22 08:41:25 crc kubenswrapper[4982]: I0122 08:41:25.496453 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/160d6f3c-4117-413f-a657-2de09127229f-utilities" (OuterVolumeSpecName: "utilities") pod "160d6f3c-4117-413f-a657-2de09127229f" (UID: "160d6f3c-4117-413f-a657-2de09127229f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 08:41:25 crc kubenswrapper[4982]: I0122 08:41:25.504002 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/160d6f3c-4117-413f-a657-2de09127229f-kube-api-access-qgcd4" (OuterVolumeSpecName: "kube-api-access-qgcd4") pod "160d6f3c-4117-413f-a657-2de09127229f" (UID: "160d6f3c-4117-413f-a657-2de09127229f"). InnerVolumeSpecName "kube-api-access-qgcd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 08:41:25 crc kubenswrapper[4982]: I0122 08:41:25.561630 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/160d6f3c-4117-413f-a657-2de09127229f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "160d6f3c-4117-413f-a657-2de09127229f" (UID: "160d6f3c-4117-413f-a657-2de09127229f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 08:41:25 crc kubenswrapper[4982]: I0122 08:41:25.598239 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/160d6f3c-4117-413f-a657-2de09127229f-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 08:41:25 crc kubenswrapper[4982]: I0122 08:41:25.598278 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/160d6f3c-4117-413f-a657-2de09127229f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 08:41:25 crc kubenswrapper[4982]: I0122 08:41:25.598289 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qgcd4\" (UniqueName: \"kubernetes.io/projected/160d6f3c-4117-413f-a657-2de09127229f-kube-api-access-qgcd4\") on node \"crc\" DevicePath \"\"" Jan 22 08:41:25 crc kubenswrapper[4982]: I0122 08:41:25.782952 4982 generic.go:334] "Generic (PLEG): container finished" podID="160d6f3c-4117-413f-a657-2de09127229f" containerID="fe2ba2440dc2d9bcaf16f970da563b5a8a18b419877bf8460c8cfc076232d492" exitCode=0 Jan 22 08:41:25 crc kubenswrapper[4982]: I0122 08:41:25.783011 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jr7b2" event={"ID":"160d6f3c-4117-413f-a657-2de09127229f","Type":"ContainerDied","Data":"fe2ba2440dc2d9bcaf16f970da563b5a8a18b419877bf8460c8cfc076232d492"} Jan 22 08:41:25 crc kubenswrapper[4982]: I0122 08:41:25.783028 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jr7b2" Jan 22 08:41:25 crc kubenswrapper[4982]: I0122 08:41:25.783055 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jr7b2" event={"ID":"160d6f3c-4117-413f-a657-2de09127229f","Type":"ContainerDied","Data":"b3d3894a79273b22b3b284eb5903fa85ab3a5af407123671989c10ce2cf08b04"} Jan 22 08:41:25 crc kubenswrapper[4982]: I0122 08:41:25.783086 4982 scope.go:117] "RemoveContainer" containerID="fe2ba2440dc2d9bcaf16f970da563b5a8a18b419877bf8460c8cfc076232d492" Jan 22 08:41:25 crc kubenswrapper[4982]: I0122 08:41:25.813839 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jr7b2"] Jan 22 08:41:25 crc kubenswrapper[4982]: I0122 08:41:25.814217 4982 scope.go:117] "RemoveContainer" containerID="2dfa31bd7f8cc289d7a9efa01459cf9ba03f9cba630339960469116d6124701e" Jan 22 08:41:25 crc kubenswrapper[4982]: I0122 08:41:25.823006 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-jr7b2"] Jan 22 08:41:25 crc kubenswrapper[4982]: I0122 08:41:25.841522 4982 scope.go:117] "RemoveContainer" containerID="b325e64d1a15e76605b5aa0481fe8503677028f40185b5927d20bcc44fbf3d41" Jan 22 08:41:25 crc kubenswrapper[4982]: I0122 08:41:25.883933 4982 scope.go:117] "RemoveContainer" containerID="fe2ba2440dc2d9bcaf16f970da563b5a8a18b419877bf8460c8cfc076232d492" Jan 22 08:41:25 crc kubenswrapper[4982]: E0122 08:41:25.890501 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe2ba2440dc2d9bcaf16f970da563b5a8a18b419877bf8460c8cfc076232d492\": container with ID starting with fe2ba2440dc2d9bcaf16f970da563b5a8a18b419877bf8460c8cfc076232d492 not found: ID does not exist" containerID="fe2ba2440dc2d9bcaf16f970da563b5a8a18b419877bf8460c8cfc076232d492" Jan 22 08:41:25 crc kubenswrapper[4982]: I0122 08:41:25.890574 
4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe2ba2440dc2d9bcaf16f970da563b5a8a18b419877bf8460c8cfc076232d492"} err="failed to get container status \"fe2ba2440dc2d9bcaf16f970da563b5a8a18b419877bf8460c8cfc076232d492\": rpc error: code = NotFound desc = could not find container \"fe2ba2440dc2d9bcaf16f970da563b5a8a18b419877bf8460c8cfc076232d492\": container with ID starting with fe2ba2440dc2d9bcaf16f970da563b5a8a18b419877bf8460c8cfc076232d492 not found: ID does not exist" Jan 22 08:41:25 crc kubenswrapper[4982]: I0122 08:41:25.890616 4982 scope.go:117] "RemoveContainer" containerID="2dfa31bd7f8cc289d7a9efa01459cf9ba03f9cba630339960469116d6124701e" Jan 22 08:41:25 crc kubenswrapper[4982]: E0122 08:41:25.891038 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2dfa31bd7f8cc289d7a9efa01459cf9ba03f9cba630339960469116d6124701e\": container with ID starting with 2dfa31bd7f8cc289d7a9efa01459cf9ba03f9cba630339960469116d6124701e not found: ID does not exist" containerID="2dfa31bd7f8cc289d7a9efa01459cf9ba03f9cba630339960469116d6124701e" Jan 22 08:41:25 crc kubenswrapper[4982]: I0122 08:41:25.891104 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2dfa31bd7f8cc289d7a9efa01459cf9ba03f9cba630339960469116d6124701e"} err="failed to get container status \"2dfa31bd7f8cc289d7a9efa01459cf9ba03f9cba630339960469116d6124701e\": rpc error: code = NotFound desc = could not find container \"2dfa31bd7f8cc289d7a9efa01459cf9ba03f9cba630339960469116d6124701e\": container with ID starting with 2dfa31bd7f8cc289d7a9efa01459cf9ba03f9cba630339960469116d6124701e not found: ID does not exist" Jan 22 08:41:25 crc kubenswrapper[4982]: I0122 08:41:25.891145 4982 scope.go:117] "RemoveContainer" containerID="b325e64d1a15e76605b5aa0481fe8503677028f40185b5927d20bcc44fbf3d41" Jan 22 08:41:25 crc kubenswrapper[4982]: E0122 08:41:25.891425 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b325e64d1a15e76605b5aa0481fe8503677028f40185b5927d20bcc44fbf3d41\": container with ID starting with b325e64d1a15e76605b5aa0481fe8503677028f40185b5927d20bcc44fbf3d41 not found: ID does not exist" containerID="b325e64d1a15e76605b5aa0481fe8503677028f40185b5927d20bcc44fbf3d41" Jan 22 08:41:25 crc kubenswrapper[4982]: I0122 08:41:25.891458 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b325e64d1a15e76605b5aa0481fe8503677028f40185b5927d20bcc44fbf3d41"} err="failed to get container status \"b325e64d1a15e76605b5aa0481fe8503677028f40185b5927d20bcc44fbf3d41\": rpc error: code = NotFound desc = could not find container \"b325e64d1a15e76605b5aa0481fe8503677028f40185b5927d20bcc44fbf3d41\": container with ID starting with b325e64d1a15e76605b5aa0481fe8503677028f40185b5927d20bcc44fbf3d41 not found: ID does not exist" Jan 22 08:41:27 crc kubenswrapper[4982]: I0122 08:41:27.744814 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="160d6f3c-4117-413f-a657-2de09127229f" path="/var/lib/kubelet/pods/160d6f3c-4117-413f-a657-2de09127229f/volumes" Jan 22 08:41:38 crc kubenswrapper[4982]: I0122 08:41:38.719898 4982 scope.go:117] "RemoveContainer" containerID="f22e98b8cf5a7eb41b64f748199e93d0d5135ebbb76b5acbef9d1ce457671181" Jan 22 08:41:38 crc kubenswrapper[4982]: E0122 08:41:38.720504 4982 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:41:51 crc kubenswrapper[4982]: I0122 08:41:51.723446 4982 scope.go:117] "RemoveContainer" containerID="f22e98b8cf5a7eb41b64f748199e93d0d5135ebbb76b5acbef9d1ce457671181" Jan 22 08:41:51 crc kubenswrapper[4982]: E0122 08:41:51.724198 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:41:58 crc kubenswrapper[4982]: I0122 08:41:58.887671 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-z98mt"] Jan 22 08:41:58 crc kubenswrapper[4982]: E0122 08:41:58.895082 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="160d6f3c-4117-413f-a657-2de09127229f" containerName="extract-content" Jan 22 08:41:58 crc kubenswrapper[4982]: I0122 08:41:58.895112 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="160d6f3c-4117-413f-a657-2de09127229f" containerName="extract-content" Jan 22 08:41:58 crc kubenswrapper[4982]: E0122 08:41:58.895126 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="160d6f3c-4117-413f-a657-2de09127229f" containerName="registry-server" Jan 22 08:41:58 crc kubenswrapper[4982]: I0122 08:41:58.895132 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="160d6f3c-4117-413f-a657-2de09127229f" containerName="registry-server" Jan 22 08:41:58 crc kubenswrapper[4982]: E0122 08:41:58.895152 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="160d6f3c-4117-413f-a657-2de09127229f" containerName="extract-utilities" Jan 22 08:41:58 crc kubenswrapper[4982]: I0122 08:41:58.895159 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="160d6f3c-4117-413f-a657-2de09127229f" containerName="extract-utilities" Jan 22 08:41:58 crc kubenswrapper[4982]: I0122 08:41:58.895381 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="160d6f3c-4117-413f-a657-2de09127229f" containerName="registry-server" Jan 22 08:41:58 crc kubenswrapper[4982]: I0122 08:41:58.896949 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-z98mt" Jan 22 08:41:58 crc kubenswrapper[4982]: I0122 08:41:58.906908 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-z98mt"] Jan 22 08:41:59 crc kubenswrapper[4982]: I0122 08:41:59.009621 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0-utilities\") pod \"redhat-operators-z98mt\" (UID: \"6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0\") " pod="openshift-marketplace/redhat-operators-z98mt" Jan 22 08:41:59 crc kubenswrapper[4982]: I0122 08:41:59.009704 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6xr2\" (UniqueName: \"kubernetes.io/projected/6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0-kube-api-access-d6xr2\") pod \"redhat-operators-z98mt\" (UID: \"6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0\") " pod="openshift-marketplace/redhat-operators-z98mt" Jan 22 08:41:59 crc kubenswrapper[4982]: I0122 08:41:59.010047 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0-catalog-content\") pod \"redhat-operators-z98mt\" (UID: \"6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0\") " pod="openshift-marketplace/redhat-operators-z98mt" Jan 22 08:41:59 crc kubenswrapper[4982]: I0122 08:41:59.111954 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0-catalog-content\") pod \"redhat-operators-z98mt\" (UID: \"6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0\") " pod="openshift-marketplace/redhat-operators-z98mt" Jan 22 08:41:59 crc kubenswrapper[4982]: I0122 08:41:59.112068 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0-utilities\") pod \"redhat-operators-z98mt\" (UID: \"6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0\") " pod="openshift-marketplace/redhat-operators-z98mt" Jan 22 08:41:59 crc kubenswrapper[4982]: I0122 08:41:59.112108 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6xr2\" (UniqueName: \"kubernetes.io/projected/6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0-kube-api-access-d6xr2\") pod \"redhat-operators-z98mt\" (UID: \"6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0\") " pod="openshift-marketplace/redhat-operators-z98mt" Jan 22 08:41:59 crc kubenswrapper[4982]: I0122 08:41:59.112602 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0-catalog-content\") pod \"redhat-operators-z98mt\" (UID: \"6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0\") " pod="openshift-marketplace/redhat-operators-z98mt" Jan 22 08:41:59 crc kubenswrapper[4982]: I0122 08:41:59.112620 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0-utilities\") pod \"redhat-operators-z98mt\" (UID: \"6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0\") " pod="openshift-marketplace/redhat-operators-z98mt" Jan 22 08:41:59 crc kubenswrapper[4982]: I0122 08:41:59.136588 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-d6xr2\" (UniqueName: \"kubernetes.io/projected/6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0-kube-api-access-d6xr2\") pod \"redhat-operators-z98mt\" (UID: \"6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0\") " pod="openshift-marketplace/redhat-operators-z98mt" Jan 22 08:41:59 crc kubenswrapper[4982]: I0122 08:41:59.220909 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z98mt" Jan 22 08:41:59 crc kubenswrapper[4982]: I0122 08:41:59.711264 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-z98mt"] Jan 22 08:42:00 crc kubenswrapper[4982]: I0122 08:42:00.170089 4982 generic.go:334] "Generic (PLEG): container finished" podID="6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0" containerID="1595e12dd822cc7cc68358d6450cfd92a909da13bbcbeeb9d4de9b948c5afd17" exitCode=0 Jan 22 08:42:00 crc kubenswrapper[4982]: I0122 08:42:00.170188 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z98mt" event={"ID":"6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0","Type":"ContainerDied","Data":"1595e12dd822cc7cc68358d6450cfd92a909da13bbcbeeb9d4de9b948c5afd17"} Jan 22 08:42:00 crc kubenswrapper[4982]: I0122 08:42:00.170370 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z98mt" event={"ID":"6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0","Type":"ContainerStarted","Data":"6f35a250ec3e1b7bef8d9b0540472b0256164ace0b84ea0edddcd871a08f5848"} Jan 22 08:42:01 crc kubenswrapper[4982]: I0122 08:42:01.180275 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z98mt" event={"ID":"6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0","Type":"ContainerStarted","Data":"e382a1facd8b0118156a58104a939d84bc065fa850a75004c3ead66c7de69c1b"} Jan 22 08:42:05 crc kubenswrapper[4982]: I0122 08:42:05.720797 4982 scope.go:117] "RemoveContainer" containerID="f22e98b8cf5a7eb41b64f748199e93d0d5135ebbb76b5acbef9d1ce457671181" Jan 22 08:42:05 crc kubenswrapper[4982]: E0122 08:42:05.722663 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:42:06 crc kubenswrapper[4982]: I0122 08:42:06.239613 4982 generic.go:334] "Generic (PLEG): container finished" podID="6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0" containerID="e382a1facd8b0118156a58104a939d84bc065fa850a75004c3ead66c7de69c1b" exitCode=0 Jan 22 08:42:06 crc kubenswrapper[4982]: I0122 08:42:06.239707 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z98mt" event={"ID":"6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0","Type":"ContainerDied","Data":"e382a1facd8b0118156a58104a939d84bc065fa850a75004c3ead66c7de69c1b"} Jan 22 08:42:08 crc kubenswrapper[4982]: I0122 08:42:08.270562 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z98mt" event={"ID":"6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0","Type":"ContainerStarted","Data":"84890f87657c67db8c65ac4c908eef9e82b79201b8023841eaf69576c0a58096"} Jan 22 08:42:08 crc kubenswrapper[4982]: I0122 08:42:08.299777 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-operators-z98mt" podStartSLOduration=3.782025375 podStartE2EDuration="10.29975887s" podCreationTimestamp="2026-01-22 08:41:58 +0000 UTC" firstStartedPulling="2026-01-22 08:42:00.171565951 +0000 UTC m=+10581.010203954" lastFinishedPulling="2026-01-22 08:42:06.689299446 +0000 UTC m=+10587.527937449" observedRunningTime="2026-01-22 08:42:08.287310672 +0000 UTC m=+10589.125948695" watchObservedRunningTime="2026-01-22 08:42:08.29975887 +0000 UTC m=+10589.138396873" Jan 22 08:42:09 crc kubenswrapper[4982]: I0122 08:42:09.221553 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-z98mt" Jan 22 08:42:09 crc kubenswrapper[4982]: I0122 08:42:09.221593 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-z98mt" Jan 22 08:42:10 crc kubenswrapper[4982]: I0122 08:42:10.288508 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-z98mt" podUID="6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0" containerName="registry-server" probeResult="failure" output=< Jan 22 08:42:10 crc kubenswrapper[4982]: timeout: failed to connect service ":50051" within 1s Jan 22 08:42:10 crc kubenswrapper[4982]: > Jan 22 08:42:18 crc kubenswrapper[4982]: I0122 08:42:18.719984 4982 scope.go:117] "RemoveContainer" containerID="f22e98b8cf5a7eb41b64f748199e93d0d5135ebbb76b5acbef9d1ce457671181" Jan 22 08:42:18 crc kubenswrapper[4982]: E0122 08:42:18.720975 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:42:19 crc kubenswrapper[4982]: I0122 08:42:19.918897 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-z98mt" Jan 22 08:42:19 crc kubenswrapper[4982]: I0122 08:42:19.987660 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-z98mt" Jan 22 08:42:20 crc kubenswrapper[4982]: I0122 08:42:20.169184 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-z98mt"] Jan 22 08:42:21 crc kubenswrapper[4982]: I0122 08:42:21.410218 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-z98mt" podUID="6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0" containerName="registry-server" containerID="cri-o://84890f87657c67db8c65ac4c908eef9e82b79201b8023841eaf69576c0a58096" gracePeriod=2 Jan 22 08:42:22 crc kubenswrapper[4982]: I0122 08:42:22.370219 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-z98mt" Jan 22 08:42:22 crc kubenswrapper[4982]: I0122 08:42:22.423045 4982 generic.go:334] "Generic (PLEG): container finished" podID="6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0" containerID="84890f87657c67db8c65ac4c908eef9e82b79201b8023841eaf69576c0a58096" exitCode=0 Jan 22 08:42:22 crc kubenswrapper[4982]: I0122 08:42:22.423128 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z98mt" event={"ID":"6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0","Type":"ContainerDied","Data":"84890f87657c67db8c65ac4c908eef9e82b79201b8023841eaf69576c0a58096"} Jan 22 08:42:22 crc kubenswrapper[4982]: I0122 08:42:22.423223 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z98mt" event={"ID":"6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0","Type":"ContainerDied","Data":"6f35a250ec3e1b7bef8d9b0540472b0256164ace0b84ea0edddcd871a08f5848"} Jan 22 08:42:22 crc kubenswrapper[4982]: I0122 08:42:22.423150 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z98mt" Jan 22 08:42:22 crc kubenswrapper[4982]: I0122 08:42:22.423357 4982 scope.go:117] "RemoveContainer" containerID="84890f87657c67db8c65ac4c908eef9e82b79201b8023841eaf69576c0a58096" Jan 22 08:42:22 crc kubenswrapper[4982]: I0122 08:42:22.448681 4982 scope.go:117] "RemoveContainer" containerID="e382a1facd8b0118156a58104a939d84bc065fa850a75004c3ead66c7de69c1b" Jan 22 08:42:22 crc kubenswrapper[4982]: I0122 08:42:22.470326 4982 scope.go:117] "RemoveContainer" containerID="1595e12dd822cc7cc68358d6450cfd92a909da13bbcbeeb9d4de9b948c5afd17" Jan 22 08:42:22 crc kubenswrapper[4982]: I0122 08:42:22.515603 4982 scope.go:117] "RemoveContainer" containerID="84890f87657c67db8c65ac4c908eef9e82b79201b8023841eaf69576c0a58096" Jan 22 08:42:22 crc kubenswrapper[4982]: E0122 08:42:22.516075 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84890f87657c67db8c65ac4c908eef9e82b79201b8023841eaf69576c0a58096\": container with ID starting with 84890f87657c67db8c65ac4c908eef9e82b79201b8023841eaf69576c0a58096 not found: ID does not exist" containerID="84890f87657c67db8c65ac4c908eef9e82b79201b8023841eaf69576c0a58096" Jan 22 08:42:22 crc kubenswrapper[4982]: I0122 08:42:22.516117 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84890f87657c67db8c65ac4c908eef9e82b79201b8023841eaf69576c0a58096"} err="failed to get container status \"84890f87657c67db8c65ac4c908eef9e82b79201b8023841eaf69576c0a58096\": rpc error: code = NotFound desc = could not find container \"84890f87657c67db8c65ac4c908eef9e82b79201b8023841eaf69576c0a58096\": container with ID starting with 84890f87657c67db8c65ac4c908eef9e82b79201b8023841eaf69576c0a58096 not found: ID does not exist" Jan 22 08:42:22 crc kubenswrapper[4982]: I0122 08:42:22.516143 4982 scope.go:117] "RemoveContainer" containerID="e382a1facd8b0118156a58104a939d84bc065fa850a75004c3ead66c7de69c1b" Jan 22 08:42:22 crc kubenswrapper[4982]: E0122 08:42:22.516574 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e382a1facd8b0118156a58104a939d84bc065fa850a75004c3ead66c7de69c1b\": container with ID starting with e382a1facd8b0118156a58104a939d84bc065fa850a75004c3ead66c7de69c1b not found: ID does not exist" 
containerID="e382a1facd8b0118156a58104a939d84bc065fa850a75004c3ead66c7de69c1b" Jan 22 08:42:22 crc kubenswrapper[4982]: I0122 08:42:22.516598 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e382a1facd8b0118156a58104a939d84bc065fa850a75004c3ead66c7de69c1b"} err="failed to get container status \"e382a1facd8b0118156a58104a939d84bc065fa850a75004c3ead66c7de69c1b\": rpc error: code = NotFound desc = could not find container \"e382a1facd8b0118156a58104a939d84bc065fa850a75004c3ead66c7de69c1b\": container with ID starting with e382a1facd8b0118156a58104a939d84bc065fa850a75004c3ead66c7de69c1b not found: ID does not exist" Jan 22 08:42:22 crc kubenswrapper[4982]: I0122 08:42:22.516612 4982 scope.go:117] "RemoveContainer" containerID="1595e12dd822cc7cc68358d6450cfd92a909da13bbcbeeb9d4de9b948c5afd17" Jan 22 08:42:22 crc kubenswrapper[4982]: E0122 08:42:22.516916 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1595e12dd822cc7cc68358d6450cfd92a909da13bbcbeeb9d4de9b948c5afd17\": container with ID starting with 1595e12dd822cc7cc68358d6450cfd92a909da13bbcbeeb9d4de9b948c5afd17 not found: ID does not exist" containerID="1595e12dd822cc7cc68358d6450cfd92a909da13bbcbeeb9d4de9b948c5afd17" Jan 22 08:42:22 crc kubenswrapper[4982]: I0122 08:42:22.516941 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1595e12dd822cc7cc68358d6450cfd92a909da13bbcbeeb9d4de9b948c5afd17"} err="failed to get container status \"1595e12dd822cc7cc68358d6450cfd92a909da13bbcbeeb9d4de9b948c5afd17\": rpc error: code = NotFound desc = could not find container \"1595e12dd822cc7cc68358d6450cfd92a909da13bbcbeeb9d4de9b948c5afd17\": container with ID starting with 1595e12dd822cc7cc68358d6450cfd92a909da13bbcbeeb9d4de9b948c5afd17 not found: ID does not exist" Jan 22 08:42:22 crc kubenswrapper[4982]: I0122 08:42:22.529278 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0-catalog-content\") pod \"6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0\" (UID: \"6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0\") " Jan 22 08:42:22 crc kubenswrapper[4982]: I0122 08:42:22.529482 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6xr2\" (UniqueName: \"kubernetes.io/projected/6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0-kube-api-access-d6xr2\") pod \"6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0\" (UID: \"6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0\") " Jan 22 08:42:22 crc kubenswrapper[4982]: I0122 08:42:22.529701 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0-utilities\") pod \"6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0\" (UID: \"6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0\") " Jan 22 08:42:22 crc kubenswrapper[4982]: I0122 08:42:22.531059 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0-utilities" (OuterVolumeSpecName: "utilities") pod "6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0" (UID: "6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 08:42:22 crc kubenswrapper[4982]: I0122 08:42:22.536486 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0-kube-api-access-d6xr2" (OuterVolumeSpecName: "kube-api-access-d6xr2") pod "6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0" (UID: "6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0"). InnerVolumeSpecName "kube-api-access-d6xr2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 08:42:22 crc kubenswrapper[4982]: I0122 08:42:22.632738 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6xr2\" (UniqueName: \"kubernetes.io/projected/6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0-kube-api-access-d6xr2\") on node \"crc\" DevicePath \"\"" Jan 22 08:42:22 crc kubenswrapper[4982]: I0122 08:42:22.632987 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 08:42:22 crc kubenswrapper[4982]: I0122 08:42:22.663736 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0" (UID: "6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 08:42:22 crc kubenswrapper[4982]: I0122 08:42:22.734820 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 08:42:22 crc kubenswrapper[4982]: I0122 08:42:22.787437 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-z98mt"] Jan 22 08:42:22 crc kubenswrapper[4982]: I0122 08:42:22.799321 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-z98mt"] Jan 22 08:42:23 crc kubenswrapper[4982]: I0122 08:42:23.737102 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0" path="/var/lib/kubelet/pods/6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0/volumes" Jan 22 08:42:29 crc kubenswrapper[4982]: I0122 08:42:29.726052 4982 scope.go:117] "RemoveContainer" containerID="f22e98b8cf5a7eb41b64f748199e93d0d5135ebbb76b5acbef9d1ce457671181" Jan 22 08:42:29 crc kubenswrapper[4982]: E0122 08:42:29.727335 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:42:42 crc kubenswrapper[4982]: I0122 08:42:42.719760 4982 scope.go:117] "RemoveContainer" containerID="f22e98b8cf5a7eb41b64f748199e93d0d5135ebbb76b5acbef9d1ce457671181" Jan 22 08:42:42 crc kubenswrapper[4982]: E0122 08:42:42.720640 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-gdpxx_openshift-machine-config-operator(2829369e-72ba-4637-853b-88f5cf242a0e)\"" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" Jan 22 08:42:54 crc kubenswrapper[4982]: I0122 08:42:54.719795 4982 scope.go:117] "RemoveContainer" containerID="f22e98b8cf5a7eb41b64f748199e93d0d5135ebbb76b5acbef9d1ce457671181" Jan 22 08:42:55 crc kubenswrapper[4982]: I0122 08:42:55.835523 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" event={"ID":"2829369e-72ba-4637-853b-88f5cf242a0e","Type":"ContainerStarted","Data":"df56197df17a625dfadd26d66cf794aac2cd60543f63dee5b29c2ddbe600b650"} Jan 22 08:44:56 crc kubenswrapper[4982]: I0122 08:44:56.980265 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-c6pps"] Jan 22 08:44:56 crc kubenswrapper[4982]: E0122 08:44:56.984401 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0" containerName="extract-content" Jan 22 08:44:56 crc kubenswrapper[4982]: I0122 08:44:56.984425 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0" containerName="extract-content" Jan 22 08:44:56 crc kubenswrapper[4982]: E0122 08:44:56.984474 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0" containerName="registry-server" Jan 22 08:44:56 crc kubenswrapper[4982]: I0122 08:44:56.984483 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0" containerName="registry-server" Jan 22 08:44:56 crc kubenswrapper[4982]: E0122 08:44:56.984500 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0" containerName="extract-utilities" Jan 22 08:44:56 crc kubenswrapper[4982]: I0122 08:44:56.984510 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0" containerName="extract-utilities" Jan 22 08:44:56 crc kubenswrapper[4982]: I0122 08:44:56.984778 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b0aa5ff-a9d9-4882-9dce-8b44246ed2e0" containerName="registry-server" Jan 22 08:44:56 crc kubenswrapper[4982]: I0122 08:44:56.986870 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c6pps" Jan 22 08:44:56 crc kubenswrapper[4982]: I0122 08:44:56.999462 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c6pps"] Jan 22 08:44:57 crc kubenswrapper[4982]: I0122 08:44:57.141530 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55424852-bf64-4235-87cf-a27410e4f310-catalog-content\") pod \"certified-operators-c6pps\" (UID: \"55424852-bf64-4235-87cf-a27410e4f310\") " pod="openshift-marketplace/certified-operators-c6pps" Jan 22 08:44:57 crc kubenswrapper[4982]: I0122 08:44:57.141698 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5xrs\" (UniqueName: \"kubernetes.io/projected/55424852-bf64-4235-87cf-a27410e4f310-kube-api-access-w5xrs\") pod \"certified-operators-c6pps\" (UID: \"55424852-bf64-4235-87cf-a27410e4f310\") " pod="openshift-marketplace/certified-operators-c6pps" Jan 22 08:44:57 crc kubenswrapper[4982]: I0122 08:44:57.141822 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55424852-bf64-4235-87cf-a27410e4f310-utilities\") pod \"certified-operators-c6pps\" (UID: \"55424852-bf64-4235-87cf-a27410e4f310\") " pod="openshift-marketplace/certified-operators-c6pps" Jan 22 08:44:57 crc kubenswrapper[4982]: I0122 08:44:57.244614 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5xrs\" (UniqueName: \"kubernetes.io/projected/55424852-bf64-4235-87cf-a27410e4f310-kube-api-access-w5xrs\") pod \"certified-operators-c6pps\" (UID: \"55424852-bf64-4235-87cf-a27410e4f310\") " pod="openshift-marketplace/certified-operators-c6pps" Jan 22 08:44:57 crc kubenswrapper[4982]: I0122 08:44:57.245069 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55424852-bf64-4235-87cf-a27410e4f310-utilities\") pod \"certified-operators-c6pps\" (UID: \"55424852-bf64-4235-87cf-a27410e4f310\") " pod="openshift-marketplace/certified-operators-c6pps" Jan 22 08:44:57 crc kubenswrapper[4982]: I0122 08:44:57.245218 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55424852-bf64-4235-87cf-a27410e4f310-catalog-content\") pod \"certified-operators-c6pps\" (UID: \"55424852-bf64-4235-87cf-a27410e4f310\") " pod="openshift-marketplace/certified-operators-c6pps" Jan 22 08:44:57 crc kubenswrapper[4982]: I0122 08:44:57.245662 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55424852-bf64-4235-87cf-a27410e4f310-utilities\") pod \"certified-operators-c6pps\" (UID: \"55424852-bf64-4235-87cf-a27410e4f310\") " pod="openshift-marketplace/certified-operators-c6pps" Jan 22 08:44:57 crc kubenswrapper[4982]: I0122 08:44:57.245750 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55424852-bf64-4235-87cf-a27410e4f310-catalog-content\") pod \"certified-operators-c6pps\" (UID: \"55424852-bf64-4235-87cf-a27410e4f310\") " pod="openshift-marketplace/certified-operators-c6pps" Jan 22 08:44:57 crc kubenswrapper[4982]: I0122 08:44:57.282825 4982 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-w5xrs\" (UniqueName: \"kubernetes.io/projected/55424852-bf64-4235-87cf-a27410e4f310-kube-api-access-w5xrs\") pod \"certified-operators-c6pps\" (UID: \"55424852-bf64-4235-87cf-a27410e4f310\") " pod="openshift-marketplace/certified-operators-c6pps" Jan 22 08:44:57 crc kubenswrapper[4982]: I0122 08:44:57.335004 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c6pps" Jan 22 08:44:57 crc kubenswrapper[4982]: I0122 08:44:57.956225 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c6pps"] Jan 22 08:44:58 crc kubenswrapper[4982]: I0122 08:44:58.254924 4982 generic.go:334] "Generic (PLEG): container finished" podID="55424852-bf64-4235-87cf-a27410e4f310" containerID="481452ace615ee29f6d1fc6003adccbca475c5fe97e1b8f86e306a869f001522" exitCode=0 Jan 22 08:44:58 crc kubenswrapper[4982]: I0122 08:44:58.254967 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c6pps" event={"ID":"55424852-bf64-4235-87cf-a27410e4f310","Type":"ContainerDied","Data":"481452ace615ee29f6d1fc6003adccbca475c5fe97e1b8f86e306a869f001522"} Jan 22 08:44:58 crc kubenswrapper[4982]: I0122 08:44:58.254994 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c6pps" event={"ID":"55424852-bf64-4235-87cf-a27410e4f310","Type":"ContainerStarted","Data":"9b955977757fa7fea82450d7e4cc287abb5a61635388b22fe50a7c4a0340a597"} Jan 22 08:44:59 crc kubenswrapper[4982]: I0122 08:44:59.267248 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c6pps" event={"ID":"55424852-bf64-4235-87cf-a27410e4f310","Type":"ContainerStarted","Data":"3ae1d195ba6cc72af028f05ea3dfde958562c7fc737b143d0fcdad442a667993"} Jan 22 08:45:00 crc kubenswrapper[4982]: I0122 08:45:00.187572 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484525-hq8xg"] Jan 22 08:45:00 crc kubenswrapper[4982]: I0122 08:45:00.191609 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484525-hq8xg" Jan 22 08:45:00 crc kubenswrapper[4982]: I0122 08:45:00.194145 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 22 08:45:00 crc kubenswrapper[4982]: I0122 08:45:00.195028 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 22 08:45:00 crc kubenswrapper[4982]: I0122 08:45:00.197872 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484525-hq8xg"] Jan 22 08:45:00 crc kubenswrapper[4982]: I0122 08:45:00.277889 4982 generic.go:334] "Generic (PLEG): container finished" podID="55424852-bf64-4235-87cf-a27410e4f310" containerID="3ae1d195ba6cc72af028f05ea3dfde958562c7fc737b143d0fcdad442a667993" exitCode=0 Jan 22 08:45:00 crc kubenswrapper[4982]: I0122 08:45:00.277943 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c6pps" event={"ID":"55424852-bf64-4235-87cf-a27410e4f310","Type":"ContainerDied","Data":"3ae1d195ba6cc72af028f05ea3dfde958562c7fc737b143d0fcdad442a667993"} Jan 22 08:45:00 crc kubenswrapper[4982]: I0122 08:45:00.322243 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f3665f04-6634-44a7-8862-5afb97d5e209-config-volume\") pod \"collect-profiles-29484525-hq8xg\" (UID: \"f3665f04-6634-44a7-8862-5afb97d5e209\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484525-hq8xg" Jan 22 08:45:00 crc kubenswrapper[4982]: I0122 08:45:00.322283 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28h5b\" (UniqueName: \"kubernetes.io/projected/f3665f04-6634-44a7-8862-5afb97d5e209-kube-api-access-28h5b\") pod \"collect-profiles-29484525-hq8xg\" (UID: \"f3665f04-6634-44a7-8862-5afb97d5e209\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484525-hq8xg" Jan 22 08:45:00 crc kubenswrapper[4982]: I0122 08:45:00.322346 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f3665f04-6634-44a7-8862-5afb97d5e209-secret-volume\") pod \"collect-profiles-29484525-hq8xg\" (UID: \"f3665f04-6634-44a7-8862-5afb97d5e209\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484525-hq8xg" Jan 22 08:45:00 crc kubenswrapper[4982]: I0122 08:45:00.425058 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f3665f04-6634-44a7-8862-5afb97d5e209-config-volume\") pod \"collect-profiles-29484525-hq8xg\" (UID: \"f3665f04-6634-44a7-8862-5afb97d5e209\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484525-hq8xg" Jan 22 08:45:00 crc kubenswrapper[4982]: I0122 08:45:00.425494 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28h5b\" (UniqueName: \"kubernetes.io/projected/f3665f04-6634-44a7-8862-5afb97d5e209-kube-api-access-28h5b\") pod \"collect-profiles-29484525-hq8xg\" (UID: \"f3665f04-6634-44a7-8862-5afb97d5e209\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484525-hq8xg" Jan 22 08:45:00 crc kubenswrapper[4982]: I0122 08:45:00.425777 4982 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f3665f04-6634-44a7-8862-5afb97d5e209-secret-volume\") pod \"collect-profiles-29484525-hq8xg\" (UID: \"f3665f04-6634-44a7-8862-5afb97d5e209\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484525-hq8xg" Jan 22 08:45:00 crc kubenswrapper[4982]: I0122 08:45:00.430074 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f3665f04-6634-44a7-8862-5afb97d5e209-config-volume\") pod \"collect-profiles-29484525-hq8xg\" (UID: \"f3665f04-6634-44a7-8862-5afb97d5e209\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484525-hq8xg" Jan 22 08:45:00 crc kubenswrapper[4982]: I0122 08:45:00.436275 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f3665f04-6634-44a7-8862-5afb97d5e209-secret-volume\") pod \"collect-profiles-29484525-hq8xg\" (UID: \"f3665f04-6634-44a7-8862-5afb97d5e209\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484525-hq8xg" Jan 22 08:45:00 crc kubenswrapper[4982]: I0122 08:45:00.447581 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28h5b\" (UniqueName: \"kubernetes.io/projected/f3665f04-6634-44a7-8862-5afb97d5e209-kube-api-access-28h5b\") pod \"collect-profiles-29484525-hq8xg\" (UID: \"f3665f04-6634-44a7-8862-5afb97d5e209\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484525-hq8xg" Jan 22 08:45:00 crc kubenswrapper[4982]: I0122 08:45:00.517918 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484525-hq8xg" Jan 22 08:45:01 crc kubenswrapper[4982]: W0122 08:45:01.003185 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf3665f04_6634_44a7_8862_5afb97d5e209.slice/crio-1e1b95ac35f76420344832788c387ec691e7446b16e45030d6e7bada4652b003 WatchSource:0}: Error finding container 1e1b95ac35f76420344832788c387ec691e7446b16e45030d6e7bada4652b003: Status 404 returned error can't find the container with id 1e1b95ac35f76420344832788c387ec691e7446b16e45030d6e7bada4652b003 Jan 22 08:45:01 crc kubenswrapper[4982]: I0122 08:45:01.003911 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484525-hq8xg"] Jan 22 08:45:01 crc kubenswrapper[4982]: I0122 08:45:01.294748 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484525-hq8xg" event={"ID":"f3665f04-6634-44a7-8862-5afb97d5e209","Type":"ContainerStarted","Data":"2dcaf14a591bcad9893b687ca03970fd427ec8a3b2e61716cc8f35df421926fe"} Jan 22 08:45:01 crc kubenswrapper[4982]: I0122 08:45:01.296733 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484525-hq8xg" event={"ID":"f3665f04-6634-44a7-8862-5afb97d5e209","Type":"ContainerStarted","Data":"1e1b95ac35f76420344832788c387ec691e7446b16e45030d6e7bada4652b003"} Jan 22 08:45:01 crc kubenswrapper[4982]: I0122 08:45:01.299444 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c6pps" event={"ID":"55424852-bf64-4235-87cf-a27410e4f310","Type":"ContainerStarted","Data":"2326a282b7437c0f5423ac6c61d3afea0d175b29c5aa3db69ae7fcfa727c30cf"} Jan 22 08:45:01 crc 
kubenswrapper[4982]: I0122 08:45:01.318703 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29484525-hq8xg" podStartSLOduration=1.31868191 podStartE2EDuration="1.31868191s" podCreationTimestamp="2026-01-22 08:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 08:45:01.31645572 +0000 UTC m=+10762.155093723" watchObservedRunningTime="2026-01-22 08:45:01.31868191 +0000 UTC m=+10762.157319913" Jan 22 08:45:01 crc kubenswrapper[4982]: I0122 08:45:01.339969 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-c6pps" podStartSLOduration=2.86409578 podStartE2EDuration="5.339948626s" podCreationTimestamp="2026-01-22 08:44:56 +0000 UTC" firstStartedPulling="2026-01-22 08:44:58.259203365 +0000 UTC m=+10759.097841388" lastFinishedPulling="2026-01-22 08:45:00.735056231 +0000 UTC m=+10761.573694234" observedRunningTime="2026-01-22 08:45:01.33456324 +0000 UTC m=+10762.173201263" watchObservedRunningTime="2026-01-22 08:45:01.339948626 +0000 UTC m=+10762.178586619" Jan 22 08:45:02 crc kubenswrapper[4982]: I0122 08:45:02.313608 4982 generic.go:334] "Generic (PLEG): container finished" podID="f3665f04-6634-44a7-8862-5afb97d5e209" containerID="2dcaf14a591bcad9893b687ca03970fd427ec8a3b2e61716cc8f35df421926fe" exitCode=0 Jan 22 08:45:02 crc kubenswrapper[4982]: I0122 08:45:02.313691 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484525-hq8xg" event={"ID":"f3665f04-6634-44a7-8862-5afb97d5e209","Type":"ContainerDied","Data":"2dcaf14a591bcad9893b687ca03970fd427ec8a3b2e61716cc8f35df421926fe"} Jan 22 08:45:03 crc kubenswrapper[4982]: I0122 08:45:03.776253 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484525-hq8xg" Jan 22 08:45:03 crc kubenswrapper[4982]: I0122 08:45:03.910388 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f3665f04-6634-44a7-8862-5afb97d5e209-config-volume\") pod \"f3665f04-6634-44a7-8862-5afb97d5e209\" (UID: \"f3665f04-6634-44a7-8862-5afb97d5e209\") " Jan 22 08:45:03 crc kubenswrapper[4982]: I0122 08:45:03.910639 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f3665f04-6634-44a7-8862-5afb97d5e209-secret-volume\") pod \"f3665f04-6634-44a7-8862-5afb97d5e209\" (UID: \"f3665f04-6634-44a7-8862-5afb97d5e209\") " Jan 22 08:45:03 crc kubenswrapper[4982]: I0122 08:45:03.910754 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-28h5b\" (UniqueName: \"kubernetes.io/projected/f3665f04-6634-44a7-8862-5afb97d5e209-kube-api-access-28h5b\") pod \"f3665f04-6634-44a7-8862-5afb97d5e209\" (UID: \"f3665f04-6634-44a7-8862-5afb97d5e209\") " Jan 22 08:45:03 crc kubenswrapper[4982]: I0122 08:45:03.911509 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f3665f04-6634-44a7-8862-5afb97d5e209-config-volume" (OuterVolumeSpecName: "config-volume") pod "f3665f04-6634-44a7-8862-5afb97d5e209" (UID: "f3665f04-6634-44a7-8862-5afb97d5e209"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 08:45:03 crc kubenswrapper[4982]: I0122 08:45:03.912014 4982 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f3665f04-6634-44a7-8862-5afb97d5e209-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 08:45:03 crc kubenswrapper[4982]: I0122 08:45:03.917436 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3665f04-6634-44a7-8862-5afb97d5e209-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "f3665f04-6634-44a7-8862-5afb97d5e209" (UID: "f3665f04-6634-44a7-8862-5afb97d5e209"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 08:45:03 crc kubenswrapper[4982]: I0122 08:45:03.917666 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3665f04-6634-44a7-8862-5afb97d5e209-kube-api-access-28h5b" (OuterVolumeSpecName: "kube-api-access-28h5b") pod "f3665f04-6634-44a7-8862-5afb97d5e209" (UID: "f3665f04-6634-44a7-8862-5afb97d5e209"). InnerVolumeSpecName "kube-api-access-28h5b". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 08:45:04 crc kubenswrapper[4982]: I0122 08:45:04.014439 4982 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f3665f04-6634-44a7-8862-5afb97d5e209-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 22 08:45:04 crc kubenswrapper[4982]: I0122 08:45:04.014733 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-28h5b\" (UniqueName: \"kubernetes.io/projected/f3665f04-6634-44a7-8862-5afb97d5e209-kube-api-access-28h5b\") on node \"crc\" DevicePath \"\"" Jan 22 08:45:04 crc kubenswrapper[4982]: I0122 08:45:04.338440 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484525-hq8xg" event={"ID":"f3665f04-6634-44a7-8862-5afb97d5e209","Type":"ContainerDied","Data":"1e1b95ac35f76420344832788c387ec691e7446b16e45030d6e7bada4652b003"} Jan 22 08:45:04 crc kubenswrapper[4982]: I0122 08:45:04.338475 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1e1b95ac35f76420344832788c387ec691e7446b16e45030d6e7bada4652b003" Jan 22 08:45:04 crc kubenswrapper[4982]: I0122 08:45:04.338523 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484525-hq8xg" Jan 22 08:45:05 crc kubenswrapper[4982]: I0122 08:45:05.024585 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484480-tgw72"] Jan 22 08:45:05 crc kubenswrapper[4982]: I0122 08:45:05.038839 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484480-tgw72"] Jan 22 08:45:05 crc kubenswrapper[4982]: I0122 08:45:05.739229 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4" path="/var/lib/kubelet/pods/f009b501-ecfb-4a4c-93ab-4b73e3b3e5a4/volumes" Jan 22 08:45:07 crc kubenswrapper[4982]: I0122 08:45:07.335706 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-c6pps" Jan 22 08:45:07 crc kubenswrapper[4982]: I0122 08:45:07.335776 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-c6pps" Jan 22 08:45:07 crc kubenswrapper[4982]: I0122 08:45:07.946310 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-nmstate/nmstate-handler-q2hd9" podUID="2d9b1a0a-d7ca-4e76-ab0d-40cc990e4fb8" containerName="nmstate-handler" probeResult="failure" output="command timed out" Jan 22 08:45:08 crc kubenswrapper[4982]: I0122 08:45:08.197949 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-c6pps" Jan 22 08:45:08 crc kubenswrapper[4982]: I0122 08:45:08.285542 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-c6pps" Jan 22 08:45:08 crc kubenswrapper[4982]: I0122 08:45:08.754400 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c6pps"] Jan 22 08:45:09 crc kubenswrapper[4982]: I0122 08:45:09.392472 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-c6pps" podUID="55424852-bf64-4235-87cf-a27410e4f310" containerName="registry-server" containerID="cri-o://2326a282b7437c0f5423ac6c61d3afea0d175b29c5aa3db69ae7fcfa727c30cf" gracePeriod=2 Jan 22 08:45:10 crc kubenswrapper[4982]: I0122 08:45:10.408846 4982 generic.go:334] "Generic (PLEG): container finished" podID="55424852-bf64-4235-87cf-a27410e4f310" containerID="2326a282b7437c0f5423ac6c61d3afea0d175b29c5aa3db69ae7fcfa727c30cf" exitCode=0 Jan 22 08:45:10 crc kubenswrapper[4982]: I0122 08:45:10.409768 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c6pps" event={"ID":"55424852-bf64-4235-87cf-a27410e4f310","Type":"ContainerDied","Data":"2326a282b7437c0f5423ac6c61d3afea0d175b29c5aa3db69ae7fcfa727c30cf"} Jan 22 08:45:10 crc kubenswrapper[4982]: I0122 08:45:10.410055 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c6pps" event={"ID":"55424852-bf64-4235-87cf-a27410e4f310","Type":"ContainerDied","Data":"9b955977757fa7fea82450d7e4cc287abb5a61635388b22fe50a7c4a0340a597"} Jan 22 08:45:10 crc kubenswrapper[4982]: I0122 08:45:10.410077 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9b955977757fa7fea82450d7e4cc287abb5a61635388b22fe50a7c4a0340a597" Jan 22 08:45:10 crc kubenswrapper[4982]: I0122 08:45:10.484950 4982 util.go:48] "No ready sandbox for 
pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c6pps" Jan 22 08:45:10 crc kubenswrapper[4982]: I0122 08:45:10.575123 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w5xrs\" (UniqueName: \"kubernetes.io/projected/55424852-bf64-4235-87cf-a27410e4f310-kube-api-access-w5xrs\") pod \"55424852-bf64-4235-87cf-a27410e4f310\" (UID: \"55424852-bf64-4235-87cf-a27410e4f310\") " Jan 22 08:45:10 crc kubenswrapper[4982]: I0122 08:45:10.575361 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55424852-bf64-4235-87cf-a27410e4f310-utilities\") pod \"55424852-bf64-4235-87cf-a27410e4f310\" (UID: \"55424852-bf64-4235-87cf-a27410e4f310\") " Jan 22 08:45:10 crc kubenswrapper[4982]: I0122 08:45:10.575450 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55424852-bf64-4235-87cf-a27410e4f310-catalog-content\") pod \"55424852-bf64-4235-87cf-a27410e4f310\" (UID: \"55424852-bf64-4235-87cf-a27410e4f310\") " Jan 22 08:45:10 crc kubenswrapper[4982]: I0122 08:45:10.576379 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55424852-bf64-4235-87cf-a27410e4f310-utilities" (OuterVolumeSpecName: "utilities") pod "55424852-bf64-4235-87cf-a27410e4f310" (UID: "55424852-bf64-4235-87cf-a27410e4f310"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 08:45:10 crc kubenswrapper[4982]: I0122 08:45:10.588389 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55424852-bf64-4235-87cf-a27410e4f310-kube-api-access-w5xrs" (OuterVolumeSpecName: "kube-api-access-w5xrs") pod "55424852-bf64-4235-87cf-a27410e4f310" (UID: "55424852-bf64-4235-87cf-a27410e4f310"). InnerVolumeSpecName "kube-api-access-w5xrs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 08:45:10 crc kubenswrapper[4982]: I0122 08:45:10.677201 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55424852-bf64-4235-87cf-a27410e4f310-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 08:45:10 crc kubenswrapper[4982]: I0122 08:45:10.677235 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w5xrs\" (UniqueName: \"kubernetes.io/projected/55424852-bf64-4235-87cf-a27410e4f310-kube-api-access-w5xrs\") on node \"crc\" DevicePath \"\"" Jan 22 08:45:11 crc kubenswrapper[4982]: I0122 08:45:11.037527 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55424852-bf64-4235-87cf-a27410e4f310-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "55424852-bf64-4235-87cf-a27410e4f310" (UID: "55424852-bf64-4235-87cf-a27410e4f310"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 08:45:11 crc kubenswrapper[4982]: I0122 08:45:11.085397 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55424852-bf64-4235-87cf-a27410e4f310-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 08:45:11 crc kubenswrapper[4982]: I0122 08:45:11.418820 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c6pps" Jan 22 08:45:11 crc kubenswrapper[4982]: I0122 08:45:11.470272 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c6pps"] Jan 22 08:45:11 crc kubenswrapper[4982]: I0122 08:45:11.482072 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-c6pps"] Jan 22 08:45:11 crc kubenswrapper[4982]: I0122 08:45:11.760841 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55424852-bf64-4235-87cf-a27410e4f310" path="/var/lib/kubelet/pods/55424852-bf64-4235-87cf-a27410e4f310/volumes" Jan 22 08:45:12 crc kubenswrapper[4982]: I0122 08:45:12.282786 4982 scope.go:117] "RemoveContainer" containerID="89238474789a57e4783d160c7fcfffaed698443f1f53bf0e02f6f74abf9e63ca" Jan 22 08:45:18 crc kubenswrapper[4982]: I0122 08:45:18.974480 4982 patch_prober.go:28] interesting pod/machine-config-daemon-gdpxx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 08:45:18 crc kubenswrapper[4982]: I0122 08:45:18.977277 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gdpxx" podUID="2829369e-72ba-4637-853b-88f5cf242a0e" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515134361677024462 0ustar coreroot  Om77'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015134361700017362 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015134334264016512 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015134334264015462 5ustar corecore